From 7edec111cf3757efaa1cf1329d2574c16785e7a1 Mon Sep 17 00:00:00 2001 From: Nikita Tkachenko Date: Fri, 19 Jan 2024 15:48:32 +0100 Subject: [PATCH 01/17] Rework CI Visibility spans batching --- .../plugins/datadog/DatadogClient.java | 83 +---- .../datadog/DatadogGlobalConfiguration.java | 11 +- .../plugins/datadog/DatadogUtilities.java | 13 - .../datadog/clients/ClientFactory.java | 4 +- .../datadog/clients/DatadogAgentClient.java | 322 ++++-------------- ...gHttpClient.java => DatadogApiClient.java} | 229 ++++--------- .../plugins/datadog/clients/HttpClient.java | 25 +- .../listeners/DatadogBuildListener.java | 38 ++- .../listeners/DatadogGraphListener.java | 17 +- .../datadog/traces/DatadogBaseBuildLogic.java | 11 +- .../traces/DatadogBasePipelineLogic.java | 11 +- .../traces/DatadogTraceBuildLogic.java | 57 +--- .../traces/DatadogTracePipelineLogic.java | 62 ++-- .../traces/DatadogWebhookBuildLogic.java | 48 +-- .../traces/DatadogWebhookPipelineLogic.java | 51 +-- .../traces/mapper/JsonTraceSpanMapper.java | 89 ++--- .../datadog/traces/message/TraceSpan.java | 11 +- .../traces/write/AgentTraceWriteStrategy.java | 96 ++++++ .../traces/write/TraceWriteStrategy.java | 16 + .../traces/write/TraceWriteStrategyImpl.java | 56 +++ .../datadog/traces/write/TraceWriter.java | 118 +++++++ .../traces/write/TraceWriterFactory.java | 30 ++ .../plugins/datadog/transport/HttpClient.java | 12 - .../datadog/transport/HttpErrorHandler.java | 6 - .../datadog/transport/HttpMessage.java | 50 --- .../datadog/transport/HttpMessageFactory.java | 55 --- .../plugins/datadog/transport/HttpSender.java | 77 ----- .../transport/LoggerHttpErrorHandler.java | 17 - .../transport/NonBlockingHttpClient.java | 140 -------- .../datadog/transport/PayloadMapper.java | 10 - .../datadog/transport/PayloadMessage.java | 10 - .../plugins/datadog/util/CircuitBreaker.java | 83 +++++ .../datadog/clients/DatadogClientStub.java | 224 +++++------- .../datadog/clients/DatadogClientTest.java | 32 +- 
.../listeners/DatadogBuildListenerIT.java | 72 ++-- .../listeners/DatadogGraphListenerTest.java | 121 +++---- .../mapper/JsonTraceSpanMapperTest.java | 45 +-- .../datadog/transport/FakeHttpSender.java | 70 ---- .../transport/FakeTracesHttpClient.java | 85 ----- .../datadog/transport/HttpSenderTest.java | 44 --- 40 files changed, 899 insertions(+), 1652 deletions(-) rename src/main/java/org/datadog/jenkins/plugins/datadog/clients/{DatadogHttpClient.java => DatadogApiClient.java} (75%) create mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java create mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java create mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java create mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java create mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriterFactory.java delete mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpClient.java delete mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpErrorHandler.java delete mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpMessage.java delete mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpMessageFactory.java delete mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpSender.java delete mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/transport/LoggerHttpErrorHandler.java delete mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/transport/NonBlockingHttpClient.java delete mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/transport/PayloadMapper.java delete mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/transport/PayloadMessage.java create mode 100644 
src/main/java/org/datadog/jenkins/plugins/datadog/util/CircuitBreaker.java delete mode 100644 src/test/java/org/datadog/jenkins/plugins/datadog/transport/FakeHttpSender.java delete mode 100644 src/test/java/org/datadog/jenkins/plugins/datadog/transport/FakeTracesHttpClient.java delete mode 100644 src/test/java/org/datadog/jenkins/plugins/datadog/transport/HttpSenderTest.java diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogClient.java b/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogClient.java index b5508a66e..eb72d033a 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogClient.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogClient.java @@ -26,24 +26,21 @@ of this software and associated documentation files (the "Software"), to deal package org.datadog.jenkins.plugins.datadog; import com.timgroup.statsd.ServiceCheck; -import hudson.model.Run; -import hudson.util.Secret; import java.util.Map; import java.util.Set; import org.datadog.jenkins.plugins.datadog.clients.Metrics; -import org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.jenkinsci.plugins.workflow.graph.FlowNode; +import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriteStrategy; public interface DatadogClient { - public static enum ClientType { + enum ClientType { HTTP, DSD; - private ClientType() { } + ClientType() { } } - public static enum Status { + enum Status { OK(0), WARNING(1), CRITICAL(2), @@ -51,7 +48,7 @@ public static enum Status { private final int val; - private Status(int val) { + Status(int val) { this.val = val; } @@ -64,39 +61,13 @@ public ServiceCheck.Status toServiceCheckStatus(){ } } - public void setUrl(String url); - - public void setLogIntakeUrl(String logIntakeUrl); - - public void setWebhookIntakeUrl(String webhookIntakeUrl); - - public void setApiKey(Secret apiKey); - - public void setHostname(String hostname); - - public void setPort(Integer port); - - public void 
setLogCollectionPort(Integer logCollectionPort); - - public boolean isDefaultIntakeConnectionBroken(); - - public void setDefaultIntakeConnectionBroken(boolean defaultIntakeConnectionBroken); - - public boolean isLogIntakeConnectionBroken(); - - public boolean isWebhookIntakeConnectionBroken(); - - public void setLogIntakeConnectionBroken(boolean logIntakeConnectionBroken); - - public void setWebhookIntakeConnectionBroken(boolean webhookIntakeConnectionBroken); - /** * Sends an event to the Datadog API, including the event payload. * * @param event - a DatadogEvent object * @return a boolean to signify the success or failure of the HTTP POST request. */ - public boolean event(DatadogEvent event); + boolean event(DatadogEvent event); /** * Increment a counter for the given metrics. @@ -107,14 +78,14 @@ public ServiceCheck.Status toServiceCheckStatus(){ * @param tags - metric tags * @return a boolean to signify the success or failure of increment submission. */ - public boolean incrementCounter(String name, String hostname, Map> tags); + boolean incrementCounter(String name, String hostname, Map> tags); /** * Submit all your counters as rate with 10 seconds intervals. */ - public void flushCounters(); + void flushCounters(); - public Metrics metrics(); + Metrics metrics(); /** * Sends a service check to the Datadog API, including the check name, and status. @@ -125,45 +96,15 @@ public ServiceCheck.Status toServiceCheckStatus(){ * @param tags - A Map containing the tags to submit. * @return a boolean to signify the success or failure of the HTTP POST request. */ - public boolean serviceCheck(String name, Status status, String hostname, Map> tags); + boolean serviceCheck(String name, Status status, String hostname, Map> tags); /** * Send log message. * @param payload log payload to submit JSON object as String * @return a boolean to signify the success or failure of the request. 
*/ - public boolean sendLogs(String payload); + boolean sendLogs(String payload); - /** - * Send a webhook payload to the webhooks intake. - * - * @param payload - A webhooks payload. - * @return a boolean to signify the success or failure of the HTTP POST request. - */ - public boolean postWebhook(String payload); - - /** - * Start the trace of a certain Jenkins build. - * @param buildData build data to use in the pipeline trace - * @param run a particular execution of a Jenkins build - * @return a boolean to signify the success or failure of the request. - */ - boolean startBuildTrace(BuildData buildData, Run run); - - /** - * Finish the trace of a certain Jenkins build. - * @param buildData build data to use in the pipeline trace - * @param run the run to create a pipeline trace for - * @return a boolean to signify the success or failure of the request. - */ - boolean finishBuildTrace(BuildData buildData, Run run); - - /** - * Send all traces related to a certain Jenkins pipeline. - * @param run a particular execution of a Jenkins build - * @param flowNode current flowNode - * @return a boolean to signify the success or failure of the request. 
- */ - boolean sendPipelineTrace(Run run, FlowNode flowNode); + TraceWriteStrategy createTraceWriteStrategy(); } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogGlobalConfiguration.java b/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogGlobalConfiguration.java index 7675ccc5f..54c3b3e60 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogGlobalConfiguration.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogGlobalConfiguration.java @@ -44,13 +44,14 @@ of this software and associated documentation files (the "Software"), to deal import com.cloudbees.plugins.credentials.common.StandardListBoxModel; import com.cloudbees.plugins.credentials.domains.URIRequirementBuilder; import org.datadog.jenkins.plugins.datadog.clients.HttpClient; +import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriterFactory; import org.jenkinsci.plugins.plaincredentials.StringCredentials; import net.sf.json.JSONObject; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.math.NumberUtils; import org.datadog.jenkins.plugins.datadog.clients.ClientFactory; -import org.datadog.jenkins.plugins.datadog.clients.DatadogHttpClient; +import org.datadog.jenkins.plugins.datadog.clients.DatadogApiClient; import org.datadog.jenkins.plugins.datadog.clients.DatadogAgentClient; import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings; import org.datadog.jenkins.plugins.datadog.util.config.DatadogAgentConfiguration; @@ -445,7 +446,7 @@ public FormValidation doTestConnection( throws IOException, ServletException { Jenkins.getInstance().checkPermission(Jenkins.ADMINISTER); final Secret secret = findSecret(targetApiKey, targetCredentialsApiKey); - if (DatadogHttpClient.validateDefaultIntakeConnection(new HttpClient(60_000), targetApiURL, secret)) { + if (DatadogApiClient.validateDefaultIntakeConnection(new HttpClient(60_000), targetApiURL, secret)) { return FormValidation.ok("Great! 
Your API key is valid."); } else { return FormValidation.error("Hmmm, your API key seems to be invalid."); @@ -839,9 +840,9 @@ public boolean configure(final StaplerRequest req, final JSONObject formData) th if(client == null) { return false; } - client.setDefaultIntakeConnectionBroken(false); - client.setLogIntakeConnectionBroken(false); - client.setWebhookIntakeConnectionBroken(false); + + TraceWriterFactory.onDatadogClientUpdate(client); + // Persist global configuration information save(); return true; diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java b/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java index d4035b93c..a2107fb06 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java @@ -987,19 +987,6 @@ public static boolean isPipeline(final Run run) { return run != null && run.getAction(IsPipelineAction.class) != null; } - /** - * Returns an HTTP URL - * - * @param hostname - the Hostname - * @param port - the port to use - * @param path - the path - * @return the HTTP URL - * @throws MalformedURLException if the URL is not in a valid format - */ - public static URL buildHttpURL(final String hostname, final Integer port, final String path) throws MalformedURLException { - return new URL(String.format("http://%s:%d" + path, hostname, port)); - } - public static String getCatchErrorResult(BlockStartNode startNode) { String displayFunctionName = startNode.getDisplayFunctionName(); if ("warnError".equals(displayFunctionName)) { diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/ClientFactory.java b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/ClientFactory.java index b8c2a83bc..90ec8d161 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/ClientFactory.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/ClientFactory.java @@ -29,6 
+29,7 @@ of this software and associated documentation files (the "Software"), to deal import org.datadog.jenkins.plugins.datadog.DatadogClient; import org.datadog.jenkins.plugins.datadog.DatadogGlobalConfiguration; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; +import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriterFactory; public class ClientFactory { private static DatadogClient testClient; @@ -36,6 +37,7 @@ public class ClientFactory { public static void setTestClient(DatadogClient testClient){ // Only used for tests ClientFactory.testClient = testClient; + TraceWriterFactory.onDatadogClientUpdate(testClient); } public static DatadogClient getClient(DatadogClient.ClientType type, String apiUrl, String logIntakeUrl, @@ -47,7 +49,7 @@ public static DatadogClient getClient(DatadogClient.ClientType type, String apiU } switch(type){ case HTTP: - return DatadogHttpClient.getInstance(apiUrl, logIntakeUrl, webhookIntakeUrl, apiKey); + return DatadogApiClient.getInstance(apiUrl, logIntakeUrl, webhookIntakeUrl, apiKey); case DSD: return DatadogAgentClient.getInstance(host, port, logCollectionPort, traceCollectionPort); default: diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java index c75816d3e..b73756625 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java @@ -25,26 +25,22 @@ of this software and associated documentation files (the "Software"), to deal package org.datadog.jenkins.plugins.datadog.clients; -import static org.datadog.jenkins.plugins.datadog.DatadogUtilities.buildHttpURL; -import static org.datadog.jenkins.plugins.datadog.transport.LoggerHttpErrorHandler.LOGGER_HTTP_ERROR_HANDLER; - import com.timgroup.statsd.Event; import com.timgroup.statsd.NonBlockingStatsDClient; import 
com.timgroup.statsd.ServiceCheck; import com.timgroup.statsd.StatsDClient; -import hudson.model.Run; -import hudson.util.Secret; import java.net.ConnectException; import java.net.InetAddress; import java.net.Socket; -import java.net.URL; import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; import java.util.logging.Handler; import java.util.logging.Logger; import java.util.logging.SocketHandler; @@ -53,21 +49,16 @@ of this software and associated documentation files (the "Software"), to deal import org.datadog.jenkins.plugins.datadog.DatadogEvent; import org.datadog.jenkins.plugins.datadog.DatadogGlobalConfiguration; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; -import org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.datadog.jenkins.plugins.datadog.traces.DatadogBaseBuildLogic; -import org.datadog.jenkins.plugins.datadog.traces.DatadogBasePipelineLogic; import org.datadog.jenkins.plugins.datadog.traces.DatadogTraceBuildLogic; import org.datadog.jenkins.plugins.datadog.traces.DatadogTracePipelineLogic; import org.datadog.jenkins.plugins.datadog.traces.DatadogWebhookBuildLogic; import org.datadog.jenkins.plugins.datadog.traces.DatadogWebhookPipelineLogic; import org.datadog.jenkins.plugins.datadog.traces.mapper.JsonTraceSpanMapper; -import org.datadog.jenkins.plugins.datadog.transport.HttpMessage; -import org.datadog.jenkins.plugins.datadog.transport.HttpMessageFactory; -import org.datadog.jenkins.plugins.datadog.transport.NonBlockingHttpClient; -import org.datadog.jenkins.plugins.datadog.transport.PayloadMessage; +import org.datadog.jenkins.plugins.datadog.traces.write.AgentTraceWriteStrategy; +import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriteStrategy; +import 
org.datadog.jenkins.plugins.datadog.traces.write.TraceWriteStrategyImpl; import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings; import org.datadog.jenkins.plugins.datadog.util.TagsUtil; -import org.jenkinsci.plugins.workflow.graph.FlowNode; import org.json.JSONArray; import org.json.JSONObject; @@ -80,43 +71,26 @@ public class DatadogAgentClient implements DatadogClient { private static volatile DatadogAgentClient instance = null; // Used to determine if the instance failed last validation last time, so // we do not keep retrying to create the instance and logging the same error - private static boolean failedLastValidation = false; + private static volatile boolean failedLastValidation = false; private static final Logger logger = Logger.getLogger(DatadogAgentClient.class.getName()); - @SuppressFBWarnings(value="MS_SHOULD_BE_FINAL") public static boolean enableValidations = true; - private org.datadog.jenkins.plugins.datadog.transport.HttpClient agentHttpClient; - private DatadogBaseBuildLogic traceBuildLogic; - private DatadogBasePipelineLogic tracePipelineLogic; - private StatsDClient statsd; private Logger ddLogger; private String previousPayload; - private String hostname = null; + private final String hostname; + private final Integer port; + private final Integer logCollectionPort; + private final Integer traceCollectionPort; private String resolvedIp = ""; - private Integer port = null; - private Integer logCollectionPort = null; - private Integer traceCollectionPort = null; private boolean isStoppedStatsDClient = true; - private boolean isStoppedAgentHttpClient = true; - private boolean evpProxySupported = false; - private long lastEvpProxyCheckTimeMs = 0L; private final HttpClient client; - /** - * How often to check the /info endpoint in case the Agent got updated. 
- */ - private static final int EVP_PROXY_SUPPORT_TIME_BETWEEN_CHECKS_MS = 1*60*60*1000; - - /** - * Timeout waiting for a reply after a connection to the /info endpoint was established. - */ - private static final int HTTP_TIMEOUT_INFO_MS = 10 * 1000; /** * Timeout of 1 minutes for connecting and reading via the synchronous Agent EVP Proxy. @@ -163,7 +137,6 @@ public static DatadogClient getInstance(String hostname, Integer port, Integer l if (instance != null){ instance.reinitializeStatsDClient(true); instance.reinitializeLogger(true); - instance.reinitializeAgentHttpClient(true); } return instance; } @@ -199,7 +172,6 @@ public void validateConfiguration() throws IllegalArgumentException { if (DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility() && traceCollectionPort == null) { logger.warning("Datadog Trace Collection Port is not set properly"); } - return; } @Override @@ -365,120 +337,6 @@ public Set fetchAgentSupportedEndpoints() { } } - /** - * Posts a given payload to the Agent EVP Proxy so it is forwarded to the Webhook Intake. - * - * @param payload - A webhooks payload. - * @return a boolean to signify the success or failure of the HTTP POST request. - */ - @SuppressFBWarnings("REC_CATCH_EXCEPTION") - @Override - public boolean postWebhook(String payload) { - logger.fine("Sending webhook"); - - if(!evpProxySupported){ - logger.severe("Trying to send a webhook but the Agent doesn't support it."); - return false; - } - - DatadogGlobalConfiguration datadogGlobalDescriptor = DatadogUtilities.getDatadogGlobalDescriptor(); - String urlParameters = datadogGlobalDescriptor != null ? 
"?service=" + datadogGlobalDescriptor.getCiInstanceName() : ""; - String url = String.format("http://%s:%d/evp_proxy/v1/api/v2/webhook/%s", hostname, traceCollectionPort, urlParameters); - - Map headers = new HashMap<>(); - headers.put("X-Datadog-EVP-Subdomain", "webhook-intake"); - headers.put("DD-CI-PROVIDER-NAME", "jenkins"); - - byte[] body = payload.getBytes(StandardCharsets.UTF_8); - - try { - client.postAsynchronously(url, headers, "application/json", body); - return true; - } catch (Exception e) { - DatadogUtilities.severe(logger, e, "Error while posting webhook"); - return false; - } - } - - /** - * reinitialize the Tracer Client - * @param force - force to reinitialize - * @return true if reinitialized properly otherwise false - */ - protected boolean reinitializeAgentHttpClient(boolean force) { - if(!this.isStoppedAgentHttpClient && this.traceBuildLogic != null && this.tracePipelineLogic != null && !force) { - return true; - } - - if(!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility() || this.getHostname() == null || this.getTraceCollectionPort() == null) { - return false; - } - - this.stopAgentHttpClient(); - try { - logger.info("Re/Initialize Datadog-Plugin Agent Http Client"); - - // Build - final URL tracesURL = buildHttpURL(this.getHostname(), this.getTraceCollectionPort(), "/v0.3/traces"); - this.agentHttpClient = NonBlockingHttpClient.builder() - .errorHandler(LOGGER_HTTP_ERROR_HANDLER) - .messageRoute(PayloadMessage.Type.TRACE, HttpMessageFactory.builder() - .agentURL(tracesURL) - .httpMethod(HttpMessage.HttpMethod.PUT) - .payloadMapper(new JsonTraceSpanMapper()) - .build()) - .build(); - - this.isStoppedAgentHttpClient = false; - return true; - } catch (Throwable e){ - DatadogUtilities.severe(logger, e, "Failed to reinitialize Datadog-Plugin Agent Http Client"); - this.stopAgentHttpClient(); - return false; - } - } - - protected boolean checkEvpProxySupportAndUpdateLogic() { - if (evpProxySupported) { - return true; // Once we 
have seen an Agent that supports EVP Proxy, we never check again. - } - if (System.currentTimeMillis() < (lastEvpProxyCheckTimeMs + EVP_PROXY_SUPPORT_TIME_BETWEEN_CHECKS_MS)) { - return evpProxySupported; // Wait at least 1 hour between checks, return the cached value - } - synchronized (DatadogAgentClient.class) { - if (!evpProxySupported) { - logger.info("Checking for EVP Proxy support in the Agent."); - Set supportedAgentEndpoints = fetchAgentSupportedEndpoints(); - evpProxySupported = supportedAgentEndpoints.contains("/evp_proxy/v3/"); - lastEvpProxyCheckTimeMs = System.currentTimeMillis(); - if (evpProxySupported) { - logger.info("EVP Proxy is supported by the Agent. We will not check again until the next boot."); - traceBuildLogic = new DatadogWebhookBuildLogic(this); - tracePipelineLogic = new DatadogWebhookPipelineLogic(this); - } else { - logger.info("The Agent doesn't support EVP Proxy, falling back to APM for CI Visibility. Requires Agent v6.42+ or 7.42+."); - traceBuildLogic = new DatadogTraceBuildLogic(this.agentHttpClient); - tracePipelineLogic = new DatadogTracePipelineLogic(this.agentHttpClient); - } - } - } - return evpProxySupported; - } - - private boolean stopAgentHttpClient() { - if(agentHttpClient != null) { - try { - this.agentHttpClient.stop(); - } catch (Throwable e) { - DatadogUtilities.severe(logger, e, "Failed to stop Agent Http Client"); - return false; - } - this.agentHttpClient = null; - } - this.isStoppedAgentHttpClient = true; - return true; - } - private boolean stopStatsDClient(){ if (this.statsd != null){ try{ @@ -498,87 +356,18 @@ public String getHostname() { return hostname; } - @Override - public void setHostname(String hostname) { - this.hostname = hostname; - } - public Integer getPort() { return port; } - @Override - public void setPort(Integer port) { - this.port = port; - } - public Integer getLogCollectionPort() { return logCollectionPort; } - @Override - public void setLogCollectionPort(Integer logCollectionPort) { - 
this.logCollectionPort = logCollectionPort; - } - public Integer getTraceCollectionPort() { return traceCollectionPort; } - @Override - public void setUrl(String url) { - // noop - } - - @Override - public void setLogIntakeUrl(String logIntakeUrl) { - // noop - } - - @Override - public void setWebhookIntakeUrl(String webhookIntakeUrl) { - // noop - } - - @Override - public void setApiKey(Secret apiKey){ - // noop - } - - @Override - public boolean isDefaultIntakeConnectionBroken() { - return false; - } - - @Override - public void setDefaultIntakeConnectionBroken(boolean defaultIntakeConnectionBroken) { - // noop - } - - @Override - public boolean isLogIntakeConnectionBroken() { - return false; - } - - @Override - public void setLogIntakeConnectionBroken(boolean logIntakeConnectionBroken) { - // noop - } - - @Override - public boolean isWebhookIntakeConnectionBroken() { - return false; - } - - @Override - public void setWebhookIntakeConnectionBroken(boolean webhookIntakeConnectionBroken) { - // noop - } - - public boolean isEvpProxySupported() { - return evpProxySupported; - } - @Override public boolean event(DatadogEvent event) { try { @@ -734,63 +523,68 @@ public boolean sendLogs(String payload) { } @Override - public boolean startBuildTrace(BuildData buildData, Run run) { - try { - boolean status = reinitializeAgentHttpClient(false); - if(!status) { - return false; - } + public TraceWriteStrategy createTraceWriteStrategy() { + TraceWriteStrategyImpl evpStrategy = new TraceWriteStrategyImpl(new DatadogWebhookBuildLogic(), new DatadogWebhookPipelineLogic(), this::sendSpansToWebhook); + TraceWriteStrategyImpl apmStrategy = new TraceWriteStrategyImpl(new DatadogTraceBuildLogic(), new DatadogTracePipelineLogic(), this::sendSpansToApm); + return new AgentTraceWriteStrategy(evpStrategy, apmStrategy, this::isEvpProxySupported); + } - checkEvpProxySupportAndUpdateLogic(); + boolean isEvpProxySupported() { + logger.info("Checking for EVP Proxy support in the Agent."); + 
Set supportedAgentEndpoints = fetchAgentSupportedEndpoints(); + return supportedAgentEndpoints.contains("/evp_proxy/v3/"); + } - logger.fine("Started build trace"); - this.traceBuildLogic.startBuildTrace(buildData, run); - return true; - } catch (Exception e) { - DatadogUtilities.severe(logger, e, "Failed to start build trace"); - reinitializeAgentHttpClient(true); - return false; + /** + * Posts a given payload to the Agent EVP Proxy, so it is forwarded to the Webhook Intake. + */ + private void sendSpansToWebhook(List spans) { + for (net.sf.json.JSONObject span : spans) { + // webhook intake does not support batch requests + postWebhook(span.toString()); } } + /** + * Posts a given payload to the Agent EVP Proxy, so it is forwarded to the Webhook Intake. + */ + private void postWebhook(String payload) { + logger.fine("Sending webhook"); - @Override - public boolean finishBuildTrace(BuildData buildData, Run run) { - try { - boolean status = reinitializeAgentHttpClient(false); - if(!status) { - return false; - } + DatadogGlobalConfiguration datadogGlobalDescriptor = DatadogUtilities.getDatadogGlobalDescriptor(); + String urlParameters = datadogGlobalDescriptor != null ? 
"?service=" + datadogGlobalDescriptor.getCiInstanceName() : ""; + String url = String.format("http://%s:%d/evp_proxy/v1/api/v2/webhook/%s", hostname, traceCollectionPort, urlParameters); - checkEvpProxySupportAndUpdateLogic(); + Map headers = new HashMap<>(); + headers.put("X-Datadog-EVP-Subdomain", "webhook-intake"); + headers.put("DD-CI-PROVIDER-NAME", "jenkins"); - logger.fine("Finished build trace"); - this.traceBuildLogic.finishBuildTrace(buildData, run); - return true; - } catch (Exception e) { - DatadogUtilities.severe(logger, e, "Failed to finish build trace"); - reinitializeAgentHttpClient(true); - return false; - } + byte[] body = payload.getBytes(StandardCharsets.UTF_8); + client.postAsynchronously(url, headers, "application/json", body); } - @Override - public boolean sendPipelineTrace(Run run, FlowNode flowNode) { + private void sendSpansToApm(List spans) { try { - boolean status = reinitializeAgentHttpClient(false); - if(!status) { - return false; + Map tracesById = new HashMap<>(); + for (net.sf.json.JSONObject span : spans) { + tracesById.computeIfAbsent(span.getString(JsonTraceSpanMapper.TRACE_ID), k -> new net.sf.json.JSONArray()).add(span); } - checkEvpProxySupportAndUpdateLogic(); + final JSONArray jsonTraces = new JSONArray(); + for(net.sf.json.JSONArray trace : tracesById.values()) { + jsonTraces.put(trace); + } + byte[] payload = jsonTraces.toString().getBytes(StandardCharsets.UTF_8); - logger.fine("Send pipeline traces."); - this.tracePipelineLogic.execute(run, flowNode); - return true; - } catch (Exception e){ - DatadogUtilities.severe(logger, e, "Failed to send pipeline trace"); - reinitializeAgentHttpClient(true); - return false; + String tracesUrl = String.format("http://%s:%d/v0.3/traces", hostname, port); + client.put(tracesUrl, Collections.emptyMap(), "application/json", payload, Function.identity()); + + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new RuntimeException("Interrupted while sending 
trace", e); + + } catch (Exception e) { + throw new RuntimeException("Error while sending trace", e); } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogHttpClient.java b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogApiClient.java similarity index 75% rename from src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogHttpClient.java rename to src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogApiClient.java index 9c76b3b8e..e67ccec68 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogHttpClient.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogApiClient.java @@ -25,12 +25,12 @@ of this software and associated documentation files (the "Software"), to deal package org.datadog.jenkins.plugins.datadog.clients; -import hudson.model.Run; import hudson.util.Secret; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentMap; @@ -45,25 +45,25 @@ of this software and associated documentation files (the "Software"), to deal import org.datadog.jenkins.plugins.datadog.DatadogEvent; import org.datadog.jenkins.plugins.datadog.DatadogGlobalConfiguration; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; -import org.datadog.jenkins.plugins.datadog.model.BuildData; import org.datadog.jenkins.plugins.datadog.traces.DatadogWebhookBuildLogic; import org.datadog.jenkins.plugins.datadog.traces.DatadogWebhookPipelineLogic; +import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriteStrategy; +import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriteStrategyImpl; import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings; import org.datadog.jenkins.plugins.datadog.util.TagsUtil; -import org.jenkinsci.plugins.workflow.graph.FlowNode; /** * This class is 
used to collect all methods that has to do with transmitting * data to Datadog. */ -public class DatadogHttpClient implements DatadogClient { +public class DatadogApiClient implements DatadogClient { - private static volatile DatadogHttpClient instance = null; + private static volatile DatadogApiClient instance = null; // Used to determine if the instance failed last validation last time, so // we do not keep retrying to create the instance and logging the same error private static boolean failedLastValidation = false; - private static final Logger logger = Logger.getLogger(DatadogHttpClient.class.getName()); + private static final Logger logger = Logger.getLogger(DatadogApiClient.class.getName()); private static final String EVENT = "v1/events"; private static final String METRIC = "v1/series"; @@ -80,15 +80,13 @@ public class DatadogHttpClient implements DatadogClient { @SuppressFBWarnings(value="MS_SHOULD_BE_FINAL") public static boolean enableValidations = true; - private String url = null; - private String logIntakeUrl = null; - private String webhookIntakeUrl = null; - private Secret apiKey = null; + private final String url; + private final String logIntakeUrl; + private final String webhookIntakeUrl; + private final Secret apiKey; private boolean defaultIntakeConnectionBroken = false; private boolean logIntakeConnectionBroken = false; private boolean webhookIntakeConnectionBroken = false; - private DatadogWebhookBuildLogic webhookBuildLogic; - private DatadogWebhookPipelineLogic webhookPipelineLogic; private final HttpClient httpClient; @@ -105,22 +103,22 @@ public static DatadogClient getInstance(String url, String logIntakeUrl, String // If the configuration has not changed, return the current instance without validation // since we've already validated and/or errored about the data - DatadogHttpClient newInstance = new DatadogHttpClient(url, logIntakeUrl, webhookIntakeUrl, apiKey); + DatadogApiClient newInstance = new DatadogApiClient(url, logIntakeUrl, 
webhookIntakeUrl, apiKey); if (instance != null && instance.equals(newInstance)) { - if (DatadogHttpClient.failedLastValidation) { + if (DatadogApiClient.failedLastValidation) { return null; } return instance; } if (enableValidations) { - synchronized (DatadogHttpClient.class) { - DatadogHttpClient.instance = newInstance; + synchronized (DatadogApiClient.class) { + DatadogApiClient.instance = newInstance; try { newInstance.validateConfiguration(); - DatadogHttpClient.failedLastValidation = false; + DatadogApiClient.failedLastValidation = false; } catch(IllegalArgumentException e){ logger.severe(e.getMessage()); - DatadogHttpClient.failedLastValidation = true; + DatadogApiClient.failedLastValidation = true; return null; } } @@ -128,13 +126,11 @@ public static DatadogClient getInstance(String url, String logIntakeUrl, String return newInstance; } - private DatadogHttpClient(String url, String logIntakeUrl, String webhookIntakeUrl, Secret apiKey) { + private DatadogApiClient(String url, String logIntakeUrl, String webhookIntakeUrl, Secret apiKey) { this.url = url; this.apiKey = apiKey; this.logIntakeUrl = logIntakeUrl; this.webhookIntakeUrl = webhookIntakeUrl; - this.webhookBuildLogic = new DatadogWebhookBuildLogic(this); - this.webhookPipelineLogic = new DatadogWebhookPipelineLogic(this); this.httpClient = new HttpClient(HTTP_TIMEOUT_MS); } @@ -152,11 +148,11 @@ public void validateConfiguration() throws IllegalArgumentException { try { boolean logConnection = validateLogIntakeConnection(); if (!logConnection) { - instance.setLogIntakeConnectionBroken(true); + this.logIntakeConnectionBroken = true; logger.warning("Connection broken, please double check both your Log Intake URL and Key"); } } catch (IOException e) { - instance.setLogIntakeConnectionBroken(true); + this.logIntakeConnectionBroken = true; logger.warning("Connection broken, please double check both your Log Intake URL and Key: " + e); } } @@ -168,18 +164,18 @@ public void validateConfiguration() throws 
IllegalArgumentException { try { boolean webhookConnection = validateWebhookIntakeConnection(); if (!webhookConnection) { - instance.setWebhookIntakeConnectionBroken(true); + this.webhookIntakeConnectionBroken = true; logger.warning("Connection broken, please double check both your Webhook Intake URL and Key"); } } catch (IOException e) { - instance.setWebhookIntakeConnectionBroken(true); + this.webhookIntakeConnectionBroken = true; logger.warning("Connection broken, please double check both your Webhook Intake URL and Key: " + e); } } boolean intakeConnection = validateDefaultIntakeConnection(httpClient, url, apiKey); if (!intakeConnection) { - instance.setDefaultIntakeConnectionBroken(true); + this.defaultIntakeConnectionBroken = true; throw new IllegalArgumentException("Connection broken, please double check both your API URL and Key"); } } @@ -189,11 +185,11 @@ public boolean equals(Object object) { if (object == this) { return true; } - if (!(object instanceof DatadogHttpClient)) { + if (!(object instanceof DatadogApiClient)) { return false; } - DatadogHttpClient newInstance = (DatadogHttpClient) object; + DatadogApiClient newInstance = (DatadogApiClient) object; return StringUtils.equals(getLogIntakeUrl(), newInstance.getLogIntakeUrl()) && StringUtils.equals(getWebhookIntakeUrl(), newInstance.getWebhookIntakeUrl()) @@ -214,86 +210,21 @@ public String getUrl() { return url; } - @Override - public void setUrl(String url) { - this.url = url; - } - public String getLogIntakeUrl() { return logIntakeUrl; } - @Override - public void setLogIntakeUrl(String logIntakeUrl) { - this.logIntakeUrl = logIntakeUrl; - } - public String getWebhookIntakeUrl() { return webhookIntakeUrl; } - @Override - public void setWebhookIntakeUrl(String webhookIntakeUrl) { - this.webhookIntakeUrl = webhookIntakeUrl; - } - public Secret getApiKey() { return apiKey; } - @Override - public void setApiKey(Secret apiKey) { - this.apiKey = apiKey; - } - - @Override - public void setHostname(String 
hostname) { - // noop - } - - @Override - public void setPort(Integer port) { - // noop - } - - @Override - public void setLogCollectionPort(Integer logCollectionPort) { - // noop - } - - @Override - public boolean isDefaultIntakeConnectionBroken() { - return defaultIntakeConnectionBroken; - } - - @Override - public void setDefaultIntakeConnectionBroken(boolean defaultIntakeConnectionBroken) { - this.defaultIntakeConnectionBroken = defaultIntakeConnectionBroken; - } - - @Override - public boolean isLogIntakeConnectionBroken() { - return logIntakeConnectionBroken; - } - - @Override - public void setLogIntakeConnectionBroken(boolean logIntakeConnectionBroken) { - this.logIntakeConnectionBroken = logIntakeConnectionBroken; - } - - @Override - public boolean isWebhookIntakeConnectionBroken() { - return webhookIntakeConnectionBroken; - } - - @Override - public void setWebhookIntakeConnectionBroken(boolean webhookIntakeConnectionBroken) { - this.webhookIntakeConnectionBroken = webhookIntakeConnectionBroken; - } - public boolean event(DatadogEvent event) { logger.fine("Sending event"); - if(this.isDefaultIntakeConnectionBroken()){ + if(this.defaultIntakeConnectionBroken){ logger.severe("Your client is not initialized properly"); return false; } @@ -319,7 +250,7 @@ public boolean event(DatadogEvent event) { @Override public boolean incrementCounter(String name, String hostname, Map> tags) { - if(this.isDefaultIntakeConnectionBroken()){ + if(this.defaultIntakeConnectionBroken){ logger.severe("Your client is not initialized properly"); return false; } @@ -449,7 +380,7 @@ public boolean serviceCheck(String name, Status status, String hostname, Map headers = new HashMap<>(); - headers.put("DD-API-KEY", Secret.toString(apiKey)); - headers.put("DD-CI-PROVIDER-NAME", "jenkins"); - - byte[] body = payload.getBytes(StandardCharsets.UTF_8); - - try { - httpClient.postAsynchronously(url, headers, "application/json", body); - return true; - } catch (Exception e) { - 
DatadogUtilities.severe(logger, e, "Failed to post webhook"); - return false; - } - } - public static boolean validateDefaultIntakeConnection(HttpClient client, String validatedUrl, Secret apiKey) { String urlParameters = "?api_key=" + Secret.toString(apiKey); String url = validatedUrl + VALIDATE + urlParameters; @@ -557,14 +454,14 @@ public static boolean validateDefaultIntakeConnection(HttpClient client, String } } - public boolean validateLogIntakeConnection() throws IOException { + private boolean validateLogIntakeConnection() throws IOException { return postLogs("{\"message\":\"[datadog-plugin] Check connection\", " + "\"ddsource\":\"Jenkins\", \"service\":\"Jenkins\", " + "\"hostname\":\""+DatadogUtilities.getHostname(null)+"\"}"); } @SuppressFBWarnings("DLS_DEAD_LOCAL_STORE") - public boolean validateWebhookIntakeConnection() throws IOException { + private boolean validateWebhookIntakeConnection() throws IOException { String url = getWebhookIntakeUrl(); Map headers = new HashMap<>(); @@ -587,50 +484,38 @@ public boolean validateWebhookIntakeConnection() throws IOException { } @Override - public boolean startBuildTrace(BuildData buildData, Run run) { - if(this.isWebhookIntakeConnectionBroken()){ - logger.severe("Unable to start build trace; your client is not initialized properly."); - return false; - } - try { - logger.fine("Started build trace"); - this.webhookBuildLogic.startBuildTrace(buildData, run); - return true; - } catch (Exception e) { - DatadogUtilities.severe(logger, e, "Failed to start build trace"); - return false; - } + public TraceWriteStrategy createTraceWriteStrategy() { + return new TraceWriteStrategyImpl(new DatadogWebhookBuildLogic(), new DatadogWebhookPipelineLogic(), this::sendSpans); } - @Override - public boolean finishBuildTrace(BuildData buildData, Run run) { - if(this.isWebhookIntakeConnectionBroken()){ - logger.severe("Unable to finish build trace; your client is not initialized properly."); - return false; - } - try { - 
logger.fine("Finished build trace"); - this.webhookBuildLogic.finishBuildTrace(buildData, run); - return true; - } catch (Exception e) { - DatadogUtilities.severe(logger, e, "Failed to finish build trace"); - return false; + private void sendSpans(List spans) { + for (JSONObject span : spans) { + // webhook intake does not support batch requests + postWebhook(span.toString()); } } - @Override - public boolean sendPipelineTrace(Run run, FlowNode flowNode) { - if(this.isWebhookIntakeConnectionBroken()){ - logger.severe("Unable to send pipeline trace; your client is not initialized properly"); - return false; - } - try { - logger.fine("Send pipeline traces."); - this.webhookPipelineLogic.execute(run, flowNode); - return true; - } catch (Exception e) { - DatadogUtilities.severe(logger, e, "Failed to send pipeline trace"); - return false; + /** + * Posts a given payload to the Datadog Webhook Intake, using the user configured apiKey. + * + * @param payload - A webhook payload. + */ + private void postWebhook(String payload) { + logger.fine("Sending webhook"); + + if (this.webhookIntakeConnectionBroken) { + throw new RuntimeException("Your client is not initialized properly; webhook intake connection is broken."); } + + DatadogGlobalConfiguration datadogGlobalDescriptor = DatadogUtilities.getDatadogGlobalDescriptor(); + String urlParameters = datadogGlobalDescriptor != null ? 
"?service=" + datadogGlobalDescriptor.getCiInstanceName() : ""; + String url = getWebhookIntakeUrl() + urlParameters; + + Map headers = new HashMap<>(); + headers.put("DD-API-KEY", Secret.toString(apiKey)); + headers.put("DD-CI-PROVIDER-NAME", "jenkins"); + + byte[] body = payload.getBytes(StandardCharsets.UTF_8); + httpClient.postAsynchronously(url, headers, "application/json", body); } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/HttpClient.java b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/HttpClient.java index e72babe77..a2604a974 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/HttpClient.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/HttpClient.java @@ -2,7 +2,6 @@ import java.io.IOException; import java.io.InputStream; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.ArrayBlockingQueue; @@ -19,7 +18,6 @@ import java.util.regex.Pattern; import jenkins.model.Jenkins; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; -import org.datadog.jenkins.plugins.datadog.transport.HttpMessage; import org.eclipse.jetty.client.HttpProxy; import org.eclipse.jetty.client.Origin; import org.eclipse.jetty.client.ProxyConfiguration; @@ -204,6 +202,13 @@ public void getBinary(String url, Map headers, Consumer T put(String url, Map headers, String contentType, byte[] body, Function responseParser) throws ExecutionException, InterruptedException, TimeoutException { + return executeSynchronously( + requestSupplier(url, HttpMethod.PUT, headers, contentType, body), + retryPolicyFactory.create(), + responseParser); + } + public T post(String url, Map headers, String contentType, byte[] body, Function responseParser) throws ExecutionException, InterruptedException, TimeoutException { return executeSynchronously( requestSupplier(url, HttpMethod.POST, headers, contentType, body), @@ -223,22 +228,6 @@ public void postAsynchronously(String 
url, Map headers, String c ); } - public void sendAsynchronously(HttpMessage message) { - String url = message.getURL().toString(); - HttpMethod httpMethod = HttpMethod.fromString(message.getMethod().name()); - String contentType = message.getContentType(); - byte[] body = message.getPayload(); - executeAsynchronously( - requestSupplier( - url, - httpMethod, - Collections.emptyMap(), - contentType, - body), - retryPolicyFactory.create() - ); - } - private Supplier requestSupplier(String url, HttpMethod method, Map headers, String contentType, byte[] body) { return () -> { Request request = CLIENT diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListener.java b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListener.java index 9b6dba828..10ecf4d2c 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListener.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListener.java @@ -58,11 +58,16 @@ of this software and associated documentation files (the "Software"), to deal import org.datadog.jenkins.plugins.datadog.events.BuildFinishedEventImpl; import org.datadog.jenkins.plugins.datadog.events.BuildStartedEventImpl; import org.datadog.jenkins.plugins.datadog.model.BuildData; +import org.datadog.jenkins.plugins.datadog.model.CIGlobalTagsAction; +import org.datadog.jenkins.plugins.datadog.model.PipelineQueueInfoAction; +import org.datadog.jenkins.plugins.datadog.model.StageBreakdownAction; import org.datadog.jenkins.plugins.datadog.traces.BuildSpanAction; - import org.datadog.jenkins.plugins.datadog.traces.BuildSpanManager; import org.datadog.jenkins.plugins.datadog.traces.StepDataAction; +import org.datadog.jenkins.plugins.datadog.traces.StepTraceDataAction; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; +import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriter; +import 
org.datadog.jenkins.plugins.datadog.traces.write.TraceWriterFactory; import org.jenkinsci.plugins.workflow.job.WorkflowRun; @@ -123,13 +128,32 @@ public void onInitialize(Run run) { run.addAction(stepDataAction); // Traces - client.startBuildTrace(buildData, run); + startBuildTrace(buildData, run); logger.fine("End DatadogBuildListener#onInitialize"); } catch (Exception e) { DatadogUtilities.severe(logger, e, "Failed to process build initialization"); } } + private void startBuildTrace(final BuildData buildData, Run run) { + if (!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) { + logger.fine("CI Visibility is disabled"); + return; + } + + final StepTraceDataAction stepTraceDataAction = new StepTraceDataAction(); + run.addAction(stepTraceDataAction); + + final StageBreakdownAction stageBreakdownAction = new StageBreakdownAction(); + run.addAction(stageBreakdownAction); + + final PipelineQueueInfoAction pipelineQueueInfoAction = new PipelineQueueInfoAction(); + run.addAction(pipelineQueueInfoAction); + + final CIGlobalTagsAction ciGlobalTags = new CIGlobalTagsAction(buildData.getTagsForTraces()); + run.addAction(ciGlobalTags); + } + /** * Called before the SCMCheckout is run in a Jenkins build. * This method is called after onInitialize callback. 
@@ -365,9 +389,8 @@ public void onFinalized(Run run) { } logger.fine("Start DatadogBuildListener#onFinalized"); - // Get Datadog Client Instance - DatadogClient client = getDatadogClient(); - if (client == null) { + TraceWriter traceWriter = TraceWriterFactory.getTraceWriter(); + if (traceWriter == null) { return; } @@ -381,11 +404,14 @@ public void onFinalized(Run run) { } // APM Traces - client.finishBuildTrace(buildData, run); + traceWriter.submitBuild(buildData, run); logger.fine("End DatadogBuildListener#onFinalized"); BuildSpanManager.get().remove(buildData.getBuildTag("")); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + DatadogUtilities.severe(logger, e, "Interrupted while processing build finalization"); } catch (Exception e) { DatadogUtilities.severe(logger, e, "Failed to process build finalization"); } finally { diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java index a6e8f6f9c..226216c80 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java @@ -65,6 +65,8 @@ of this software and associated documentation files (the "Software"), to deal import org.datadog.jenkins.plugins.datadog.model.StageBreakdownAction; import org.datadog.jenkins.plugins.datadog.model.StageData; import org.datadog.jenkins.plugins.datadog.traces.BuildSpanAction; +import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriter; +import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriterFactory; import org.datadog.jenkins.plugins.datadog.util.TagsUtil; import org.datadog.jenkins.plugins.datadog.util.git.GitUtils; import org.jenkinsci.plugins.gitclient.GitClient; @@ -105,14 +107,23 @@ public void onNewHead(FlowNode flowNode) { } } - //APM Traces + TraceWriter traceWriter = 
TraceWriterFactory.getTraceWriter(); + if (traceWriter != null) { + try { + traceWriter.submitPipeline(flowNode, run); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + DatadogUtilities.severe(logger, e, "Interrupted while submitting pipeline trace for node " + flowNode.getDisplayName() + " in run " + (run != null ? run.getDisplayName() : "")); + } catch (Exception e) { + DatadogUtilities.severe(logger, e, "Error while submitting pipeline trace for node " + flowNode.getDisplayName() + " in run " + (run != null ? run.getDisplayName() : "")); + } + } + DatadogClient client = ClientFactory.getClient(); if (client == null){ return; } - client.sendPipelineTrace(run, flowNode); - if (!isMonitored(flowNode)) { return; } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBaseBuildLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBaseBuildLogic.java index 0dad1d522..d5852d3d2 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBaseBuildLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBaseBuildLogic.java @@ -1,5 +1,7 @@ package org.datadog.jenkins.plugins.datadog.traces; +import hudson.model.Cause; +import hudson.model.Run; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; @@ -7,7 +9,8 @@ import java.util.Map; import java.util.Set; import java.util.logging.Logger; - +import javax.annotation.Nullable; +import net.sf.json.JSONObject; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; import org.datadog.jenkins.plugins.datadog.model.PipelineNodeInfoAction; @@ -15,8 +18,6 @@ import org.datadog.jenkins.plugins.datadog.model.StageData; import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings; import org.datadog.jenkins.plugins.datadog.util.json.JsonUtils; -import hudson.model.Cause; -import hudson.model.Run; /** * Base class for 
DatadogTraceBuildLogic and DatadogPipelineBuildLogic @@ -27,8 +28,8 @@ public abstract class DatadogBaseBuildLogic { private static final int MAX_TAG_LENGTH = 5000; private static final Logger logger = Logger.getLogger(DatadogBaseBuildLogic.class.getName()); - public abstract void finishBuildTrace(final BuildData buildData, final Run run); - public abstract void startBuildTrace(final BuildData buildData, Run run); + @Nullable + public abstract JSONObject finishBuildTrace(final BuildData buildData, final Run run); protected String getNodeName(Run run, BuildData buildData, BuildData updatedBuildData) { final PipelineNodeInfoAction pipelineNodeInfoAction = run.getAction(PipelineNodeInfoAction.class); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java index a948f5947..54526597d 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java @@ -1,12 +1,16 @@ package org.datadog.jenkins.plugins.datadog.traces; +import hudson.model.Run; +import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.TimeoutException; import java.util.logging.Logger; - +import javax.annotation.Nonnull; +import net.sf.json.JSONObject; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.audit.DatadogAudit; import org.datadog.jenkins.plugins.datadog.model.BuildData; @@ -20,8 +24,6 @@ import org.jenkinsci.plugins.workflow.graph.FlowNode; import org.jenkinsci.plugins.workflow.graphanalysis.DepthFirstScanner; -import hudson.model.Run; - /** * Base class with shared code for DatadogTracePipelineLogic and DatadogWebhookPipelineLogic */ @@ -31,7 +33,8 @@ public 
abstract class DatadogBasePipelineLogic { protected static final String HOSTNAME_NONE = "none"; private static final Logger logger = Logger.getLogger(DatadogBasePipelineLogic.class.getName()); - public abstract void execute(Run run, FlowNode flowNode); + @Nonnull + public abstract Collection execute(FlowNode flowNode, Run run); protected BuildPipelineNode buildPipelineTree(FlowEndNode flowEndNode) { diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java index 25da2f87e..518abceda 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java @@ -8,24 +8,20 @@ import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.normalizeTag; import static org.datadog.jenkins.plugins.datadog.util.git.GitUtils.isValidCommit; -import java.util.Collections; +import hudson.model.Result; +import hudson.model.Run; import java.util.Map; import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.logging.Logger; - +import net.sf.json.JSONObject; import org.apache.commons.lang.StringUtils; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; import org.datadog.jenkins.plugins.datadog.model.CIGlobalTagsAction; -import org.datadog.jenkins.plugins.datadog.model.PipelineQueueInfoAction; -import org.datadog.jenkins.plugins.datadog.model.StageBreakdownAction; +import org.datadog.jenkins.plugins.datadog.traces.mapper.JsonTraceSpanMapper; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; -import org.datadog.jenkins.plugins.datadog.transport.HttpClient; - -import hudson.model.Result; -import hudson.model.Run; /** * Keeps the logic to send traces related to 
Jenkins Build. @@ -35,52 +31,28 @@ public class DatadogTraceBuildLogic extends DatadogBaseBuildLogic { private static final Logger logger = Logger.getLogger(DatadogTraceBuildLogic.class.getName()); - private final HttpClient agentHttpClient; - - public DatadogTraceBuildLogic(final HttpClient agentHttpClient) { - this.agentHttpClient = agentHttpClient; - } + private final JsonTraceSpanMapper jsonTraceSpanMapper = new JsonTraceSpanMapper(); @Override - public void startBuildTrace(final BuildData buildData, Run run) { - if (!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) { - logger.fine("CI Visibility is disabled"); - return; - } - - // Traces - if(this.agentHttpClient == null) { - logger.severe("Unable to send build traces. Tracer is null"); - return; - } - - final StepTraceDataAction stepTraceDataAction = new StepTraceDataAction(); - run.addAction(stepTraceDataAction); - - final StageBreakdownAction stageBreakdownAction = new StageBreakdownAction(); - run.addAction(stageBreakdownAction); - - final PipelineQueueInfoAction pipelineQueueInfoAction = new PipelineQueueInfoAction(); - run.addAction(pipelineQueueInfoAction); - - final CIGlobalTagsAction ciGlobalTags = new CIGlobalTagsAction(buildData.getTagsForTraces()); - run.addAction(ciGlobalTags); + public JSONObject finishBuildTrace(final BuildData buildData, final Run run) { + TraceSpan span = createSpan(buildData, run); + return span != null ? 
jsonTraceSpanMapper.map(span) : null; } - @Override - public void finishBuildTrace(final BuildData buildData, final Run run) { + // hook for tests + public TraceSpan createSpan(final BuildData buildData, final Run run) { if (!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) { - return; + return null; } final TraceSpan buildSpan = BuildSpanManager.get().get(buildData.getBuildTag("")); if(buildSpan == null) { - return; + return null; } final BuildSpanAction buildSpanAction = run.getAction(BuildSpanAction.class); if(buildSpanAction == null) { - return; + return null; } // In this point of the execution, the BuildData stored within @@ -263,7 +235,8 @@ public void finishBuildTrace(final BuildData buildData, final Run run) { // When the root span starts, we don't have the propagated queue time yet. We need to wait till the // end of the pipeline execution and do it in the endTime, adjusting all child spans if needed. buildSpan.setEndNano(TimeUnit.MICROSECONDS.toNanos(endTimeMicros - TimeUnit.MILLISECONDS.toMicros(propagatedMillisInQueue))); - agentHttpClient.send(Collections.singletonList(buildSpan)); + + return buildSpan; } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTracePipelineLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTracePipelineLogic.java index f2956702c..cc6ae4596 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTracePipelineLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTracePipelineLogic.java @@ -9,29 +9,29 @@ import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.normalizeBranch; import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.normalizeTag; +import hudson.model.Run; import java.io.PrintWriter; import java.io.StringWriter; import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.HashMap; -import java.util.List; import 
java.util.Map; import java.util.concurrent.TimeUnit; -import java.util.logging.Logger; - +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import net.sf.json.JSONObject; import org.apache.commons.lang.StringUtils; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; import org.datadog.jenkins.plugins.datadog.model.CIGlobalTagsAction; +import org.datadog.jenkins.plugins.datadog.traces.mapper.JsonTraceSpanMapper; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; -import org.datadog.jenkins.plugins.datadog.transport.HttpClient; -import org.datadog.jenkins.plugins.datadog.transport.PayloadMessage; import org.datadog.jenkins.plugins.datadog.util.git.GitUtils; import org.jenkinsci.plugins.workflow.graph.FlowEndNode; import org.jenkinsci.plugins.workflow.graph.FlowNode; -import hudson.model.Run; - /** * Keeps the logic to send traces related to inner jobs of Jenkins Pipelines (datadog levels: stage and job). 
@@ -39,23 +39,19 @@ */ public class DatadogTracePipelineLogic extends DatadogBasePipelineLogic { - private static final Logger logger = Logger.getLogger(DatadogTracePipelineLogic.class.getName()); - - private final HttpClient agentHttpClient; + private final JsonTraceSpanMapper jsonTraceSpanMapper = new JsonTraceSpanMapper(); - public DatadogTracePipelineLogic(HttpClient agentHttpClient) { - this.agentHttpClient = agentHttpClient; + @Nonnull + @Override + public Collection execute(FlowNode flowNode, Run run) { + Collection traces = collectTraces(flowNode, run); + return traces.stream().map(jsonTraceSpanMapper::map).collect(Collectors.toList()); } - @Override - public void execute(Run run, FlowNode flowNode) { + // hook for tests + public Collection collectTraces(FlowNode flowNode, Run run) { if (!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) { - return; - } - - if(this.agentHttpClient == null) { - logger.severe("Unable to send pipeline traces. Tracer is null"); - return; + return Collections.emptySet(); } final IsPipelineAction isPipelineAction = run.getAction(IsPipelineAction.class); @@ -65,40 +61,34 @@ public void execute(Run run, FlowNode flowNode) { final BuildSpanAction buildSpanAction = run.getAction(BuildSpanAction.class); if(buildSpanAction == null) { - return; + return Collections.emptySet(); } final BuildData buildData = buildSpanAction.getBuildData(); if(!DatadogUtilities.isLastNode(flowNode)){ updateCIGlobalTags(run); - return; + return Collections.emptySet(); } final TraceSpan.TraceSpanContext traceSpanContext = buildSpanAction.getBuildSpanContext(); final BuildPipelineNode root = buildPipelineTree((FlowEndNode) flowNode); - final List spanBuffer = new ArrayList<>(); - collectTraces(run, spanBuffer, buildData, root, traceSpanContext); - try { - if(!spanBuffer.isEmpty()) { - this.agentHttpClient.send(spanBuffer); - } - } catch (Exception e){ - logger.severe("Unable to send traces. 
Exception:" + e); + return collectTraces(run, buildData, root, traceSpanContext); } finally { // Explicit removal of InvisibleActions used to collect Traces when the Run finishes. cleanUpTraceActions(run); } } - private void collectTraces(final Run run, final List spanBuffer, final BuildData buildData, final BuildPipelineNode current, final TraceSpan.TraceSpanContext parentSpanContext) { + private Collection collectTraces(final Run run, final BuildData buildData, final BuildPipelineNode current, final TraceSpan.TraceSpanContext parentSpanContext) { if(!isTraceable(current)) { + Collection traces = new ArrayList<>(); // If the current node is not traceable, we continue with its children for(final BuildPipelineNode child : current.getChildren()) { - collectTraces(run, spanBuffer, buildData, child, parentSpanContext); + traces.addAll(collectTraces(run, buildData, child, parentSpanContext)); } - return; + return traces; } // If the root span has propagated queue time, we need to adjust all startTime and endTime from Jenkins pipelines spans @@ -136,8 +126,9 @@ private void collectTraces(final Run run, final List spanBuffer, } } + Collection traces = new ArrayList<>(); for(final BuildPipelineNode child : current.getChildren()) { - collectTraces(run, spanBuffer, buildData, child, span.context()); + traces.addAll(collectTraces(run, buildData, child, span.context())); } //Logs @@ -145,7 +136,8 @@ private void collectTraces(final Run run, final List spanBuffer, span.setEndNano(fixedEndTimeNanos); - spanBuffer.add(span); + traces.add(span); + return traces; } private Map buildTraceMetrics(BuildPipelineNode current) { diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java index c2ecb4591..6e0cc9313 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java +++ 
b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java @@ -6,27 +6,21 @@ import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.normalizeTag; import static org.datadog.jenkins.plugins.datadog.util.git.GitUtils.isValidCommit; +import hudson.model.Run; import java.util.Date; import java.util.Map; import java.util.Optional; import java.util.Set; -import java.util.concurrent.TimeUnit; import java.util.logging.Logger; - +import net.sf.json.JSONArray; +import net.sf.json.JSONObject; import org.apache.commons.lang.StringUtils; -import org.datadog.jenkins.plugins.datadog.DatadogClient; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; import org.datadog.jenkins.plugins.datadog.model.CIGlobalTagsAction; -import org.datadog.jenkins.plugins.datadog.model.PipelineQueueInfoAction; -import org.datadog.jenkins.plugins.datadog.model.StageBreakdownAction; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; -import hudson.model.Run; -import net.sf.json.JSONArray; -import net.sf.json.JSONObject; - /** * Keeps the logic to send webhooks related to Jenkins Build. 
* This gets called once per job (datadog level: pipeline) @@ -35,46 +29,20 @@ public class DatadogWebhookBuildLogic extends DatadogBaseBuildLogic { private static final Logger logger = Logger.getLogger(DatadogWebhookBuildLogic.class.getName()); - private final DatadogClient client; - - public DatadogWebhookBuildLogic(final DatadogClient client) { - this.client = client; - } - - @Override - public void startBuildTrace(final BuildData buildData, Run run) { - if (!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) { - logger.fine("CI Visibility is disabled"); - return; - } - - final StepTraceDataAction stepTraceDataAction = new StepTraceDataAction(); - run.addAction(stepTraceDataAction); - - final StageBreakdownAction stageBreakdownAction = new StageBreakdownAction(); - run.addAction(stageBreakdownAction); - - final PipelineQueueInfoAction pipelineQueueInfoAction = new PipelineQueueInfoAction(); - run.addAction(pipelineQueueInfoAction); - - final CIGlobalTagsAction ciGlobalTags = new CIGlobalTagsAction(buildData.getTagsForTraces()); - run.addAction(ciGlobalTags); - } - @Override - public void finishBuildTrace(final BuildData buildData, final Run run) { + public JSONObject finishBuildTrace(final BuildData buildData, final Run run) { if (!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) { - return; + return null; } final TraceSpan buildSpan = BuildSpanManager.get().get(buildData.getBuildTag("")); if(buildSpan == null) { - return; + return null; } final BuildSpanAction buildSpanAction = run.getAction(BuildSpanAction.class); if(buildSpanAction == null) { - return; + return null; } // In this point of the execution, the BuildData stored within @@ -285,7 +253,7 @@ public void finishBuildTrace(final BuildData buildData, final Run run) { payload.put("git", gitPayload); } - client.postWebhook(payload.toString()); + return payload; } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookPipelineLogic.java 
b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookPipelineLogic.java index 7afd20659..9d5e641e2 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookPipelineLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookPipelineLogic.java @@ -6,15 +6,20 @@ import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.normalizeBranch; import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.normalizeTag; +import hudson.model.Run; import java.io.PrintWriter; import java.io.StringWriter; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.Date; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; - +import javax.annotation.Nonnull; +import net.sf.json.JSONArray; +import net.sf.json.JSONObject; import org.apache.commons.lang.StringUtils; -import org.datadog.jenkins.plugins.datadog.DatadogClient; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; @@ -24,27 +29,18 @@ import org.jenkinsci.plugins.workflow.graph.FlowEndNode; import org.jenkinsci.plugins.workflow.graph.FlowNode; -import hudson.model.Run; -import net.sf.json.JSONArray; -import net.sf.json.JSONObject; - /** * Keeps the logic to send webhooks related to inner jobs of Jenkins Pipelines (datadog levels: stage and job). 
* The top-level job (datadog level: pipeline) is handled by DatadogWebhookBuildLogic */ public class DatadogWebhookPipelineLogic extends DatadogBasePipelineLogic { - private final DatadogClient client; - - public DatadogWebhookPipelineLogic(final DatadogClient client) { - this.client = client; - } - + @Nonnull @Override - public void execute(Run run, FlowNode flowNode) { + public Collection execute(FlowNode flowNode, Run run) { if (!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) { - return; + return Collections.emptySet(); } final IsPipelineAction isPipelineAction = run.getAction(IsPipelineAction.class); @@ -54,31 +50,34 @@ public void execute(Run run, FlowNode flowNode) { final BuildSpanAction buildSpanAction = run.getAction(BuildSpanAction.class); if(buildSpanAction == null) { - return; + return Collections.emptySet(); } final BuildData buildData = buildSpanAction.getBuildData(); if(!DatadogUtilities.isLastNode(flowNode)){ updateCIGlobalTags(run); - return; + return Collections.emptySet(); } final TraceSpan.TraceSpanContext traceSpanContext = buildSpanAction.getBuildSpanContext(); final BuildPipelineNode root = buildPipelineTree((FlowEndNode) flowNode); - collectTraces(run, buildData, root, null, traceSpanContext); - - // Explicit removal of InvisibleActions used to collect Traces when the Run finishes. - cleanUpTraceActions(run); + try { + return collectTraces(run, buildData, root, null, traceSpanContext); + } finally { + // Explicit removal of InvisibleActions used to collect Traces when the Run finishes. 
+ cleanUpTraceActions(run); + } } - private void collectTraces(final Run run, final BuildData buildData, final BuildPipelineNode current, final BuildPipelineNode parent, final TraceSpan.TraceSpanContext parentSpanContext) { + private Collection collectTraces(final Run run, final BuildData buildData, final BuildPipelineNode current, final BuildPipelineNode parent, final TraceSpan.TraceSpanContext parentSpanContext) { if(!isTraceable(current)) { + Collection traces = new ArrayList<>(); // If the current node is not traceable, we continue with its children for(final BuildPipelineNode child : current.getChildren()) { - collectTraces(run, buildData, child, parent, parentSpanContext); + traces.addAll(collectTraces(run, buildData, child, parent, parentSpanContext)); } - return; + return traces; } // If the root has propagated queue time, we need to adjust all startTime and endTime from Jenkins pipelines // because this time will be subtracted in the root. See DatadogTraceBuildLogic#finishBuildTrace method. 
@@ -314,11 +313,13 @@ private void collectTraces(final Run run, final BuildData buildData, final Build payload.put("tags", tagsPayload); } + Collection traces = new ArrayList<>(); for(final BuildPipelineNode child : current.getChildren()) { - collectTraces(run, buildData, child, current, span.context()); + traces.addAll(collectTraces(run, buildData, child, current, span.context())); } - client.postWebhook(payload.toString()); + traces.add(payload); + return traces; } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/mapper/JsonTraceSpanMapper.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/mapper/JsonTraceSpanMapper.java index dfdc9c9cb..bbe580841 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/mapper/JsonTraceSpanMapper.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/mapper/JsonTraceSpanMapper.java @@ -1,22 +1,16 @@ package org.datadog.jenkins.plugins.datadog.traces.mapper; -import net.sf.json.JSONArray; +import java.util.Map; import net.sf.json.JSONObject; -import org.datadog.jenkins.plugins.datadog.transport.PayloadMapper; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; -import java.nio.charset.StandardCharsets; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - /** * PayloadMapper to transform TraceSpan into JSON object * following the spec: https://docs.datadoghq.com/api/latest/tracing/ */ -public class JsonTraceSpanMapper implements PayloadMapper> { +public class JsonTraceSpanMapper { - static final String TRACE_ID = "trace_id"; + public static final String TRACE_ID = "trace_id"; static final String SPAN_ID = "span_id"; static final String PARENT_ID = "parent_id"; @@ -31,61 +25,40 @@ public class JsonTraceSpanMapper implements PayloadMapper> { static final String START = "start"; static final String DURATION = "duration"; - @Override - public byte[] map(final List spans) { - final Map tracesMap = new HashMap<>(); - for(final TraceSpan 
span : spans) { - JSONArray jsonArray = tracesMap.get(span.context().getTraceId()); - if(jsonArray == null){ - jsonArray = new JSONArray(); - tracesMap.put(span.context().getTraceId(), jsonArray); - } - - final JSONObject jsonSpan = new JSONObject(); - jsonSpan.put(TRACE_ID, span.context().getTraceId()); - jsonSpan.put(SPAN_ID, span.context().getSpanId()); - if(span.context().getParentId() != 0){ - jsonSpan.put(PARENT_ID, span.context().getParentId()); - } - - if(span.isError()){ - jsonSpan.put(ERROR, 1); - } - - jsonSpan.put(OPERATION_NAME, span.getOperationName()); - jsonSpan.put(RESOURCE_NAME, span.getResourceName()); - jsonSpan.put(SERVICE_NAME, span.getServiceName()); - jsonSpan.put(SPAN_TYPE, span.getType()); - - final JSONObject jsonMeta = new JSONObject(); - final Map meta = span.getMeta(); - for(Map.Entry metaEntry : meta.entrySet()) { - jsonMeta.put(metaEntry.getKey(), metaEntry.getValue()); - } - jsonSpan.put(META, jsonMeta); + public JSONObject map(final TraceSpan span) { + final JSONObject jsonSpan = new JSONObject(); + jsonSpan.put(TRACE_ID, span.context().getTraceId()); + jsonSpan.put(SPAN_ID, span.context().getSpanId()); + if(span.context().getParentId() != 0){ + jsonSpan.put(PARENT_ID, span.context().getParentId()); + } - final JSONObject jsonMetrics = new JSONObject(); - final Map metrics = span.getMetrics(); - for(Map.Entry metric : metrics.entrySet()){ - jsonMetrics.put(metric.getKey(), metric.getValue()); - } - jsonSpan.put(METRICS, jsonMetrics); + if(span.isError()){ + jsonSpan.put(ERROR, 1); + } - jsonSpan.put(START, span.getStartNano()); - jsonSpan.put(DURATION, span.getDurationNano()); + jsonSpan.put(OPERATION_NAME, span.getOperationName()); + jsonSpan.put(RESOURCE_NAME, span.getResourceName()); + jsonSpan.put(SERVICE_NAME, span.getServiceName()); + jsonSpan.put(SPAN_TYPE, span.getType()); - jsonArray.add(jsonSpan); + final JSONObject jsonMeta = new JSONObject(); + final Map meta = span.getMeta(); + for(Map.Entry metaEntry : 
meta.entrySet()) { + jsonMeta.put(metaEntry.getKey(), metaEntry.getValue()); } + jsonSpan.put(META, jsonMeta); - final JSONArray jsonTraces = new JSONArray(); - for(Map.Entry traceEntry : tracesMap.entrySet()) { - jsonTraces.add(traceEntry.getValue()); + final JSONObject jsonMetrics = new JSONObject(); + final Map metrics = span.getMetrics(); + for(Map.Entry metric : metrics.entrySet()){ + jsonMetrics.put(metric.getKey(), metric.getValue()); } - return jsonTraces.toString().getBytes(StandardCharsets.UTF_8); - } + jsonSpan.put(METRICS, jsonMetrics); + + jsonSpan.put(START, span.getStartNano()); + jsonSpan.put(DURATION, span.getDurationNano()); - @Override - public String contentType() { - return "application/json"; + return jsonSpan; } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/message/TraceSpan.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/message/TraceSpan.java index 3f9b4dbfb..062b6a188 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/message/TraceSpan.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/message/TraceSpan.java @@ -1,13 +1,11 @@ package org.datadog.jenkins.plugins.datadog.traces.message; -import org.datadog.jenkins.plugins.datadog.traces.IdGenerator; -import org.datadog.jenkins.plugins.datadog.transport.PayloadMessage; - import java.io.Serializable; import java.util.HashMap; import java.util.Map; +import org.datadog.jenkins.plugins.datadog.traces.IdGenerator; -public class TraceSpan implements PayloadMessage { +public class TraceSpan { public static final String PRIORITY_SAMPLING_KEY = "_sampling_priority_v1"; @@ -113,11 +111,6 @@ public boolean isError() { return error; } - @Override - public PayloadMessage.Type getMessageType() { - return PayloadMessage.Type.TRACE; - } - public static class TraceSpanContext implements Serializable { private static final long serialVersionUID = 1L; diff --git 
a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java new file mode 100644 index 000000000..790268f9d --- /dev/null +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java @@ -0,0 +1,96 @@ +package org.datadog.jenkins.plugins.datadog.traces.write; + +import hudson.model.Run; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; +import java.util.logging.Logger; +import net.sf.json.JSONObject; +import org.datadog.jenkins.plugins.datadog.clients.DatadogAgentClient; +import org.datadog.jenkins.plugins.datadog.model.BuildData; +import org.jenkinsci.plugins.workflow.graph.FlowNode; + +/** + * Trace write strategy that can dynamically switch from using APM track to using EVP Proxy. + * The switch will happen if an older agent (one that doesn't support EVP Proxy) is replaced with a newer agent. + */ +public class AgentTraceWriteStrategy implements TraceWriteStrategy { + + private static final Logger logger = Logger.getLogger(AgentTraceWriteStrategy.class.getName()); + + /** + * How often to check the /info endpoint in case the Agent got updated. 
+ */ + private static final long EVP_PROXY_SUPPORT_TIME_BETWEEN_CHECKS_MS = TimeUnit.HOURS.toMillis(1); + + private final TraceWriteStrategy evpProxyStrategy; + private final TraceWriteStrategy apmStrategy; + private final Supplier checkEvpProxySupport; + private volatile boolean evpProxySupported = false; + private volatile long lastEvpProxyCheckTimeMs = 0L; + + public AgentTraceWriteStrategy(TraceWriteStrategy evpProxyStrategy, TraceWriteStrategy apmStrategy, Supplier checkEvpProxySupport) { + this.evpProxyStrategy = evpProxyStrategy; + this.apmStrategy = apmStrategy; + this.checkEvpProxySupport = checkEvpProxySupport; + } + + @Override + public JSONObject serialize(BuildData buildData, Run run) { + return getCurrentStrategy().serialize(buildData, run); + } + + @Override + public Collection serialize(FlowNode flowNode, Run run) { + return getCurrentStrategy().serialize(flowNode, run); + } + + @Override + public void send(List spans) { + // we have to check serialized spans to know where to send them, + // because the serialization strategy might've changed in between serialize() and send() + if (isWebhook(spans)) { + evpProxyStrategy.send(spans); + } else { + apmStrategy.send(spans); + } + } + + private boolean isWebhook(List spans) { + if (spans.isEmpty()) { + return false; + } + JSONObject span = spans.iterator().next(); + return span.get("level") != null; + } + + private TraceWriteStrategy getCurrentStrategy() { + if (isEvpProxySupported()) { + return evpProxyStrategy; + } else { + return apmStrategy; + } + } + + private boolean isEvpProxySupported() { + if (evpProxySupported) { + return true; // Once we have seen an Agent that supports EVP Proxy, we never check again. 
+ } + if (System.currentTimeMillis() < (lastEvpProxyCheckTimeMs + EVP_PROXY_SUPPORT_TIME_BETWEEN_CHECKS_MS)) { + return evpProxySupported; // Wait at least 1 hour between checks, return the cached value + } + synchronized (DatadogAgentClient.class) { + if (!evpProxySupported) { + evpProxySupported = checkEvpProxySupport.get(); + lastEvpProxyCheckTimeMs = System.currentTimeMillis(); + if (evpProxySupported) { + logger.info("EVP Proxy is supported by the Agent. We will not check again until the next boot."); + } else { + logger.info("The Agent doesn't support EVP Proxy, falling back to APM for CI Visibility. Requires Agent v6.42+ or 7.42+."); + } + } + } + return evpProxySupported; + } +} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java new file mode 100644 index 000000000..c73a7f7a4 --- /dev/null +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java @@ -0,0 +1,16 @@ +package org.datadog.jenkins.plugins.datadog.traces.write; + +import hudson.model.Run; +import java.util.Collection; +import java.util.List; +import net.sf.json.JSONObject; +import org.datadog.jenkins.plugins.datadog.model.BuildData; +import org.jenkinsci.plugins.workflow.graph.FlowNode; + +public interface TraceWriteStrategy { + JSONObject serialize(BuildData buildData, Run run); + + Collection serialize(FlowNode flowNode, Run run); + + void send(List spans); +} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java new file mode 100644 index 000000000..ce6e822ae --- /dev/null +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java @@ -0,0 +1,56 @@ +package org.datadog.jenkins.plugins.datadog.traces.write; + +import hudson.model.Run; +import 
java.util.Collection; +import java.util.List; +import java.util.function.Consumer; +import java.util.logging.Logger; +import net.sf.json.JSONObject; +import org.datadog.jenkins.plugins.datadog.DatadogUtilities; +import org.datadog.jenkins.plugins.datadog.model.BuildData; +import org.datadog.jenkins.plugins.datadog.traces.DatadogBaseBuildLogic; +import org.datadog.jenkins.plugins.datadog.traces.DatadogBasePipelineLogic; +import org.datadog.jenkins.plugins.datadog.util.CircuitBreaker; +import org.jenkinsci.plugins.workflow.graph.FlowNode; + +public class TraceWriteStrategyImpl implements TraceWriteStrategy { + + private static final Logger logger = Logger.getLogger(TraceWriteStrategyImpl.class.getName()); + + private final DatadogBaseBuildLogic buildLogic; + private final DatadogBasePipelineLogic pipelineLogic; + private final CircuitBreaker> sendSpansCircuitBreaker; + + public TraceWriteStrategyImpl(DatadogBaseBuildLogic buildLogic, DatadogBasePipelineLogic pipelineLogic, Consumer> spansSender) { + this.buildLogic = buildLogic; + this.pipelineLogic = pipelineLogic; + this.sendSpansCircuitBreaker = new CircuitBreaker<>( + spansSender, + this::logTransportBroken, + this::logTransportError + ); + } + + @Override + public JSONObject serialize(final BuildData buildData, final Run run) { + return buildLogic.finishBuildTrace(buildData, run); + } + + @Override + public Collection serialize(FlowNode flowNode, Run run) { + return pipelineLogic.execute(flowNode, run); + } + + @Override + public void send(List spans) { + sendSpansCircuitBreaker.accept(spans); + } + + private void logTransportBroken(List spans) { + logger.fine("Ignoring " + spans.size() + " because transport is broken"); + } + + private void logTransportError(Exception e) { + DatadogUtilities.severe(logger, e, "Error while sending trace"); + } +} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java 
b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java new file mode 100644 index 000000000..fc532fe05 --- /dev/null +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java @@ -0,0 +1,118 @@ +package org.datadog.jenkins.plugins.datadog.traces.write; + +import hudson.model.Run; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.logging.Logger; +import net.sf.json.JSONObject; +import org.datadog.jenkins.plugins.datadog.DatadogClient; +import org.datadog.jenkins.plugins.datadog.DatadogUtilities; +import org.datadog.jenkins.plugins.datadog.model.BuildData; +import org.jenkinsci.plugins.workflow.graph.FlowNode; + +public class TraceWriter { + + private static final Logger logger = Logger.getLogger(TraceWriter.class.getName()); + + private static final String QUEUE_CAPACITY_ENV_VAR = "DD_JENKINS_TRACES_QUEUE_CAPACITY"; + private static final String SUBMIT_TIMEOUT_ENV_VAR = "DD_JENKINS_TRACES_SUBMIT_TIMEOUT_SECONDS"; + private static final String STOP_TIMEOUT_ENV_VAR = "DD_JENKINS_TRACES_STOP_TIMEOUT_SECONDS"; + private static final String POLLING_INTERVAL_ENV_VAR = "DD_JENKINS_TRACES_POLLING_INTERVAL_SECONDS"; + private static final String BATCH_SIZE_LIMIT_ENV_VAR = "DD_JENKINS_TRACES_BATCH_SIZE_LIMIT"; + private static final int DEFAULT_QUEUE_CAPACITY = 10_000; + private static final int DEFAULT_SUBMIT_TIMEOUT_SECONDS = 30; + private static final int DEFAULT_STOP_TIMEOUT_SECONDS = 15; + private static final int DEFAULT_POLLING_INTERVAL_SECONDS = 10; + private static final int DEFAULT_BATCH_SIZE_LIMIT = 100; + + private final TraceWriteStrategy traceWriteStrategy; + private final BlockingQueue queue; + private final Thread poller; + + public TraceWriter(DatadogClient datadogClient) { + 
this.traceWriteStrategy = datadogClient.createTraceWriteStrategy(); + + this.queue = new ArrayBlockingQueue<>(getEnv(QUEUE_CAPACITY_ENV_VAR, DEFAULT_QUEUE_CAPACITY)); + + this.poller = new Thread(new QueuePoller(traceWriteStrategy, queue), "DD-Trace-Writer"); + this.poller.setDaemon(true); + this.poller.start(); + } + + public void submitBuild(final BuildData buildData, final Run run) throws InterruptedException, TimeoutException { + JSONObject buildJson = traceWriteStrategy.serialize(buildData, run); + submit(buildJson); + } + + public void submitPipeline(FlowNode flowNode, Run run) throws InterruptedException, TimeoutException { + Collection nodeJsons = traceWriteStrategy.serialize(flowNode, run); + for (JSONObject nodeJson : nodeJsons) { + submit(nodeJson); + } + } + + private void submit(JSONObject json) throws InterruptedException, TimeoutException { + if (!queue.offer(json, getEnv(SUBMIT_TIMEOUT_ENV_VAR, DEFAULT_SUBMIT_TIMEOUT_SECONDS), TimeUnit.SECONDS)) { + throw new TimeoutException("Timed out while submitting span"); + } + } + + public void stop() { + poller.interrupt(); + } + + private static final class QueuePoller implements Runnable { + private final TraceWriteStrategy traceWriteStrategy; + private final BlockingQueue queue; + + public QueuePoller(TraceWriteStrategy traceWriteStrategy, BlockingQueue queue) { + this.traceWriteStrategy = traceWriteStrategy; + this.queue = queue; + } + + @Override + public void run() { + long shutdownAt = Long.MAX_VALUE; + while (System.currentTimeMillis() < shutdownAt) { + try { + JSONObject span = queue.poll(getEnv(POLLING_INTERVAL_ENV_VAR, DEFAULT_POLLING_INTERVAL_SECONDS), TimeUnit.SECONDS); + if (span == null) { + continue; // nothing to send + } + + int batchSize = getEnv(BATCH_SIZE_LIMIT_ENV_VAR, DEFAULT_BATCH_SIZE_LIMIT); + List spans = new ArrayList<>(batchSize); + spans.add(span); + queue.drainTo(spans, batchSize - 1); + + traceWriteStrategy.send(spans); + + } catch (InterruptedException e) { + 
logger.info("Queue poller thread interrupted"); + shutdownAt = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(getEnv(STOP_TIMEOUT_ENV_VAR, DEFAULT_STOP_TIMEOUT_SECONDS)); + + } catch (Exception e) { + DatadogUtilities.severe(logger, e, "Error while sending trace"); + } + } + logger.info("Queue poller thread shut down"); + } + } + + private static int getEnv(String envVar, int defaultValue) { + String value = System.getenv(envVar); + if (value != null) { + try { + return Integer.parseInt(value); + } catch (Exception e) { + DatadogUtilities.severe(logger, null, "Invalid value " + value + " provided for env var " + envVar + ": integer number expected"); + } + } + return defaultValue; + } +} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriterFactory.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriterFactory.java new file mode 100644 index 000000000..d8691f3dd --- /dev/null +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriterFactory.java @@ -0,0 +1,30 @@ +package org.datadog.jenkins.plugins.datadog.traces.write; + +import javax.annotation.Nullable; +import org.datadog.jenkins.plugins.datadog.DatadogClient; +import org.datadog.jenkins.plugins.datadog.clients.ClientFactory; + +public class TraceWriterFactory { + + private static volatile TraceWriter TRACE_WRITER; + + public static synchronized void onDatadogClientUpdate(@Nullable DatadogClient client) { + if (client == null) { + return; + } + + if (TRACE_WRITER != null) { + TRACE_WRITER.stop(); + } + + TRACE_WRITER = new TraceWriter(client); + } + + @Nullable + public static TraceWriter getTraceWriter() { + if (TRACE_WRITER == null) { + onDatadogClientUpdate(ClientFactory.getClient()); + } + return TRACE_WRITER; + } +} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpClient.java b/src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpClient.java deleted file mode 100644 index 
0e2802334..000000000 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpClient.java +++ /dev/null @@ -1,12 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.transport; - -import java.util.List; - -public interface HttpClient { - - void send(List messages); - - void stop(); - - void close(); -} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpErrorHandler.java b/src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpErrorHandler.java deleted file mode 100644 index 3231e2ba8..000000000 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpErrorHandler.java +++ /dev/null @@ -1,6 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.transport; - -public interface HttpErrorHandler { - - void handle(Exception exception); -} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpMessage.java b/src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpMessage.java deleted file mode 100644 index 886cd713b..000000000 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpMessage.java +++ /dev/null @@ -1,50 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.transport; - -import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings; - -import java.net.URL; - -@SuppressFBWarnings -public class HttpMessage { - - private final URL url; - private final HttpMethod method; - private final String contentType; - private final byte[] payload; - - public HttpMessage(URL url, HttpMethod method, String contentType, byte[] payload) { - this.url = url; - this.method = method; - this.contentType = contentType; - this.payload = payload; - } - - public URL getURL() { - return this.url; - } - - public HttpMethod getMethod() { - return this.method; - } - - public String getContentType() { - return this.contentType; - } - - public byte[] getPayload() { - return this.payload; - } - - @Override - public String toString() { - return "HttpMessage{" + - "url=" + url + - ", 
method=" + method + - ", contentType='" + contentType + '\'' + - '}'; - } - - public enum HttpMethod { - PUT - } -} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpMessageFactory.java b/src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpMessageFactory.java deleted file mode 100644 index f1126d2a8..000000000 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpMessageFactory.java +++ /dev/null @@ -1,55 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.transport; - -import java.net.URL; -import java.util.List; - -public class HttpMessageFactory { - - private final URL url; - private final HttpMessage.HttpMethod httpMethod; - private final PayloadMapper> payloadMapper; - - private HttpMessageFactory(final Builder builder) { - this.url = builder.agentURL; - this.httpMethod = builder.httpMethod; - this.payloadMapper = builder.payloadMapper; - } - - public static Builder builder() { - return new Builder(); - } - - public static class Builder { - - private URL agentURL; - private HttpMessage.HttpMethod httpMethod; - private PayloadMapper> payloadMapper; - - public Builder agentURL(URL agentURL) { - this.agentURL = agentURL; - return this; - } - - public Builder httpMethod(final HttpMessage.HttpMethod httpMethod) { - this.httpMethod = httpMethod; - return this; - } - - public Builder payloadMapper(final PayloadMapper payloadMapper) { - this.payloadMapper = payloadMapper; - return this; - } - - public HttpMessageFactory build() { - return new HttpMessageFactory(this); - } - } - - public URL getURL() { - return this.url; - } - - public HttpMessage create(List messages) { - return new HttpMessage(this.url, this.httpMethod, this.payloadMapper.contentType(), this.payloadMapper.map(messages)); - } -} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpSender.java b/src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpSender.java deleted file mode 100644 index 6d0a0da13..000000000 
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/transport/HttpSender.java +++ /dev/null @@ -1,77 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.transport; - -import java.util.concurrent.ArrayBlockingQueue; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.TimeUnit; -import java.util.logging.Logger; -import org.datadog.jenkins.plugins.datadog.DatadogUtilities; -import org.datadog.jenkins.plugins.datadog.clients.HttpClient; - -public class HttpSender implements Runnable { - - private static final Logger logger = Logger.getLogger(HttpSender.class.getName()); - - private final BlockingQueue queue; - private final HttpErrorHandler errorHandler; - private final HttpClient client; - - private volatile boolean shutdown; - - HttpSender(final int queueSize, final HttpErrorHandler errorHandler, final int httpTimeoutMs) { - this(new ArrayBlockingQueue<>(queueSize), errorHandler, httpTimeoutMs); - } - - HttpSender(final BlockingQueue queue, final HttpErrorHandler errorHandler, final int httpTimeoutMs) { - this.queue = queue; - this.errorHandler = errorHandler; - this.client = new HttpClient(httpTimeoutMs); - } - - boolean send(final HttpMessage message){ - if (shutdown) { - return false; - } - try { - queue.put(message); - return true; - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - return false; - } - } - - @Override - public void run() { - // Consumer loop. - // Consume till shutdown=true and queue is empty. - while (!queue.isEmpty() || !shutdown) { - try { - // Try to retrieve (and remove) the head of the queue - // with 1 second of timeout to avoid blocking - // the thread indefinitely. 
- final HttpMessage message = queue.poll(1, TimeUnit.SECONDS); - if (null != message) { - process(message); - } - } catch (final InterruptedException e) { - if (shutdown) { - return; - } - } catch (final Exception e) { - errorHandler.handle(e); - } - } - } - - protected void process(HttpMessage message) { - try { - client.sendAsynchronously(message); - } catch (Exception e) { - DatadogUtilities.severe(logger, e, "Error while sending message: " + message); - } - } - - void shutdown() { - shutdown = true; - } -} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/transport/LoggerHttpErrorHandler.java b/src/main/java/org/datadog/jenkins/plugins/datadog/transport/LoggerHttpErrorHandler.java deleted file mode 100644 index 8e32db7e3..000000000 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/transport/LoggerHttpErrorHandler.java +++ /dev/null @@ -1,17 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.transport; - -import org.datadog.jenkins.plugins.datadog.DatadogUtilities; - -import java.util.logging.Logger; - -public class LoggerHttpErrorHandler implements HttpErrorHandler{ - - public static final HttpErrorHandler LOGGER_HTTP_ERROR_HANDLER = new LoggerHttpErrorHandler(); - - private static final Logger logger = Logger.getLogger(LoggerHttpErrorHandler.class.getName()); - - @Override - public void handle(Exception exception) { - DatadogUtilities.severe(logger, exception, exception.getMessage()); - } -} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/transport/NonBlockingHttpClient.java b/src/main/java/org/datadog/jenkins/plugins/datadog/transport/NonBlockingHttpClient.java deleted file mode 100644 index 960c44534..000000000 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/transport/NonBlockingHttpClient.java +++ /dev/null @@ -1,140 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.transport; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; 
-import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.TimeUnit; -import java.util.logging.Logger; - -public class NonBlockingHttpClient implements HttpClient { - - private static final int DEFAULT_TIMEOUT_MS = 10 * 1000; - private static final int DEFAULT_MAX_QUEUE_SIZE = 10_000; - private static final int SIZE_SPANS_SEND_BUFFER = 100; - - private static final Logger logger = Logger.getLogger(NonBlockingHttpClient.class.getName()); - - private static final HttpErrorHandler NO_OP_HANDLER = new HttpErrorHandler() { - @Override public void handle(final Exception e) { /* No-op */ } - }; - - private final HttpErrorHandler errorHandler; - private final HttpSender sender; - private final Map messageFactoryByType; - - private final ExecutorService executor = Executors.newSingleThreadExecutor(new ThreadFactory() { - final ThreadFactory delegate = Executors.defaultThreadFactory(); - @Override public Thread newThread(final Runnable r) { - final Thread result = delegate.newThread(r); - result.setName("DDNonBlockingHttpClient-" + result.getName()); - result.setDaemon(true); - return result; - } - }); - - private NonBlockingHttpClient(final Builder builder) { - final int queueSize = builder.queueSize != null ? builder.queueSize : DEFAULT_MAX_QUEUE_SIZE; - final int httpTimeoutMs = builder.httpTimeoutMs != null ? builder.httpTimeoutMs : DEFAULT_TIMEOUT_MS; - this.errorHandler = builder.errorHandler != null ? 
builder.errorHandler : NO_OP_HANDLER; - this.messageFactoryByType = builder.messageFactoryByType; - this.sender = createSender(queueSize, errorHandler, httpTimeoutMs); - executor.submit(sender); - - if(this.messageFactoryByType != null) { - for(Map.Entry messageFactoryEntry : messageFactoryByType.entrySet()) { - logger.info(messageFactoryEntry.getKey() + " -> " + messageFactoryEntry.getValue().getURL()); - } - } - } - - public static Builder builder() { - return new Builder(); - } - - private HttpSender createSender(final int queueSize, final HttpErrorHandler errorHandler, final int httpTimeoutMs) { - return new HttpSender(queueSize, errorHandler, httpTimeoutMs); - } - - public void send(List messages) { - if(messages != null && !messages.isEmpty()) { - final List spanSendBuffer = new ArrayList<>(SIZE_SPANS_SEND_BUFFER); - for(int i = 0; i < messages.size(); i++) { - spanSendBuffer.add(messages.get(i)); - - // Send every 100 spans or the last one. - if(spanSendBuffer.size() == SIZE_SPANS_SEND_BUFFER || i == (messages.size() - 1)) { - final List buffer = Collections.unmodifiableList(spanSendBuffer); - // We assume all payload messages belong to the same message type for now. 
- final PayloadMessage.Type type = buffer.get(0).getMessageType(); - final HttpMessage message = this.messageFactoryByType.get(type).create(buffer); - this.sender.send(message); - spanSendBuffer.clear(); - } - } - } - } - - @Override - public void stop() { - try { - sender.shutdown(); - executor.shutdown(); - try { - executor.awaitTermination(30, TimeUnit.SECONDS); - if (!executor.isTerminated()) { - executor.shutdownNow(); - } - } catch (Exception e) { - errorHandler.handle(e); - if (!executor.isTerminated()) { - executor.shutdownNow(); - } - } - } catch (final Exception e) { - errorHandler.handle(e); - } - } - - @Override - public void close() { - stop(); - } - - public static class Builder { - - private HttpErrorHandler errorHandler; - private Integer queueSize; - private Integer httpTimeoutMs; - private Map messageFactoryByType = new HashMap<>(); - - public Builder errorHandler(final HttpErrorHandler errorHandler) { - this.errorHandler = errorHandler; - return this; - } - - public Builder queueSize(final int queueSize) { - this.queueSize = queueSize; - return this; - } - - public Builder messageRoute(final PayloadMessage.Type key, final HttpMessageFactory messageFactory) { - this.messageFactoryByType.put(key, messageFactory); - return this; - } - - public Builder httpTimeoutMs(final int httpTimeoutMs) { - this.httpTimeoutMs = httpTimeoutMs; - return this; - } - - public NonBlockingHttpClient build() { - return new NonBlockingHttpClient(this); - } - - } -} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/transport/PayloadMapper.java b/src/main/java/org/datadog/jenkins/plugins/datadog/transport/PayloadMapper.java deleted file mode 100644 index 18c9f1285..000000000 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/transport/PayloadMapper.java +++ /dev/null @@ -1,10 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.transport; - -import java.util.List; - -public interface PayloadMapper> { - - byte[] map(final T list); - - String contentType(); 
-} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/transport/PayloadMessage.java b/src/main/java/org/datadog/jenkins/plugins/datadog/transport/PayloadMessage.java deleted file mode 100644 index 3d0aa556a..000000000 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/transport/PayloadMessage.java +++ /dev/null @@ -1,10 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.transport; - -public interface PayloadMessage { - - Type getMessageType(); - - enum Type { - TRACE - } -} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/util/CircuitBreaker.java b/src/main/java/org/datadog/jenkins/plugins/datadog/util/CircuitBreaker.java new file mode 100644 index 000000000..0cd5d5126 --- /dev/null +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/util/CircuitBreaker.java @@ -0,0 +1,83 @@ +package org.datadog.jenkins.plugins.datadog.util; + +import java.util.function.Consumer; +import javax.annotation.concurrent.GuardedBy; + +public class CircuitBreaker { + + private static final int DEFAULT_MIN_HEALTH_CHECK_DELAY_MILLIS = 1000; + private static final int DEFAULT_MAX_HEALTH_CHECK_DELAY_MILLIS = 60000; + private static final double DEFAULT_DELAY_FACTOR = 2.0; + + private final Consumer action; + private final Consumer fallback; + private final Consumer errorHandler; + private final long minHealthCheckDelayMillis; + private final long maxHealthCheckDelayMillis; + private final double delayFactor; + + @GuardedBy("this") + private boolean healthy; + @GuardedBy("this") + private long healthCheckDelayMillis; + @GuardedBy("this") + private long healthCheckAt; + + public CircuitBreaker(Consumer action, Consumer fallback, Consumer errorHandler) { + this(action, fallback, errorHandler, DEFAULT_MIN_HEALTH_CHECK_DELAY_MILLIS, DEFAULT_MAX_HEALTH_CHECK_DELAY_MILLIS, DEFAULT_DELAY_FACTOR); + } + + public CircuitBreaker(Consumer action, + Consumer fallback, + Consumer errorHandler, + long minHealthCheckDelayMillis, + long maxHealthCheckDelayMillis, + double 
delayFactor) { + this.action = action; + this.fallback = fallback; + this.errorHandler = errorHandler; + this.minHealthCheckDelayMillis = minHealthCheckDelayMillis; + this.maxHealthCheckDelayMillis = maxHealthCheckDelayMillis; + this.delayFactor = delayFactor; + synchronized (this) { + this.healthy = true; + this.healthCheckDelayMillis = minHealthCheckDelayMillis; + } + } + + public synchronized void accept(T t) { + // normal flow + if (healthy) { + try { + action.accept(t); + } catch (Exception e) { + errorHandler.accept(e); + healthy = false; + healthCheckAt = System.currentTimeMillis() + healthCheckDelayMillis; + fallback.accept(t); + } + + // try to recover + } else if (System.currentTimeMillis() >= healthCheckAt) { + try { + action.accept(t); + healthy = true; + healthCheckDelayMillis = minHealthCheckDelayMillis; + } catch (Exception e) { + errorHandler.accept(e); + healthCheckDelayMillis = Math.min(Math.round(healthCheckDelayMillis * delayFactor), maxHealthCheckDelayMillis); + healthCheckAt = System.currentTimeMillis() + healthCheckDelayMillis; + fallback.accept(t); + } + + // "broken" flow + } else { + fallback.accept(t); + } + } + + public synchronized void reset() { + healthy = true; + healthCheckDelayMillis = minHealthCheckDelayMillis; + } +} diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java b/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java index c3bb6067b..098351110 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java @@ -26,27 +26,29 @@ of this software and associated documentation files (the "Software"), to deal package org.datadog.jenkins.plugins.datadog.clients; import hudson.model.Run; -import hudson.util.Secret; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; import 
java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; -import java.util.concurrent.CountDownLatch; +import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; import net.sf.json.JSONObject; import org.datadog.jenkins.plugins.datadog.DatadogClient; import org.datadog.jenkins.plugins.datadog.DatadogEvent; import org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.datadog.jenkins.plugins.datadog.traces.DatadogBaseBuildLogic; -import org.datadog.jenkins.plugins.datadog.traces.DatadogBasePipelineLogic; import org.datadog.jenkins.plugins.datadog.traces.DatadogTraceBuildLogic; import org.datadog.jenkins.plugins.datadog.traces.DatadogTracePipelineLogic; import org.datadog.jenkins.plugins.datadog.traces.DatadogWebhookBuildLogic; import org.datadog.jenkins.plugins.datadog.traces.DatadogWebhookPipelineLogic; -import org.datadog.jenkins.plugins.datadog.transport.FakeTracesHttpClient; +import org.datadog.jenkins.plugins.datadog.traces.mapper.JsonTraceSpanMapper; +import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; +import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriteStrategy; import org.jenkinsci.plugins.workflow.graph.FlowNode; import org.junit.Assert; @@ -56,91 +58,12 @@ public class DatadogClientStub implements DatadogClient { public List serviceChecks; public List events; public List logLines; - public List webhooks; - - private List webhookLatches; - - public FakeTracesHttpClient agentHttpClient; - - public DatadogBaseBuildLogic traceBuildLogic; - public DatadogBasePipelineLogic tracePipelineLogic; - public DatadogClientStub() { this.metrics = new ArrayList<>(); this.serviceChecks = new ArrayList<>(); this.events = new ArrayList<>(); this.logLines = new ArrayList<>(); - this.webhooks = new ArrayList<>(); - this.webhookLatches = new ArrayList<>(); - this.agentHttpClient = new FakeTracesHttpClient(); - 
this.traceBuildLogic = new DatadogTraceBuildLogic(this.agentHttpClient); - this.tracePipelineLogic = new DatadogTracePipelineLogic(this.agentHttpClient); - } - - @Override - public void setUrl(String url) { - // noop - } - - @Override - public void setLogIntakeUrl(String logIntakeUrl) { - // noop - } - - @Override - public void setWebhookIntakeUrl(String webhookIntakeUrl) { - // noop - } - - @Override - public void setApiKey(Secret apiKey) { - // noop - } - - @Override - public void setHostname(String hostname) { - // noop - } - - @Override - public void setPort(Integer port) { - // noop - } - - @Override - public void setLogCollectionPort(Integer logCollectionPort) { - - } - - @Override - public boolean isDefaultIntakeConnectionBroken() { - return false; - } - - @Override - public void setDefaultIntakeConnectionBroken(boolean defaultIntakeConnectionBroken) { - // noop - } - - @Override - public boolean isLogIntakeConnectionBroken() { - return false; - } - - @Override - public void setLogIntakeConnectionBroken(boolean logIntakeConnectionBroken) { - // noop - } - - @Override - public boolean isWebhookIntakeConnectionBroken() { - return false; - } - - @Override - public void setWebhookIntakeConnectionBroken(boolean webhookIntakeConnectionBroken) { - // noop } @Override @@ -198,44 +121,6 @@ public boolean sendLogs(String payloadLogs) { return true; } - @Override - public boolean postWebhook(String webhook) { - synchronized (webhookLatches) { - JSONObject payload = JSONObject.fromObject(webhook); - webhooks.add(payload); - for(final CountDownLatch latch : webhookLatches) { - if(webhooks.size() >= latch.getCount()) { - while (latch.getCount() > 0) { - latch.countDown(); - } - } - } - } - return true; - } - - @Override - public boolean startBuildTrace(BuildData buildData, Run run) { - this.traceBuildLogic.startBuildTrace(buildData, run); - return true; - } - - @Override - public boolean finishBuildTrace(BuildData buildData, Run run) { - 
this.traceBuildLogic.finishBuildTrace(buildData, run); - return true; - } - - @Override - public boolean sendPipelineTrace(Run run, FlowNode flowNode) { - this.tracePipelineLogic.execute(run, flowNode); - return true; - } - - public FakeTracesHttpClient agentHttpClient(){ - return this.agentHttpClient; - } - public boolean assertMetric(String name, double value, String hostname, String[] tags) { DatadogMetric m = new DatadogMetric(name, value, hostname, Arrays.asList(tags)); if (this.metrics.contains(m)) { @@ -372,24 +257,103 @@ public static Map> addTagToMap(Map> tags return tags; } + private final StubTraceWriteStrategy traceWriteStrategy = new StubTraceWriteStrategy(); + + private static final class StubTraceWriteStrategy implements TraceWriteStrategy { + private volatile boolean isWebhook = false; + private final Collection traces = new LinkedBlockingQueue<>(); + private final Collection webhooks = new LinkedBlockingQueue<>(); + + @Override + public JSONObject serialize(BuildData buildData, Run run) { + if (isWebhook) { + JSONObject json = new DatadogWebhookBuildLogic().finishBuildTrace(buildData, run); + webhooks.add(json); + return json; + } else { + TraceSpan span = new DatadogTraceBuildLogic().createSpan(buildData, run); + traces.add(span); + return new JsonTraceSpanMapper().map(span); + } + } + + @Override + public Collection serialize(FlowNode flowNode, Run run) { + if (isWebhook) { + Collection spans = new DatadogWebhookPipelineLogic().execute(flowNode, run); + webhooks.addAll(spans); + return spans; + } else { + Collection traceSpans = new DatadogTracePipelineLogic().collectTraces(flowNode, run); + traces.addAll(traceSpans); + JsonTraceSpanMapper mapper = new JsonTraceSpanMapper(); + return traceSpans.stream().map(mapper::map).collect(Collectors.toList()); + } + } + + @Override + public void send(List spans) { + // no op + } + + public void configureForWebhooks() { + isWebhook = true; + } + } + public void configureForWebhooks() { - traceBuildLogic = 
new DatadogWebhookBuildLogic(this); - tracePipelineLogic = new DatadogWebhookPipelineLogic(this); + traceWriteStrategy.configureForWebhooks(); + } + + @Override + public TraceWriteStrategy createTraceWriteStrategy() { + return traceWriteStrategy; } public boolean waitForWebhooks(final int number) throws InterruptedException { - final CountDownLatch latch = new CountDownLatch(number); - synchronized (webhookLatches) { - if (webhooks.size() >= number) { + long timeout = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(10); + while (System.currentTimeMillis() < timeout) { + if (traceWriteStrategy.webhooks.size() >= number) { return true; } - webhookLatches.add(latch); + Thread.sleep(100L); + } + if (traceWriteStrategy.webhooks.size() < number) { + throw new AssertionError("Failed while waiting for " + number + " webhooks, got " + traceWriteStrategy.webhooks.size() + ": " + traceWriteStrategy.webhooks); + } else { + return true; + } + } + + public boolean waitForTraces(int number) throws InterruptedException { + long timeout = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(10); + while (System.currentTimeMillis() < timeout) { + if (traceWriteStrategy.traces.size() >= number) { + return true; + } + Thread.sleep(100L); + } + if (traceWriteStrategy.traces.size() < number) { + throw new AssertionError("Failed while waiting for " + number + " traces, got " + traceWriteStrategy.traces.size() + ": " + traceWriteStrategy.traces); + } else { + return true; } - return latch.await(10, TimeUnit.SECONDS); } public List getWebhooks() { - return this.webhooks; + return new ArrayList<>(traceWriteStrategy.webhooks); } + public List getSpans() { + ArrayList spans = new ArrayList<>(traceWriteStrategy.traces); + Collections.sort(spans, (span1, span2) -> { + if(span1.getStartNano() < span2.getStartNano()){ + return -1; + } else if (span1.getStartNano() > span2.getStartNano()) { + return 1; + } + return 0; + }); + return spans; + } } diff --git 
a/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientTest.java index 1d44c7b43..f121a2aaa 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientTest.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientTest.java @@ -51,8 +51,8 @@ public class DatadogClientTest { public void testHttpClientGetInstanceApiKey() { //validateConfiguration throws an error when given an invalid API key when the urls are valid Exception exception = Assert.assertThrows(IllegalArgumentException.class, () -> { - DatadogHttpClient.enableValidations = false; - DatadogHttpClient client = (DatadogHttpClient) DatadogHttpClient.getInstance("http", "test", "test", null); + DatadogApiClient.enableValidations = false; + DatadogApiClient client = (DatadogApiClient) DatadogApiClient.getInstance("http", "test", "test", null); client.validateConfiguration(); }); @@ -65,8 +65,8 @@ public void testHttpClientGetInstanceApiKey() { public void testHttpClientGetInstanceApiUrl() { // validateConfiguration throws an error when given an invalid url Exception exception = Assert.assertThrows(IllegalArgumentException.class, () -> { - DatadogHttpClient.enableValidations = false; - DatadogHttpClient client = (DatadogHttpClient) DatadogHttpClient.getInstance("", null, null, null); + DatadogApiClient.enableValidations = false; + DatadogApiClient client = (DatadogApiClient) DatadogApiClient.getInstance("", null, null, null); client.validateConfiguration(); }); String expectedMessage = "Datadog Target URL is not set properly"; @@ -78,8 +78,8 @@ public void testHttpClientGetInstanceApiUrl() { @Test public void testHttpClientGetInstanceEnableValidations() { // calling getInstance with invalid data returns null - DatadogHttpClient.enableValidations = true; - DatadogClient client = DatadogHttpClient.getInstance("https", null, null, null); + 
DatadogApiClient.enableValidations = true; + DatadogClient client = DatadogApiClient.getInstance("https", null, null, null); Assert.assertEquals(client, null); } @@ -112,7 +112,7 @@ public void testEvpProxyEnabled() { cfg.setEnableCiVisibility(true); DatadogAgentClient client = Mockito.spy(new DatadogAgentClient("test",1234, 1235, 1236)); Mockito.doReturn(new HashSet(Arrays.asList("/evp_proxy/v3/"))).when(client).fetchAgentSupportedEndpoints(); - Assert.assertTrue(client.checkEvpProxySupportAndUpdateLogic()); + Assert.assertTrue(client.isEvpProxySupported()); } @Test @@ -121,7 +121,7 @@ public void testEvpProxyDisabled() { cfg.setEnableCiVisibility(true); DatadogAgentClient client = Mockito.spy(new DatadogAgentClient("test",1234, 1235, 1236)); Mockito.doReturn(new HashSet()).when(client).fetchAgentSupportedEndpoints(); - Assert.assertFalse(client.checkEvpProxySupportAndUpdateLogic()); + Assert.assertFalse(client.isEvpProxySupported()); } @Test @@ -134,8 +134,8 @@ public void testEmptyAgentSupportedEndpointsWithNoAgent() { @Test public void testIncrementCountAndFlush() throws IOException, InterruptedException { - DatadogHttpClient.enableValidations = false; - DatadogClient client = DatadogHttpClient.getInstance("test", null, null, null); + DatadogApiClient.enableValidations = false; + DatadogClient client = DatadogApiClient.getInstance("test", null, null, null); Map> tags1 = new HashMap<>(); tags1 = DatadogClientStub.addTagToMap(tags1, "tag1", "value"); tags1 = DatadogClientStub.addTagToMap(tags1, "tag2", "value"); @@ -196,8 +196,8 @@ public void testIncrementCountAndFlushThreadedEnv() throws IOException, Interrup @Override public void run() { // We use a new instance of a client on every run. 
- DatadogHttpClient.enableValidations = false; - DatadogClient client = DatadogHttpClient.getInstance("test2", null, null, null); + DatadogApiClient.enableValidations = false; + DatadogClient client = DatadogApiClient.getInstance("test2", null, null, null); Map> tags = new HashMap<>(); tags = DatadogClientStub.addTagToMap(tags, "tag1", "value"); tags = DatadogClientStub.addTagToMap(tags, "tag2", "value"); @@ -225,8 +225,8 @@ public void testIncrementCountAndFlushThreadedEnvThreadCheck() throws IOExceptio @Override public void run() { // We use a new instance of a client on every run. - DatadogHttpClient.enableValidations = false; - DatadogClient client = DatadogHttpClient.getInstance("test3", null, null, null); + DatadogApiClient.enableValidations = false; + DatadogClient client = DatadogApiClient.getInstance("test3", null, null, null); Map> tags = new HashMap<>(); tags = DatadogClientStub.addTagToMap(tags, "tag1", "value"); tags = DatadogClientStub.addTagToMap(tags, "tag2", "value"); @@ -264,8 +264,8 @@ public Boolean call() throws Exception { @Test public void testIncrementCountAndFlushThreadedEnvOneClient() throws IOException, InterruptedException { ExecutorService executor = Executors.newFixedThreadPool(2); - DatadogHttpClient.enableValidations = false; - final DatadogClient client = DatadogHttpClient.getInstance("testing", null, null, null); + DatadogApiClient.enableValidations = false; + final DatadogClient client = DatadogApiClient.getInstance("testing", null, null, null); Runnable increment = new Runnable() { @Override public void run() { diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java index cbf1c0c56..3c159bb75 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java @@ -27,6 
+27,12 @@ import hudson.model.Label; import hudson.slaves.DumbSlave; import hudson.slaves.EnvironmentVariablesNodeProperty; +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; import jenkins.model.Jenkins; import net.sf.json.JSONArray; import net.sf.json.JSONObject; @@ -37,20 +43,12 @@ import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; import org.datadog.jenkins.plugins.datadog.traces.CITags; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; -import org.datadog.jenkins.plugins.datadog.transport.FakeTracesHttpClient; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.jvnet.hudson.test.ExtractResourceSCM; import org.jvnet.hudson.test.JenkinsRule; -import java.io.IOException; -import java.io.InputStream; -import java.net.URL; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; - public class DatadogBuildListenerIT extends DatadogTraceAbstractTest { private static final String SAMPLE_SERVICE_NAME = "sampleServiceName"; @@ -91,9 +89,8 @@ public void testTracesQueueTime() throws Exception{ DumbSlave worker = null; try { worker = jenkinsRule.createOnlineSlave(Label.get("testBuild")); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(1); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(1); + final List spans = clientStub.getSpans(); assertEquals(1, spans.size()); final TraceSpan buildSpan = spans.get(0); @@ -129,9 +126,8 @@ public void testTraces() throws Exception { FreeStyleBuild run = project.scheduleBuild2(0).get(); final String buildPrefix = BuildPipelineNode.NodeType.PIPELINE.getTagName(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(1); - final List spans = agentHttpClient.getSpans(); + 
clientStub.waitForTraces(1); + final List spans = clientStub.getSpans(); assertEquals(1, spans.size()); final TraceSpan buildSpan = spans.get(0); @@ -185,9 +181,8 @@ public void testGitDefaultBranch() throws Exception { } project.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(1); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(1); + final List spans = clientStub.getSpans(); assertEquals(1, spans.size()); final TraceSpan buildSpan = spans.get(0); @@ -217,9 +212,8 @@ public void testUserSuppliedGitWithoutCommitInfo() throws Exception { } project.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(1); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(1); + final List spans = clientStub.getSpans(); assertEquals(1, spans.size()); final TraceSpan buildSpan = spans.get(0); @@ -255,9 +249,8 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { } project.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(1); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(1); + final List spans = clientStub.getSpans(); assertEquals(1, spans.size()); final TraceSpan buildSpan = spans.get(0); @@ -278,8 +271,6 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { @Test public void testUserSuppliedGitWithCommitInfoWebhook() throws Exception { - clientStub.configureForWebhooks(); - Jenkins jenkins = jenkinsRule.jenkins; final EnvironmentVariablesNodeProperty prop = new EnvironmentVariablesNodeProperty(); EnvVars env = prop.getEnvVars(); @@ -340,9 +331,8 @@ public void testRawRepositoryUrl() throws Exception { project.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - 
agentHttpClient.waitForTraces(1); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(1); + final List spans = clientStub.getSpans(); assertEquals(1, spans.size()); final TraceSpan buildSpan = spans.get(0); @@ -368,9 +358,8 @@ public void testFilterSensitiveInfoRepoUrl() throws Exception { project.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(1); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(1); + final List spans = clientStub.getSpans(); assertEquals(1, spans.size()); final TraceSpan buildSpan = spans.get(0); @@ -384,8 +373,6 @@ public void testFilterSensitiveInfoRepoUrl() throws Exception { @Test public void testGitAlternativeRepoUrlWebhook() throws Exception { - clientStub.configureForWebhooks(); - Jenkins jenkins = jenkinsRule.jenkins; final EnvironmentVariablesNodeProperty prop = new EnvironmentVariablesNodeProperty(); EnvVars env = prop.getEnvVars(); @@ -421,16 +408,13 @@ public void testTracesDisabled() throws Exception { final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccess-notraces"); project.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(0); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(0); + final List spans = clientStub.getSpans(); assertEquals(0, spans.size()); } @Test public void testTracesDisabledWebhooks() throws Exception { - clientStub.configureForWebhooks(); - DatadogGlobalConfiguration cfg = DatadogUtilities.getDatadogGlobalDescriptor(); cfg.setEnableCiVisibility(false); @@ -451,9 +435,8 @@ public void testCITagsOnTraces() throws Exception { final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessTags_job"); project.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - 
agentHttpClient.waitForTraces(1); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(1); + final List spans = clientStub.getSpans(); assertEquals(1, spans.size()); final TraceSpan buildSpan = spans.get(0); @@ -464,8 +447,6 @@ public void testCITagsOnTraces() throws Exception { @Test public void testCITagsOnWebhooks() throws Exception { - clientStub.configureForWebhooks(); - DatadogGlobalConfiguration cfg = DatadogUtilities.getDatadogGlobalDescriptor(); cfg.setGlobalJobTags("(.*?)_job, global_job_tag:$ENV_VAR"); cfg.setGlobalTags("global_tag:$ENV_VAR"); @@ -489,9 +470,8 @@ public void testAvoidSettingEmptyGitInfoOnTraces() throws Exception { final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessTagsNoGitInfo"); project.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(1); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(1); + final List spans = clientStub.getSpans(); assertEquals(1, spans.size()); final TraceSpan buildSpan = spans.get(0); diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java index a224dd3c8..12c3f19af 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java @@ -51,7 +51,6 @@ import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; import org.datadog.jenkins.plugins.datadog.traces.CITags; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; -import org.datadog.jenkins.plugins.datadog.transport.FakeTracesHttpClient; import org.jenkinsci.plugins.workflow.actions.LabelAction; import org.jenkinsci.plugins.workflow.actions.ThreadNameAction; import 
org.jenkinsci.plugins.workflow.actions.TimingAction; @@ -192,9 +191,8 @@ public void testIntegration() throws Exception { } } - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(16); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(16); + final List spans = clientStub.getSpans(); assertEquals(16, spans.size()); } @@ -224,9 +222,8 @@ public void testIntegrationGitInfo() throws Exception { jenkins.getGlobalNodeProperties().add(prop); job.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(3); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(3); + final List spans = clientStub.getSpans(); assertEquals(3, spans.size()); final TraceSpan buildSpan = spans.get(0); assertGitVariablesOnSpan(buildSpan, "master"); @@ -317,9 +314,8 @@ public void testIntegrationGitInfoDefaultBranchEnvVar() throws Exception { jenkins.getGlobalNodeProperties().add(prop); job.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(3); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(3); + final List spans = clientStub.getSpans(); assertEquals(3, spans.size()); final TraceSpan buildSpan = spans.get(0); assertGitVariablesOnSpan(buildSpan, "hardcoded-master"); @@ -351,9 +347,8 @@ public void testIntegrationGitInfoOverrideCommit() throws Exception { jenkins.getGlobalNodeProperties().add(prop); job.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(5); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(5); + final List spans = clientStub.getSpans(); assertEquals(5, spans.size()); for(TraceSpan span : spans) { assertEquals("401d997a6eede777602669ccaec059755c98161f", 
span.getMeta().get(CITags.GIT_COMMIT_SHA)); @@ -386,9 +381,8 @@ public void testIntegrationGitAlternativeRepoUrl() throws Exception { jenkins.getGlobalNodeProperties().add(prop); job.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(5); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(5); + final List spans = clientStub.getSpans(); assertEquals(5, spans.size()); for(TraceSpan span : spans) { assertEquals("https://github.com/johndoe/foobar.git", span.getMeta().get(CITags.GIT_REPOSITORY_URL)); @@ -458,9 +452,8 @@ public void testUserSuppliedGitWithoutCommitInfo() throws Exception { jenkins.getGlobalNodeProperties().add(prop); job.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(3); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(3); + final List spans = clientStub.getSpans(); assertEquals(3, spans.size()); final TraceSpan buildSpan = spans.get(0); assertGitVariablesOnSpan(buildSpan, "master"); @@ -539,9 +532,8 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { jenkins.getGlobalNodeProperties().add(prop); job.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(3); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(3); + final List spans = clientStub.getSpans(); assertEquals(3, spans.size()); final TraceSpan buildSpan = spans.get(0); final Map meta = buildSpan.getMeta(); @@ -577,9 +569,8 @@ public void testRawRepositoryUrl() throws Exception { jenkins.getGlobalNodeProperties().add(prop); job.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(3); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(3); + final List spans = 
clientStub.getSpans(); assertEquals(3, spans.size()); final TraceSpan buildSpan = spans.get(0); final Map meta = buildSpan.getMeta(); @@ -607,9 +598,8 @@ public void testFilterSensitiveInfoRepoUrl() throws Exception { jenkins.getGlobalNodeProperties().add(prop); job.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(3); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(3); + final List spans = clientStub.getSpans(); assertEquals(3, spans.size()); final TraceSpan buildSpan = spans.get(0); final Map meta = buildSpan.getMeta(); @@ -636,9 +626,8 @@ public void testStageNamePropagation() throws Exception{ }).start(); jenkinsRule.createOnlineSlave(Label.get("testStageName")); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(6); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(6); + final List spans = clientStub.getSpans(); assertEquals(6, spans.size()); final TraceSpan stage1 = searchSpan(spans, "Stage 1"); @@ -720,9 +709,8 @@ public void testIntegrationPipelineQueueTimeOnStages() throws Exception { Thread.sleep(10000); final DumbSlave worker = jenkinsRule.createOnlineSlave(Label.get("testStage")); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(6); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(6); + final List spans = clientStub.getSpans(); assertEquals(6, spans.size()); final TraceSpan buildSpan = spans.get(0); @@ -830,9 +818,8 @@ public void testIntegrationPipelineQueueTimeOnPipeline() throws Exception { Thread.sleep(15000); final DumbSlave worker = jenkinsRule.createOnlineSlave(Label.get("testPipeline")); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(3); - final List spans = agentHttpClient.getSpans(); + 
clientStub.waitForTraces(3); + final List spans = clientStub.getSpans(); assertEquals(3, spans.size()); final TraceSpan buildSpan = spans.get(0); @@ -884,9 +871,8 @@ public void testIntegrationNoFailureTag() throws Exception { final String stagePrefix = BuildPipelineNode.NodeType.STAGE.getTagName(); final String stepPrefix = BuildPipelineNode.NodeType.STEP.getTagName(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(3); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(3); + final List spans = clientStub.getSpans(); assertEquals(3, spans.size()); final TraceSpan buildSpan = spans.get(0); @@ -975,9 +961,8 @@ public void testIntegrationPipelineSkippedLogic() throws Exception { job.setDefinition(new CpsFlowDefinition(definition, true)); job.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(2); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(2); + final List spans = clientStub.getSpans(); assertEquals(2, spans.size()); final TraceSpan stage = spans.get(1); @@ -1032,9 +1017,8 @@ public void testIntegrationTracesDisabled() throws Exception{ clientStub.assertMetric("jenkins.job.stage_duration", hostname, tags); clientStub.assertMetric("jenkins.job.stage_pause_duration", 0, hostname, tags); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(0); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(0); + final List spans = clientStub.getSpans(); assertEquals(0, spans.size()); } @@ -1094,9 +1078,8 @@ public void testStagesNodeNames_complexPipelineStages01() throws Exception { final DumbSlave worker02 = jenkinsRule.createOnlineSlave(Label.get("worker02")); final DumbSlave worker03 = jenkinsRule.createOnlineSlave(Label.get("worker03")); - final FakeTracesHttpClient agentHttpClient = 
clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(19); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(19); + final List spans = clientStub.getSpans(); assertEquals(19, spans.size()); final TraceSpan buildSpan = spans.get(0); @@ -1190,9 +1173,8 @@ public void testGlobalTagsPropagationsTraces() throws Exception { job.setDefinition(new CpsFlowDefinition(definition, true)); job.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(3); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(3); + final List spans = clientStub.getSpans(); assertEquals(3, spans.size()); final TraceSpan buildSpan = spans.get(0); @@ -1227,9 +1209,8 @@ public void testErrorPropagationOnStages() throws Exception { job.setDefinition(new CpsFlowDefinition(definition, true)); job.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(3); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(3); + final List spans = clientStub.getSpans(); assertEquals(3, spans.size()); final TraceSpan stepSpan = spans.get(2); @@ -1294,9 +1275,8 @@ public void testUnstablePropagationOnStages() throws Exception { job.setDefinition(new CpsFlowDefinition(definition, true)); job.scheduleBuild2(0).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(3); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(3); + final List spans = clientStub.getSpans(); assertEquals(3, spans.size()); final TraceSpan stepSpan = spans.get(2); @@ -1365,9 +1345,8 @@ public void testCustomHostnameForWorkers() throws Exception { }).start(); final DumbSlave worker = jenkinsRule.createOnlineSlave(Label.get("testPipelineWorker")); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - 
agentHttpClient.waitForTraces(3); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(3); + final List spans = clientStub.getSpans(); assertEquals(3, spans.size()); final TraceSpan buildSpan = spans.get(0); @@ -1463,9 +1442,8 @@ public void testCustomHostnameForWorkersInFreestyleJob() throws Exception { } }).start(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(1); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(1); + final List spans = clientStub.getSpans(); assertEquals(1, spans.size()); final TraceSpan buildSpan = spans.get(0); @@ -1555,9 +1533,8 @@ public void testIsManualTrue() throws Exception { CauseAction causeAction = new CauseAction(new TimerTriggerCause(), new UserIdCause("johanna")); job.scheduleBuild2(0, causeAction).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(3); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(3); + final List spans = clientStub.getSpans(); assertEquals(3, spans.size()); final TraceSpan buildSpan = spans.get(0); final String isManual = buildSpan.getMeta().get(CITags.IS_MANUAL); @@ -1597,9 +1574,8 @@ public void testIsManualFalse() throws Exception { CauseAction causeAction = new CauseAction(new TimerTriggerCause(), new SCMTriggerCause("scm")); job.scheduleBuild2(0, causeAction).get(); - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(3); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(3); + final List spans = clientStub.getSpans(); assertEquals(3, spans.size()); final TraceSpan buildSpan = spans.get(0); final String isManual = buildSpan.getMeta().get(CITags.IS_MANUAL); @@ -1736,9 +1712,8 @@ private void givenPipeline(String name, String definitionPath) throws Exception } private List whenExecuting(int expectedSpanCount) throws 
InterruptedException, TimeoutException { - final FakeTracesHttpClient agentHttpClient = clientStub.agentHttpClient(); - agentHttpClient.waitForTraces(expectedSpanCount); - final List spans = agentHttpClient.getSpans(); + clientStub.waitForTraces(expectedSpanCount); + final List spans = clientStub.getSpans(); assertEquals(expectedSpanCount, spans.size()); return spans; } diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/traces/mapper/JsonTraceSpanMapperTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/traces/mapper/JsonTraceSpanMapperTest.java index 195d3bb05..543b40fe9 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/traces/mapper/JsonTraceSpanMapperTest.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/traces/mapper/JsonTraceSpanMapperTest.java @@ -12,18 +12,14 @@ import static org.datadog.jenkins.plugins.datadog.traces.mapper.JsonTraceSpanMapper.SPAN_TYPE; import static org.datadog.jenkins.plugins.datadog.traces.mapper.JsonTraceSpanMapper.START; import static org.datadog.jenkins.plugins.datadog.traces.mapper.JsonTraceSpanMapper.TRACE_ID; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; -import net.sf.json.JSONArray; +import java.util.Map; import net.sf.json.JSONObject; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; import org.junit.Test; -import java.nio.charset.StandardCharsets; -import java.util.Arrays; -import java.util.Collections; -import java.util.Map; - public class JsonTraceSpanMapperTest { public static final JsonTraceSpanMapper sut = new JsonTraceSpanMapper(); @@ -41,14 +37,9 @@ public void testJsonTraceSpanMapper() { span.setEndNano(1000); //When - final JSONArray traces = JSONArray.fromObject(new String(sut.map(Collections.singletonList(span)), StandardCharsets.UTF_8)); + final JSONObject jsonSpan = sut.map(span); //Then - assertEquals(1, traces.size()); - JSONArray trace = (JSONArray) traces.get(0); 
- assertEquals(1, trace.size()); - - final JSONObject jsonSpan = (JSONObject) trace.get(0); assertNotEquals(0, jsonSpan.get(TRACE_ID)); assertNotEquals(0, jsonSpan.get(SPAN_ID)); assertNotEquals(0, jsonSpan.get(PARENT_ID)); @@ -58,35 +49,9 @@ public void testJsonTraceSpanMapper() { assertEquals("test-service-name", jsonSpan.get(SERVICE_NAME)); assertEquals("test-type", jsonSpan.get(SPAN_TYPE)); assertEquals("meta-value", ((Map)jsonSpan.get(META)).get("meta-key")); - assertEquals(1, ((Map)jsonSpan.get(METRICS)).get("metric-key")); + assertEquals(1.0, ((Map)jsonSpan.get(METRICS)).get("metric-key")); assertEquals(0, jsonSpan.get(START)); assertEquals(1000, jsonSpan.get(DURATION)); } - - @Test - public void testSameTrace() { - //Given - final TraceSpan rootSpan = new TraceSpan("root", 0); - final TraceSpan childSpan = new TraceSpan("child", 0, rootSpan.context()); - - //When - final JSONArray traces = JSONArray.fromObject(new String(sut.map(Arrays.asList(rootSpan, childSpan)), StandardCharsets.UTF_8)); - - //Then - assertEquals(1, traces.size()); - } - - @Test - public void testDifferentTrace() { - //Given - final TraceSpan rootOneSpan = new TraceSpan("rootOne", 0); - final TraceSpan rootTwoSpan = new TraceSpan("rootTwo", 0); - - //When - final JSONArray traces = JSONArray.fromObject(new String(sut.map(Arrays.asList(rootOneSpan, rootTwoSpan)), StandardCharsets.UTF_8)); - - //Then - assertEquals(2, traces.size()); - } } \ No newline at end of file diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/transport/FakeHttpSender.java b/src/test/java/org/datadog/jenkins/plugins/datadog/transport/FakeHttpSender.java deleted file mode 100644 index a13c01903..000000000 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/transport/FakeHttpSender.java +++ /dev/null @@ -1,70 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.transport; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.List; -import 
java.util.concurrent.BlockingQueue; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; -import java.util.concurrent.atomic.AtomicInteger; - -public class FakeHttpSender extends HttpSender { - private static final HttpErrorHandler NO_OP = new HttpErrorHandler() { - @Override - public void handle(Exception exception) { - // N/A - } - }; - - private static final Logger log = LoggerFactory.getLogger(FakeHttpSender.class); - - private final List httpMessages = new CopyOnWriteArrayList<>(); - private final List latches = new ArrayList(); - private final AtomicInteger messageCount = new AtomicInteger(); - - FakeHttpSender(BlockingQueue queue) { - super(queue, NO_OP, 1000); - } - - @Override - protected void process(HttpMessage message) { - this.messageCount.incrementAndGet(); - synchronized (this.latches) { - httpMessages.add(message); - for(final CountDownLatch latch : latches) { - if(httpMessages.size() >= latch.getCount()) { - while (latch.getCount() > 0) { - latch.countDown(); - } - } - } - } - } - - public boolean waitForMessagesMax(final int number, int seconds) throws InterruptedException { - final CountDownLatch latch = new CountDownLatch(number); - synchronized (latches) { - if (httpMessages.size() >= number) { - return true; - } - latches.add(latch); - } - return latch.await(seconds, TimeUnit.SECONDS); - } - - public void waitForMessages(final int number) throws InterruptedException, TimeoutException { - if (!waitForMessagesMax(number, 20)) { - String msg = "Timeout waiting for " + number + " message(s). 
messages.size() == " + httpMessages.size(); - log.warn(msg); - throw new TimeoutException(msg); - } - } - - public List getHttpMessages() { - return httpMessages; - } -} diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/transport/FakeTracesHttpClient.java b/src/test/java/org/datadog/jenkins/plugins/datadog/transport/FakeTracesHttpClient.java deleted file mode 100644 index 17b3a2105..000000000 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/transport/FakeTracesHttpClient.java +++ /dev/null @@ -1,85 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.transport; - -import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; -import java.util.concurrent.atomic.AtomicInteger; - -public class FakeTracesHttpClient implements HttpClient { - private static final Logger log = LoggerFactory.getLogger(FakeTracesHttpClient.class); - - private final List spans = new CopyOnWriteArrayList<>(); - private final List latches = new ArrayList(); - private final AtomicInteger traceCount = new AtomicInteger(); - - @Override - public void send(List messages) { - for(PayloadMessage msg : messages){ - final TraceSpan span = (TraceSpan) msg; - this.traceCount.incrementAndGet(); - synchronized (this.latches) { - spans.add(span); - for(final CountDownLatch latch : latches) { - if(spans.size() >= latch.getCount()) { - while (latch.getCount() > 0) { - latch.countDown(); - } - } - } - } - } - } - - public boolean waitForTracesMax(final int number, int seconds) throws InterruptedException { - final CountDownLatch latch = new CountDownLatch(number); - synchronized (latches) { - if (spans.size() >= number) { - return 
true; - } - latches.add(latch); - } - return latch.await(seconds, TimeUnit.SECONDS); - } - - public void waitForTraces(final int number) throws InterruptedException, TimeoutException { - if (!waitForTracesMax(number, 20)) { - String msg = "Timeout waiting for " + number + " trace(s). FakeAgentHttpClient.size() == " + spans.size(); - log.warn(msg); - throw new TimeoutException(msg); - } - } - - public List getSpans() { - Collections.sort(spans, new Comparator() { - @Override - public int compare(TraceSpan span1, TraceSpan span2) { - if(span1.getStartNano() < span2.getStartNano()){ - return -1; - } else if (span1.getStartNano() > span2.getStartNano()) { - return 1; - } - return 0; - } - }); - return spans; - } - - @Override - public void stop() { - // N/A - } - - @Override - public void close() { - // N/A - } -} diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/transport/HttpSenderTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/transport/HttpSenderTest.java deleted file mode 100644 index 1d7df983c..000000000 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/transport/HttpSenderTest.java +++ /dev/null @@ -1,44 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.transport; - -import static org.junit.Assert.*; - -import org.junit.Test; - -import java.net.URL; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.TimeoutException; - -public class HttpSenderTest { - - private static final HttpMessage SAMPLE_MESSAGE = new HttpMessage(buildURL("http://localhost"), null, null, null); - - @Test - public void testHttpSenderConsumer() throws TimeoutException, InterruptedException { - //Given - final BlockingQueue queue = new LinkedBlockingQueue<>(10); - final FakeHttpSender sender = new FakeHttpSender(queue); - final ExecutorService executor = Executors.newSingleThreadExecutor(); - 
executor.submit(sender); - - //When - sender.send(SAMPLE_MESSAGE); - sender.send(SAMPLE_MESSAGE); - sender.send(SAMPLE_MESSAGE); - - //Then - sender.waitForMessages(3); - assertEquals(3, sender.getHttpMessages().size()); - } - - - private static URL buildURL(final String urlStr) { - try { - return new URL(urlStr); - } catch (Exception ex) { - throw new RuntimeException(ex); - } - } -} \ No newline at end of file From 5c3ffb3a2fdbd7b28e867eaa111a491a761ba50a Mon Sep 17 00:00:00 2001 From: Nikita Tkachenko Date: Tue, 23 Jan 2024 21:40:48 +0100 Subject: [PATCH 02/17] Submit pipeline stages and steps without waiting until the pipeline finishes --- pom.xml | 5 + .../plugins/datadog/DatadogUtilities.java | 168 +++--- .../datadog/clients/DatadogAgentClient.java | 6 +- .../events/AbstractDatadogBuildEvent.java | 2 +- .../listeners/DatadogBuildListener.java | 69 +-- .../listeners/DatadogGraphListener.java | 425 ++++---------- .../listeners/DatadogQueueListener.java | 108 ++-- .../datadog/listeners/DatadogSCMListener.java | 165 +++++- .../listeners/DatadogStepListener.java | 248 ++++++++- .../plugins/datadog/model/BuildData.java | 493 ++++++----------- .../plugins/datadog/model/BuildPipeline.java | 230 -------- .../datadog/model/BuildPipelineNode.java | 519 ++++-------------- .../datadog/model/CIGlobalTagsAction.java | 24 - .../datadog/model/DatadogPluginAction.java | 10 + .../datadog/model/FlowNodeQueueData.java | 44 -- .../datadog/model/GitCommitAction.java | 268 ++++++--- .../datadog/model/GitRepositoryAction.java | 116 +++- .../datadog/model/PipelineNodeInfoAction.java | 102 +++- .../model/PipelineQueueInfoAction.java | 112 +++- .../datadog/model/StageBreakdownAction.java | 29 - .../jenkins/plugins/datadog/model/Status.java | 35 ++ .../plugins/datadog/model/StepData.java | 157 ------ .../plugins/datadog/model/StepTraceData.java | 16 - .../datadog/model/TraceInfoAction.java | 82 +++ .../datadog/model/node/DequeueAction.java | 65 +++ 
.../datadog/model/node/EnqueueAction.java | 64 +++ .../datadog/model/node/NodeInfoAction.java | 125 +++++ .../datadog/model/node/QueueInfoAction.java | 10 + .../datadog/model/node/StatusAction.java | 77 +++ .../plugins/datadog/steps/DatadogOptions.java | 2 + .../datadog/traces/BuildSpanAction.java | 90 ++- .../datadog/traces/DatadogBaseBuildLogic.java | 83 ++- .../traces/DatadogBasePipelineLogic.java | 110 +--- .../traces/DatadogTraceBuildLogic.java | 89 ++- .../traces/DatadogTracePipelineLogic.java | 152 ++--- .../traces/DatadogWebhookBuildLogic.java | 70 +-- .../traces/DatadogWebhookPipelineLogic.java | 185 +++---- .../datadog/traces/IsPipelineAction.java | 9 - .../datadog/traces/StepDataAction.java | 29 - .../datadog/traces/StepTraceDataAction.java | 26 - .../TraceStepEnvironmentContributor.java | 31 +- .../datadog/traces/message/TraceSpan.java | 64 +++ .../traces/write/AgentTraceWriteStrategy.java | 12 +- .../traces/write/TraceWriteStrategy.java | 10 +- .../traces/write/TraceWriteStrategyImpl.java | 14 +- .../datadog/traces/write/TraceWriter.java | 17 +- .../datadog/util/DatadogActionConverter.java | 22 + .../plugins/datadog/util/TagsUtil.java | 34 +- .../plugins/datadog/util/git/GitUtils.java | 281 ++-------- .../datadog/util/git/RepositoryInfo.java | 16 +- .../util/git/RepositoryInfoCallback.java | 67 ++- .../DatadogGlobalConfigurationTest.java | 7 +- .../datadog/clients/DatadogClientStub.java | 85 ++- .../datadog/clients/DatadogClientTest.java | 8 +- .../listeners/DatadogBuildListenerIT.java | 107 ++-- .../listeners/DatadogBuildListenerTest.java | 3 +- .../listeners/DatadogGraphListenerTest.java | 172 +++--- .../listeners/DatadogTraceAbstractTest.java | 20 +- .../DatadogTaskListenerDecoratorTest.java | 19 +- .../datadog/model/ActionConverterTest.java | 37 ++ .../datadog/model/BuildPipelineTest.java | 67 --- .../model/BuildSpanActionConverterTest.java | 35 ++ .../model/GitCommitActionConverterTest.java | 41 ++ .../GitRepositoryActionConverterTest.java | 39 ++ 
.../PipelineNodeInfoActionConverterTest.java | 39 ++ .../PipelineQueueInfoActionConverterTest.java | 34 ++ .../model/TraceInfoActionConverterTest.java | 32 ++ .../node/DequeueActionConverterTest.java | 31 ++ .../node/EnqueueActionConverterTest.java | 32 ++ .../node/NodeInfoActionConverterTest.java | 39 ++ .../model/node/StatusActionConverterTest.java | 33 ++ .../datadog/steps/DatadogOptionsTest.java | 17 +- .../plugins/datadog/stubs/BuildStub.java | 15 +- .../testPipelineSuccessLocalCheckout.txt | 13 + .../testPipelinesOverrideGitCommit.txt | 3 +- .../jenkins/plugins/datadog/test-config.yml | 2 +- 76 files changed, 3150 insertions(+), 2967 deletions(-) delete mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildPipeline.java delete mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/model/CIGlobalTagsAction.java create mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/model/DatadogPluginAction.java delete mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/model/FlowNodeQueueData.java delete mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/model/StageBreakdownAction.java create mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/model/Status.java delete mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/model/StepData.java delete mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/model/StepTraceData.java create mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/model/TraceInfoAction.java create mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/model/node/DequeueAction.java create mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/model/node/EnqueueAction.java create mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/model/node/NodeInfoAction.java create mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/model/node/QueueInfoAction.java create mode 100644 
src/main/java/org/datadog/jenkins/plugins/datadog/model/node/StatusAction.java delete mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/traces/IsPipelineAction.java delete mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/traces/StepDataAction.java delete mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/traces/StepTraceDataAction.java create mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/util/DatadogActionConverter.java create mode 100644 src/test/java/org/datadog/jenkins/plugins/datadog/model/ActionConverterTest.java delete mode 100644 src/test/java/org/datadog/jenkins/plugins/datadog/model/BuildPipelineTest.java create mode 100644 src/test/java/org/datadog/jenkins/plugins/datadog/model/BuildSpanActionConverterTest.java create mode 100644 src/test/java/org/datadog/jenkins/plugins/datadog/model/GitCommitActionConverterTest.java create mode 100644 src/test/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryActionConverterTest.java create mode 100644 src/test/java/org/datadog/jenkins/plugins/datadog/model/PipelineNodeInfoActionConverterTest.java create mode 100644 src/test/java/org/datadog/jenkins/plugins/datadog/model/PipelineQueueInfoActionConverterTest.java create mode 100644 src/test/java/org/datadog/jenkins/plugins/datadog/model/TraceInfoActionConverterTest.java create mode 100644 src/test/java/org/datadog/jenkins/plugins/datadog/model/node/DequeueActionConverterTest.java create mode 100644 src/test/java/org/datadog/jenkins/plugins/datadog/model/node/EnqueueActionConverterTest.java create mode 100644 src/test/java/org/datadog/jenkins/plugins/datadog/model/node/NodeInfoActionConverterTest.java create mode 100644 src/test/java/org/datadog/jenkins/plugins/datadog/model/node/StatusActionConverterTest.java create mode 100644 src/test/resources/org/datadog/jenkins/plugins/datadog/listeners/testPipelineSuccessLocalCheckout.txt diff --git a/pom.xml b/pom.xml index 096489f7d..54aa82119 100644 --- a/pom.xml +++ 
b/pom.xml @@ -152,6 +152,11 @@ workflow-basic-steps test + + org.jenkins-ci.plugins + git + test + org.jenkins-ci.plugins.workflow workflow-durable-task-step diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java b/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java index a2107fb06..cc22d68a2 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java @@ -27,14 +27,13 @@ of this software and associated documentation files (the "Software"), to deal import hudson.EnvVars; import hudson.ExtensionList; -import hudson.XmlFile; import hudson.model.Computer; import hudson.model.Item; import hudson.model.Result; import hudson.model.Run; +import hudson.model.TaskListener; import hudson.model.User; import hudson.model.labels.LabelAtom; -import hudson.util.LogTaskListener; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; @@ -53,11 +52,9 @@ of this software and associated documentation files (the "Software"), to deal import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Optional; import java.util.Set; import java.util.TimeZone; import java.util.function.Function; -import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -68,21 +65,10 @@ of this software and associated documentation files (the "Software"), to deal import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.exception.ExceptionUtils; -import org.datadog.jenkins.plugins.datadog.audit.DatadogAudit; import org.datadog.jenkins.plugins.datadog.clients.HttpClient; -import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; -import org.datadog.jenkins.plugins.datadog.model.CIGlobalTagsAction; -import 
org.datadog.jenkins.plugins.datadog.model.GitCommitAction; -import org.datadog.jenkins.plugins.datadog.model.GitRepositoryAction; -import org.datadog.jenkins.plugins.datadog.model.PipelineNodeInfoAction; -import org.datadog.jenkins.plugins.datadog.model.PipelineQueueInfoAction; -import org.datadog.jenkins.plugins.datadog.model.StageBreakdownAction; +import org.datadog.jenkins.plugins.datadog.model.DatadogPluginAction; import org.datadog.jenkins.plugins.datadog.steps.DatadogPipelineAction; -import org.datadog.jenkins.plugins.datadog.traces.BuildSpanAction; import org.datadog.jenkins.plugins.datadog.traces.CITags; -import org.datadog.jenkins.plugins.datadog.traces.IsPipelineAction; -import org.datadog.jenkins.plugins.datadog.traces.StepDataAction; -import org.datadog.jenkins.plugins.datadog.traces.StepTraceDataAction; import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings; import org.datadog.jenkins.plugins.datadog.util.TagsUtil; import org.jenkinsci.plugins.pipeline.StageStatus; @@ -93,13 +79,14 @@ of this software and associated documentation files (the "Software"), to deal import org.jenkinsci.plugins.workflow.actions.QueueItemAction; import org.jenkinsci.plugins.workflow.actions.StageAction; import org.jenkinsci.plugins.workflow.actions.ThreadNameAction; +import org.jenkinsci.plugins.workflow.actions.TimingAction; import org.jenkinsci.plugins.workflow.actions.WarningAction; -import org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode; import org.jenkinsci.plugins.workflow.flow.FlowExecution; +import org.jenkinsci.plugins.workflow.flow.FlowExecutionOwner; import org.jenkinsci.plugins.workflow.graph.BlockEndNode; import org.jenkinsci.plugins.workflow.graph.BlockStartNode; -import org.jenkinsci.plugins.workflow.graph.FlowEndNode; import org.jenkinsci.plugins.workflow.graph.FlowNode; +import org.jenkinsci.plugins.workflow.job.WorkflowRun; public class DatadogUtilities { @@ -185,7 +172,7 @@ public static Map> getBuildTags(Run run, EnvVars envVars) { * 
Pipeline extraTags if any are configured in the Job from DatadogPipelineAction. * * @param run - Current build - * @return A {@link HashMap} containing the key,value pairs of tags if any. + * @return A {@link Map} containing the key,value pairs of tags if any. */ public static Map> getTagsFromPipelineAction(Run run) { // pipeline defined tags @@ -193,25 +180,22 @@ public static Map> getTagsFromPipelineAction(Run run) { DatadogPipelineAction action = run.getAction(DatadogPipelineAction.class); if (action != null) { List pipelineTags = action.getTags(); - for (int i = 0; i < pipelineTags.size(); i++) { - String[] tagItem = pipelineTags.get(i).replaceAll(" ", "").split(":", 2); + for (String pipelineTag : pipelineTags) { + String[] tagItem = pipelineTag.replaceAll(" ", "").split(":", 2); if (tagItem.length == 2) { String tagName = tagItem[0]; String tagValue = tagItem[1]; - Set tagValues = result.containsKey(tagName) ? result.get(tagName) : new HashSet(); + Set tagValues = result.computeIfAbsent(tagName, k -> new HashSet<>()); tagValues.add(tagValue.toLowerCase()); - result.put(tagName, tagValues); } else if (tagItem.length == 1) { String tagName = tagItem[0]; - Set tagValues = result.containsKey(tagName) ? result.get(tagName) : new HashSet(); + Set tagValues = result.computeIfAbsent(tagName, k -> new HashSet<>()); tagValues.add(""); // no values - result.put(tagName, tagValues); } else { logger.fine(String.format("Ignoring the tag %s. 
It is empty.", tagItem)); } } } - return result; } @@ -521,7 +505,7 @@ public static String getHostname(EnvVars envVars) { } final DatadogGlobalConfiguration datadogGlobalConfig = getDatadogGlobalDescriptor(); - if (datadogGlobalConfig != null){ + if (datadogGlobalConfig != null) { if (datadogGlobalConfig.isUseAwsInstanceHostname()) { try { logger.fine("Attempting to resolve AWS instance ID for hostname"); @@ -545,11 +529,6 @@ public static String getHostname(EnvVars envVars) { } } - if (isValidHostname(hostname)) { - logger.fine("Using hostname found in $HOSTNAME controller environment variable. Hostname: " + hostname); - return hostname; - } - hostname = System.getenv("HOSTNAME"); if (isValidHostname(hostname)) { logger.fine("Using hostname found in $HOSTNAME controller environment variable. Hostname: " + hostname); @@ -607,25 +586,6 @@ public static String getHostname(EnvVars envVars) { return null; } - /** - * Fetches the environment variables from the worker and returns the value - * of DD_CI_HOSTNAME if set. - * - * @param run - Current build - * @return the specified hostname or an empty Optional if not set - */ - public static Optional getHostnameFromWorkerEnv(Run run) { - try { - Map env = run.getEnvironment(new LogTaskListener(logger, Level.INFO)); - String envHostname = env.get(DatadogGlobalConfiguration.DD_CI_HOSTNAME); - if (StringUtils.isNotEmpty(envHostname)) { - return Optional.of(envHostname); - } - } catch (IOException | InterruptedException e) { - } - return Optional.empty(); - } - /** * Validator function to ensure that the hostname is valid. Also, fails on * empty String. 
@@ -761,14 +721,6 @@ public static long currentTimeMillis() { return System.currentTimeMillis(); } - public static String getFileName(XmlFile file) { - if (file == null || file.getFile() == null || file.getFile().getName().isEmpty()) { - return "unknown"; - } else { - return file.getFile().getName(); - } - } - public static String getJenkinsUrl() { Jenkins jenkins = Jenkins.getInstance(); if (jenkins == null) { @@ -819,8 +771,8 @@ public static String getResultTag(@Nonnull FlowNode node) { * @param flowNode the flow node to evaluate * @return flag indicating if a flowNode is a Stage node. */ - public static boolean isStageNode(BlockStartNode flowNode) { - if (flowNode == null) { + public static boolean isStageNode(FlowNode flowNode) { + if (!(flowNode instanceof BlockStartNode)) { return false; } if (flowNode.getAction(StageAction.class) != null) { @@ -836,19 +788,22 @@ public static boolean isStageNode(BlockStartNode flowNode) { } /** - * Returns true if a {@code FlowNode} is a Pipeline node. - * - * @param flowNode the flow node to evaluate - * @return flag indicating if a flowNode is a Pipeline node. + * Returns enclosing stage node for the given node. + * Never returns the node itself. */ - public static boolean isPipelineNode(FlowNode flowNode) { - return flowNode instanceof FlowEndNode; + public static BlockStartNode getEnclosingStageNode(FlowNode node) { + for (BlockStartNode block : node.iterateEnclosingBlocks()) { + if (DatadogUtilities.isStageNode(block)) { + return block; + } + } + return null; } /** * Returns a normalized result for traces. 
* - * @param result (success, failure, error, aborted, not_build, canceled, skipped, unknown) + * @param result (success, failure, error, aborted, not_build, canceled, skipped, unstable, unknown) * @return the normalized result for the traces based on the jenkins result */ public static String statusFromResult(String result) { @@ -866,7 +821,7 @@ public static String statusFromResult(String result) { } @SuppressFBWarnings("NP_NULL_ON_SOME_PATH") - public static void severe(Logger logger, Throwable e, String message){ + public static void severe(Logger logger, Throwable e, String message) { if (e != null) { String stackTrace = ExceptionUtils.getStackTrace(e); message = (message != null ? message : "An unexpected error occurred: ") + stackTrace; @@ -963,28 +918,18 @@ public static String toJson(final Map map) { */ public static void cleanUpTraceActions(final Run run) { if (run != null) { - run.removeActions(BuildSpanAction.class); - run.removeActions(StepDataAction.class); - run.removeActions(CIGlobalTagsAction.class); - run.removeActions(GitCommitAction.class); - run.removeActions(GitRepositoryAction.class); - run.removeActions(PipelineNodeInfoAction.class); - run.removeActions(PipelineQueueInfoAction.class); - run.removeActions(StageBreakdownAction.class); - run.removeActions(IsPipelineAction.class); - run.removeActions(StepTraceDataAction.class); + // Each call to removeActions triggers persisting run data to disc. + // To avoid writing to disc multiple times, we only call removeActions once with the marker interface as the argument. + run.removeActions(DatadogPluginAction.class); } } - /** - * Check if a run is from a Jenkins pipeline. - * This action is added if the run is based on FlowNodes. - * - * @param run the current run. - * @return true if is a Jenkins pipeline. 
- */ - public static boolean isPipeline(final Run run) { - return run != null && run.getAction(IsPipelineAction.class) != null; + public static void cleanUpTraceActions(FlowNode flowNode) { + if (flowNode != null) { + // Each call to removeActions triggers persisting node data to disc. + // To avoid writing to disc multiple times, we only call removeActions once with the marker interface as the argument. + flowNode.removeActions(DatadogPluginAction.class); + } } public static String getCatchErrorResult(BlockStartNode startNode) { @@ -1007,6 +952,7 @@ public static String getCatchErrorResult(BlockStartNode startNode) { /** * Checks to see if event should be sent to client + * * @param eventName - the event to check * @return true if event should be sent to client */ @@ -1020,11 +966,12 @@ public static boolean shouldSendEvent(String eventName) { /** * Creates inclusion list for events by looking at toggles and inclusion/exclusion string lists + * * @return list of event name strings that can be sent */ private static List createIncludeLists() { List includedEvents = new ArrayList(Arrays.asList( - DatadogGlobalConfiguration.DEFAULT_EVENTS.split(","))); + DatadogGlobalConfiguration.DEFAULT_EVENTS.split(","))); DatadogGlobalConfiguration cfg = getDatadogGlobalDescriptor(); String includeEvents = cfg.getIncludeEvents(); @@ -1036,13 +983,13 @@ private static List createIncludeLists() { if (cfg.isEmitSystemEvents()) { includedEvents.addAll(new ArrayList( - Arrays.asList(DatadogGlobalConfiguration.SYSTEM_EVENTS.split(",")) + Arrays.asList(DatadogGlobalConfiguration.SYSTEM_EVENTS.split(",")) )); } if (cfg.isEmitSecurityEvents()) { includedEvents.addAll(new ArrayList( - Arrays.asList(DatadogGlobalConfiguration.SECURITY_EVENTS.split(",")) + Arrays.asList(DatadogGlobalConfiguration.SECURITY_EVENTS.split(",")) )); } @@ -1055,19 +1002,40 @@ private static List createIncludeLists() { } /** - * Check if flowNode is the last node of the pipeline. 
- * @param flowNode flowNode to check - * @return true if flowNode is the last node of the pipeline + * Returns the {@code Throwable} of a certain {@code FlowNode}, if it has errors. + * + * @return throwable associated with a certain flowNode. */ - public static boolean isLastNode(FlowNode flowNode) { - return flowNode instanceof FlowEndNode; + public static Throwable getErrorObj(FlowNode flowNode) { + final ErrorAction errorAction = flowNode.getAction(ErrorAction.class); + return (errorAction != null) ? errorAction.getError() : null; } - public static Long getNanosInQueue(BuildPipelineNode current) { - // If the concrete queue time for this node is not set - // we look for the queue time propagated by its children. - return Math.max(Math.max(current.getNanosInQueue(), current.getPropagatedNanosInQueue()), 0); + @Nullable + public static TaskListener getTaskListener(Run run) throws IOException { + if (run instanceof WorkflowRun) { + WorkflowRun workflowRun = (WorkflowRun) run; + FlowExecution execution = workflowRun.getExecution(); + if (execution != null) { + FlowExecutionOwner owner = execution.getOwner(); + return owner.getListener(); + } + } + return null; } + /** + * Returns the startTime of a certain {@code FlowNode}, if it has time information. + * @return startTime of the flowNode in milliseconds. 
+ */ + public static long getTimeMillis(FlowNode flowNode) { + if (flowNode != null) { + TimingAction time = flowNode.getAction(TimingAction.class); + if(time != null) { + return time.getStartTime(); + } + } + return -1L; + } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java index b73756625..2d74e3f6c 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java @@ -142,11 +142,15 @@ public static DatadogClient getInstance(String hostname, Integer port, Integer l } protected DatadogAgentClient(String hostname, Integer port, Integer logCollectionPort, Integer traceCollectionPort) { + this(hostname, port, logCollectionPort, traceCollectionPort, HTTP_TIMEOUT_EVP_PROXY_MS); + } + + protected DatadogAgentClient(String hostname, Integer port, Integer logCollectionPort, Integer traceCollectionPort, long evpProxyTimeoutMillis) { this.hostname = hostname; this.port = port; this.logCollectionPort = logCollectionPort; this.traceCollectionPort = traceCollectionPort; - this.client = new HttpClient(HTTP_TIMEOUT_EVP_PROXY_MS); + this.client = new HttpClient(evpProxyTimeoutMillis); } public static ConnectivityResult checkConnectivity(final String host, final int port) { diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/events/AbstractDatadogBuildEvent.java b/src/main/java/org/datadog/jenkins/plugins/datadog/events/AbstractDatadogBuildEvent.java index c8fc1013b..0d17d1eb7 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/events/AbstractDatadogBuildEvent.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/events/AbstractDatadogBuildEvent.java @@ -38,7 +38,7 @@ public abstract class AbstractDatadogBuildEvent extends AbstractDatadogEvent { public AbstractDatadogBuildEvent(BuildData buildData) { 
this.buildData = buildData; - setHost(buildData.getHostname("unknown")); + setHost(buildData.getHostname(DatadogUtilities.getHostname(null))); setJenkinsUrl(buildData.getJenkinsUrl("unknown")); setAggregationKey(buildData.getJobName("unknown")); setDate(buildData.getEndTime(DatadogUtilities.currentTimeMillis()) / 1000); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListener.java b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListener.java index 10ecf4d2c..90d934d4d 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListener.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListener.java @@ -26,7 +26,6 @@ of this software and associated documentation files (the "Software"), to deal package org.datadog.jenkins.plugins.datadog.listeners; import static org.datadog.jenkins.plugins.datadog.DatadogUtilities.cleanUpTraceActions; -import static org.datadog.jenkins.plugins.datadog.DatadogUtilities.isPipeline; import static org.datadog.jenkins.plugins.datadog.traces.TracerConstants.SPAN_ID_ENVVAR_KEY; import static org.datadog.jenkins.plugins.datadog.traces.TracerConstants.TRACE_ID_ENVVAR_KEY; @@ -58,13 +57,12 @@ of this software and associated documentation files (the "Software"), to deal import org.datadog.jenkins.plugins.datadog.events.BuildFinishedEventImpl; import org.datadog.jenkins.plugins.datadog.events.BuildStartedEventImpl; import org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.datadog.jenkins.plugins.datadog.model.CIGlobalTagsAction; +import org.datadog.jenkins.plugins.datadog.model.GitCommitAction; +import org.datadog.jenkins.plugins.datadog.model.GitRepositoryAction; import org.datadog.jenkins.plugins.datadog.model.PipelineQueueInfoAction; -import org.datadog.jenkins.plugins.datadog.model.StageBreakdownAction; +import org.datadog.jenkins.plugins.datadog.model.TraceInfoAction; import 
org.datadog.jenkins.plugins.datadog.traces.BuildSpanAction; import org.datadog.jenkins.plugins.datadog.traces.BuildSpanManager; -import org.datadog.jenkins.plugins.datadog.traces.StepDataAction; -import org.datadog.jenkins.plugins.datadog.traces.StepTraceDataAction; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriter; import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriterFactory; @@ -115,45 +113,20 @@ public void onInitialize(Run run) { final TraceSpan buildSpan = new TraceSpan("jenkins.build", TimeUnit.MILLISECONDS.toNanos(buildData.getStartTime(0L))); BuildSpanManager.get().put(buildData.getBuildTag(""), buildSpan); - // The buildData object is stored in the BuildSpanAction to be updated - // by the information that will be calculated when the pipeline listeners - // were executed. This is needed because if the user build is based on - // Jenkins Pipelines, there are many information that is missing when the - // root span is created, such as Git info (this is calculated in an inner step - // of the pipeline) - final BuildSpanAction buildSpanAction = new BuildSpanAction(buildData, buildSpan.context()); + final BuildSpanAction buildSpanAction = new BuildSpanAction(buildSpan.context()); run.addAction(buildSpanAction); - final StepDataAction stepDataAction = new StepDataAction(); - run.addAction(stepDataAction); + run.addAction(new GitCommitAction()); + run.addAction(new GitRepositoryAction()); + run.addAction(new TraceInfoAction()); + run.addAction(new PipelineQueueInfoAction()); - // Traces - startBuildTrace(buildData, run); logger.fine("End DatadogBuildListener#onInitialize"); } catch (Exception e) { DatadogUtilities.severe(logger, e, "Failed to process build initialization"); } } - private void startBuildTrace(final BuildData buildData, Run run) { - if (!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) { - logger.fine("CI Visibility is 
disabled"); - return; - } - - final StepTraceDataAction stepTraceDataAction = new StepTraceDataAction(); - run.addAction(stepTraceDataAction); - - final StageBreakdownAction stageBreakdownAction = new StageBreakdownAction(); - run.addAction(stageBreakdownAction); - - final PipelineQueueInfoAction pipelineQueueInfoAction = new PipelineQueueInfoAction(); - run.addAction(pipelineQueueInfoAction); - - final CIGlobalTagsAction ciGlobalTags = new CIGlobalTagsAction(buildData.getTagsForTraces()); - run.addAction(ciGlobalTags); - } - /** * Called before the SCMCheckout is run in a Jenkins build. * This method is called after onInitialize callback. @@ -164,7 +137,7 @@ public Environment setUpEnvironment(AbstractBuild build, Launcher launcher, Buil logger.fine("Start DatadogBuildListener#setUpEnvironment"); final BuildSpanAction buildSpanAction = build.getAction(BuildSpanAction.class); - if(buildSpanAction == null || buildSpanAction.getBuildData() == null) { + if(buildSpanAction == null || buildSpanAction.getBuildSpanContext() == null) { return new Environment() { }; } @@ -233,15 +206,16 @@ public void onStarted(Run run, TaskListener listener) { Queue queue = getQueue(); Queue.Item item = queue.getItem(run.getQueueId()); Map> tags = buildData.getTags(); - String hostname = buildData.getHostname("unknown"); + String hostname = buildData.getHostname(DatadogUtilities.getHostname(null)); try (Metrics metrics = client.metrics()) { long waitingMs = (DatadogUtilities.currentTimeMillis() - item.getInQueueSince()); metrics.gauge("jenkins.job.waiting", TimeUnit.MILLISECONDS.toSeconds(waitingMs), hostname, tags); - final BuildSpanAction buildSpanAction = run.getAction(BuildSpanAction.class); - if(buildSpanAction != null && buildSpanAction.getBuildData() != null) { - buildSpanAction.getBuildData().setMillisInQueue(waitingMs); + PipelineQueueInfoAction queueInfoAction = run.getAction(PipelineQueueInfoAction.class); + if (queueInfoAction != null) { + 
queueInfoAction.setQueueTimeMillis(waitingMs); } + } catch (NullPointerException e) { logger.warning("Unable to compute 'waiting' metric. " + "item.getInQueueSince() unavailable, possibly due to worker instance provisioning"); @@ -303,7 +277,7 @@ public void onCompleted(Run run, @Nonnull TaskListener listener) { // Send a metric Map> tags = buildData.getTags(); - String hostname = buildData.getHostname("unknown"); + String hostname = buildData.getHostname(DatadogUtilities.getHostname(null)); metrics.gauge("jenkins.job.duration", buildData.getDuration(0L) / 1000, hostname, tags); logger.fine(String.format("[%s]: Duration: %s", buildData.getJobName(null), toTimeString(buildData.getDuration(0L)))); @@ -415,15 +389,8 @@ public void onFinalized(Run run) { } catch (Exception e) { DatadogUtilities.severe(logger, e, "Failed to process build finalization"); } finally { - // If the run belongs to a Jenkins pipeline (based on FlowNodes), - // the `onFinalized` method is executed before processing the last node. - // This means we cannot clean up trace actions at this point if the run is a Jenkins pipeline. - // The trace actions will be removed after last FlowNode has been processed. - // (See DatadogTracePipelineLogic.execute(...) method) - if(!isPipeline(run)) { - // Explicit removal of InvisibleActions used to collect Traces when the Run finishes. - cleanUpTraceActions(run); - } + // Explicit removal of InvisibleActions used to collect Traces when the Run finishes. 
+ cleanUpTraceActions(run); } } @@ -467,7 +434,7 @@ public void onDeleted(Run run) { } // Get the list of global tags to apply - String hostname = buildData.getHostname("unknown"); + String hostname = buildData.getHostname(DatadogUtilities.getHostname(null)); // Send an event final boolean shouldSendEvent = DatadogUtilities.shouldSendEvent(BuildAbortedEventImpl.BUILD_ABORTED_EVENT_NAME); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java index 226216c80..dfdcd49a8 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java @@ -25,30 +25,14 @@ of this software and associated documentation files (the "Software"), to deal package org.datadog.jenkins.plugins.datadog.listeners; -import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_AUTHOR_DATE; -import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_AUTHOR_EMAIL; -import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_AUTHOR_NAME; -import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_COMMITTER_DATE; -import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_COMMITTER_EMAIL; -import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_COMMITTER_NAME; -import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_MESSAGE; -import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_TAG; -import static org.datadog.jenkins.plugins.datadog.util.git.GitUtils.isCommitInfoAlreadyCreated; -import static org.datadog.jenkins.plugins.datadog.util.git.GitUtils.isRepositoryInfoAlreadyCreated; -import static 
org.datadog.jenkins.plugins.datadog.util.git.GitUtils.isValidCommit; -import static org.datadog.jenkins.plugins.datadog.util.git.GitUtils.isValidRepositoryURL; - import com.cloudbees.workflow.rest.external.FlowNodeExt; -import hudson.EnvVars; import hudson.Extension; import hudson.model.Queue; -import hudson.model.Run; -import hudson.model.TaskListener; import java.io.IOException; import java.util.Iterator; +import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.TimeUnit; import java.util.logging.Logger; import javax.annotation.CheckForNull; import javax.annotation.Nonnull; @@ -60,22 +44,17 @@ of this software and associated documentation files (the "Software"), to deal import org.datadog.jenkins.plugins.datadog.clients.Metrics; import org.datadog.jenkins.plugins.datadog.model.BuildData; import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; -import org.datadog.jenkins.plugins.datadog.model.GitCommitAction; -import org.datadog.jenkins.plugins.datadog.model.GitRepositoryAction; -import org.datadog.jenkins.plugins.datadog.model.StageBreakdownAction; -import org.datadog.jenkins.plugins.datadog.model.StageData; -import org.datadog.jenkins.plugins.datadog.traces.BuildSpanAction; +import org.datadog.jenkins.plugins.datadog.model.Status; +import org.datadog.jenkins.plugins.datadog.model.node.NodeInfoAction; +import org.datadog.jenkins.plugins.datadog.model.node.StatusAction; import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriter; import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriterFactory; import org.datadog.jenkins.plugins.datadog.util.TagsUtil; -import org.datadog.jenkins.plugins.datadog.util.git.GitUtils; -import org.jenkinsci.plugins.gitclient.GitClient; import org.jenkinsci.plugins.workflow.actions.ThreadNameAction; import org.jenkinsci.plugins.workflow.actions.TimingAction; import org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode; import 
org.jenkinsci.plugins.workflow.cps.nodes.StepEndNode; import org.jenkinsci.plugins.workflow.cps.nodes.StepStartNode; -import org.jenkinsci.plugins.workflow.flow.FlowExecution; import org.jenkinsci.plugins.workflow.flow.GraphListener; import org.jenkinsci.plugins.workflow.graph.BlockEndNode; import org.jenkinsci.plugins.workflow.graph.BlockStartNode; @@ -95,33 +74,13 @@ public class DatadogGraphListener implements GraphListener { @Override public void onNewHead(FlowNode flowNode) { WorkflowRun run = getRun(flowNode); - if (run != null) { - BuildSpanAction buildSpanAction = run.getAction(BuildSpanAction.class); - if (buildSpanAction != null) { - BuildData buildData = buildSpanAction.getBuildData(); - if(!DatadogUtilities.isLastNode(flowNode)){ - final BuildPipelineNode pipelineNode = buildPipelineNode(flowNode); - updateStageBreakdown(run, pipelineNode); - updateBuildData(buildData, run, pipelineNode, flowNode); - } - } - } - - TraceWriter traceWriter = TraceWriterFactory.getTraceWriter(); - if (traceWriter != null) { - try { - traceWriter.submitPipeline(flowNode, run); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - DatadogUtilities.severe(logger, e, "Interrupted while submitting pipeline trace for node " + flowNode.getDisplayName() + " in run " + (run != null ? run.getDisplayName() : "")); - } catch (Exception e) { - DatadogUtilities.severe(logger, e, "Error while submitting pipeline trace for node " + flowNode.getDisplayName() + " in run " + (run != null ? run.getDisplayName() : "")); - } + // Filter the node if the job has been excluded from the Datadog plugin configuration. 
+ if (run == null || !DatadogUtilities.isJobTracked(run.getParent().getFullName())) { + return; } - DatadogClient client = ClientFactory.getClient(); - if (client == null){ - return; + if (DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) { + processNode(run, flowNode); } if (!isMonitored(flowNode)) { @@ -132,26 +91,46 @@ public void onNewHead(FlowNode flowNode) { StepStartNode startNode = endNode.getStartNode(); int stageDepth = 0; String directParentName = null; + NodeInfoAction nodeInfo = startNode.getAction(NodeInfoAction.class); for (BlockStartNode node : startNode.iterateEnclosingBlocks()) { if (DatadogUtilities.isStageNode(node)) { - if(directParentName == null){ + if (directParentName == null) { directParentName = getStageName(node); } + if (nodeInfo == null) { + nodeInfo = node.getAction(NodeInfoAction.class); + } stageDepth++; } } - if(directParentName == null){ + if (directParentName == null) { directParentName = "root"; } - if (run == null){ + + DatadogClient client = ClientFactory.getClient(); + if (client == null) { return; } try (Metrics metrics = client.metrics()) { String result = DatadogUtilities.getResultTag(endNode); + + String hostname = null; + if (nodeInfo != null) { + String nodeHostname = nodeInfo.getNodeHostname(); + if (nodeHostname != null) { + hostname = nodeHostname; + } else if (DatadogUtilities.isMainNode(nodeInfo.getNodeName())) { + hostname = DatadogUtilities.getHostname(null); + } + } + BuildData buildData = new BuildData(run, flowNode.getExecution().getOwner().getListener()); - String hostname = buildData.getHostname(""); + if (hostname == null) { + hostname = buildData.getHostname(DatadogUtilities.getHostname(null)); + } Map> tags = buildData.getTags(); + TagsUtil.addTagToTags(tags, "stage_name", getStageName(startNode)); TagsUtil.addTagToTags(tags, "parent_stage_name", directParentName); TagsUtil.addTagToTags(tags, "stage_depth", String.valueOf(stageDepth)); @@ -167,253 +146,126 @@ public void 
onNewHead(FlowNode flowNode) { } } - @Nullable - private BuildPipelineNode buildPipelineNode(FlowNode flowNode) { - long start = System.currentTimeMillis(); + private void processNode(WorkflowRun run, FlowNode flowNode) { try { + List parents = flowNode.getParents(); + for (FlowNode parent : parents) { + if (parent instanceof StepAtomNode) { + // we can only report step node when the next node begins execution, + // since we use start time of the next node to compute end time of the step node + processNode(run, parent, flowNode); + } + } + if (flowNode instanceof BlockEndNode) { - return new BuildPipelineNode((BlockEndNode) flowNode); - } else if(flowNode instanceof StepAtomNode) { - return new BuildPipelineNode((StepAtomNode) flowNode); - } else { - return null; + processStageNode(run, (BlockEndNode) flowNode); } - } finally { - long end = System.currentTimeMillis(); - DatadogAudit.log("DatadogTracePipelineLogic.buildPipelineNode", start, end); + } catch (Exception e) { + DatadogUtilities.severe(logger, e, "Could not process pipeline node " + flowNode.getId() + " (" + flowNode.getDisplayName() + ")"); } } - private void updateBuildData(BuildData buildData, Run run, BuildPipelineNode pipelineNode, FlowNode node) { - long start = System.currentTimeMillis(); - try { - if(pipelineNode == null){ - return; - } - - buildData.setPropagatedMillisInQueue(TimeUnit.NANOSECONDS.toMillis(DatadogUtilities.getNanosInQueue(pipelineNode))); - - final String gitBranch = GitUtils.resolveGitBranch(pipelineNode.getEnvVars(), null); - if(gitBranch != null && buildData.getBranch("").isEmpty()) { - buildData.setBranch(gitBranch); - } - - final String gitUrl = GitUtils.resolveGitRepositoryUrl(pipelineNode.getEnvVars(), null); - if(gitUrl != null && buildData.getGitUrl("").isEmpty()) { - buildData.setGitUrl(gitUrl); - } - - final String gitCommit = GitUtils.resolveGitCommit(pipelineNode.getEnvVars(), null); - final String buildDataGitCommit = buildData.getGitCommit(""); - if(gitCommit != 
null && (buildDataGitCommit.isEmpty() || !isValidCommit(buildDataGitCommit))){ - buildData.setGitCommit(gitCommit); - } - - // Git tag can only be set manually by the user. - // Otherwise, Jenkins reports it in the branch. - final String gitTag = pipelineNode.getEnvVars().get(DD_GIT_TAG); - if(gitTag != null && buildData.getGitTag("").isEmpty()){ - buildData.setGitTag(gitTag); - } - - // Git data supplied by the user has prevalence. We set them first. - // Only the data that has not been set will be updated later. - final String ddGitMessage = pipelineNode.getEnvVars().get(DD_GIT_COMMIT_MESSAGE); - if(ddGitMessage != null && buildData.getGitMessage("").isEmpty()) { - buildData.setGitMessage(ddGitMessage); - } - - final String ddGitAuthorName = pipelineNode.getEnvVars().get(DD_GIT_COMMIT_AUTHOR_NAME); - if(ddGitAuthorName != null && buildData.getGitAuthorName("").isEmpty()) { - buildData.setGitAuthorName(ddGitAuthorName); - } - - final String ddGitAuthorEmail = pipelineNode.getEnvVars().get(DD_GIT_COMMIT_AUTHOR_EMAIL); - if(ddGitAuthorEmail != null && buildData.getGitAuthorEmail("").isEmpty()) { - buildData.setGitAuthorEmail(ddGitAuthorEmail); - } - - final String ddGitAuthorDate = pipelineNode.getEnvVars().get(DD_GIT_COMMIT_AUTHOR_DATE); - if(ddGitAuthorDate != null && buildData.getGitAuthorDate("").isEmpty()) { - buildData.setGitAuthorDate(ddGitAuthorDate); - } - - final String ddGitCommitterName = pipelineNode.getEnvVars().get(DD_GIT_COMMIT_COMMITTER_NAME); - if(ddGitCommitterName != null && buildData.getGitCommitterName("").isEmpty()){ - buildData.setGitCommitterName(ddGitCommitterName); - } - - final String ddGitCommitterEmail = pipelineNode.getEnvVars().get(DD_GIT_COMMIT_COMMITTER_EMAIL); - if(ddGitCommitterEmail != null && buildData.getGitCommitterEmail("").isEmpty()){ - buildData.setGitCommitterEmail(ddGitCommitterEmail); - } - - final String ddGitCommitterDate = pipelineNode.getEnvVars().get(DD_GIT_COMMIT_COMMITTER_DATE); - if(ddGitCommitterDate != null && 
buildData.getGitCommitterDate("").isEmpty()){ - buildData.setGitCommitterDate(ddGitCommitterDate); - } - - // The Git client will be not null if there is some git information to calculate. - // We use the same Git client instance to calculate all git information - // because creating a Git client is a very expensive operation. - final GitClient gitClient = getGitClient(run, pipelineNode, node, gitUrl, gitCommit); - - if(gitClient != null) { - final GitCommitAction commitAction = buildGitCommitAction(run, gitClient, pipelineNode); - if(commitAction != null) { - if(buildData.getGitMessage("").isEmpty()){ - buildData.setGitMessage(commitAction.getMessage()); - } - - if(buildData.getGitAuthorName("").isEmpty()) { - buildData.setGitAuthorName(commitAction.getAuthorName()); - } - - if(buildData.getGitAuthorEmail("").isEmpty()){ - buildData.setGitAuthorEmail(commitAction.getAuthorEmail()); - } - - if(buildData.getGitAuthorDate("").isEmpty()){ - buildData.setGitAuthorDate(commitAction.getAuthorDate()); - } - - if(buildData.getGitCommitterName("").isEmpty()){ - buildData.setGitCommitterName(commitAction.getCommitterName()); - } - - if(buildData.getGitCommitterEmail("").isEmpty()){ - buildData.setGitCommitterEmail(commitAction.getCommitterEmail()); - } - - if(buildData.getGitCommitterDate("").isEmpty()){ - buildData.setGitCommitterDate(commitAction.getCommitterDate()); - } - } - } - - if(gitClient != null) { - final GitRepositoryAction repositoryAction = buildGitRepositoryAction(run, gitClient, pipelineNode); - if(repositoryAction != null) { - if(buildData.getGitDefaultBranch("").isEmpty()) { - buildData.setGitDefaultBranch(repositoryAction.getDefaultBranch()); - } - } - } + private void processStageNode(WorkflowRun run, BlockEndNode blockEndNode) { + if (!DatadogUtilities.isStageNode(blockEndNode.getStartNode())) { + return; + } + processNode(run, blockEndNode, null); + } - final String workspace = pipelineNode.getWorkspace(); - if(workspace != null && 
buildData.getWorkspace("").isEmpty()){ - buildData.setWorkspace(workspace); - } + private void processNode(WorkflowRun run, FlowNode node, FlowNode nextNode) { + try { + BuildPipelineNode pipelineNode = buildPipelineNode(run, node, nextNode); + propagateStatus(node, nextNode); - final String nodeName = pipelineNode.getNodeName(); - if(nodeName != null && buildData.getNodeName("").isEmpty()){ - buildData.setNodeName(nodeName); + TraceWriter traceWriter = TraceWriterFactory.getTraceWriter(); + if (traceWriter != null) { + traceWriter.submitPipeline(pipelineNode, run); } - final String nodeHostname = pipelineNode.getNodeHostname(); - if(nodeHostname != null && buildData.getHostname("").isEmpty()) { - buildData.setHostname(nodeHostname); - } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + DatadogUtilities.severe(logger, e, "Interrupted while submitting pipeline trace for node " + node.getDisplayName() + " in run " + (run != null ? run.getDisplayName() : "")); + } catch (Exception e) { + DatadogUtilities.severe(logger, e, "Error while submitting pipeline trace for node " + node.getDisplayName() + " in run " + (run != null ? run.getDisplayName() : "")); } finally { - long end = System.currentTimeMillis(); - DatadogAudit.log("DatadogTracePipelineLogic.updateBuildData", start, end); + DatadogUtilities.cleanUpTraceActions(node); } } - /** - * Creates a new Git client only if there is a Git information pending to calculate. - * This method tries to avoid creating Git clients as much as possible cause it's a very expensive operation. - * @param run - * @param pipelineNode - * @param node - * @param gitUrl - * @param gitCommit - * @return a git client if there is some git information to calculate. In other cases, it returns null. 
- */ - private GitClient getGitClient(final Run run, final BuildPipelineNode pipelineNode, final FlowNode node, final String gitUrl, final String gitCommit) { - GitClient gitClient = null; - try { - if(!isValidCommit(gitCommit) && !isValidRepositoryURL(gitUrl)) { - return null; + private void propagateStatus(FlowNode flowNode, @Nullable FlowNode nextNode) { + Status status = getPropagatedStatus(flowNode, nextNode); + if (status == Status.UNSTABLE) { + BlockStartNode stageNode = DatadogUtilities.getEnclosingStageNode(flowNode); + if (stageNode != null) { + stageNode.addOrReplaceAction(new StatusAction(Status.UNSTABLE, true)); } - final boolean commitInfoAlreadyCreated = isCommitInfoAlreadyCreated(run, gitCommit); - final boolean repoInfoAlreadyCreated = isRepositoryInfoAlreadyCreated(run, gitUrl); - - // Only if there is some git information pending to obtain, we create a Git client. - if(!commitInfoAlreadyCreated || !repoInfoAlreadyCreated) { - final TaskListener listener = node.getExecution().getOwner().getListener(); - final EnvVars envVars = new EnvVars(pipelineNode.getEnvVars()); + } else if (status == Status.ERROR) { + // propagating "error" status is different from propagating "unstable", + // since error can be caught and suppressed + for (BlockStartNode enclosingNode : flowNode.iterateEnclosingBlocks()) { + String catchErrorResult = DatadogUtilities.getCatchErrorResult(enclosingNode); + if (catchErrorResult != null) { + // encountered a "catchError" or a "warnError" block; + // will propagate the updated result to the first visible (non-internal) node, and then stop + BlockStartNode stageNode = DatadogUtilities.getEnclosingStageNode(enclosingNode); + if (stageNode != null) { + stageNode.addOrReplaceAction(new StatusAction(Status.fromJenkinsResult(catchErrorResult), false)); + } + break; + } - // Create a new Git client is a very expensive operation. - // Avoid creating Git clients as much as possible. 
- gitClient = GitUtils.newGitClient(run, listener, envVars, pipelineNode.getNodeName(), pipelineNode.getWorkspace()); + if (isTraceable(enclosingNode)) { + enclosingNode.addOrReplaceAction(new StatusAction(status, true)); + break; + } } - } catch (Exception ex) { - logger.fine("Unable to get GitClient. Error: " + ex); } - return gitClient; } - private GitCommitAction buildGitCommitAction(Run run, GitClient gitClient, BuildPipelineNode pipelineNode) { - try { - final String gitCommit = GitUtils.resolveGitCommit(pipelineNode.getEnvVars(), null); - if(!isValidCommit(gitCommit)) { - return null; - } - - return GitUtils.buildGitCommitAction(run, gitClient, gitCommit); - } catch (Exception e) { - logger.fine("Unable to build GitCommitAction. Error: " + e); - return null; - } + private static boolean isTraceable(FlowNode node) { + return node instanceof BlockStartNode && DatadogUtilities.isStageNode(node) || node instanceof StepAtomNode; } - private GitRepositoryAction buildGitRepositoryAction(Run run, GitClient gitClient, BuildPipelineNode pipelineNode) { - try { - final String gitRepositoryURL = GitUtils.resolveGitRepositoryUrl(pipelineNode.getEnvVars(), null); - if(!isValidRepositoryURL(gitRepositoryURL)){ - return null; + private static Status getPropagatedStatus(FlowNode node, @Nullable FlowNode nextNode) { + Status nodeStatus = Status.fromJenkinsResult(DatadogUtilities.getResultTag(node)); + + StatusAction statusAction; + if (node instanceof BlockEndNode) { + BlockStartNode startNode = ((BlockEndNode) node).getStartNode(); + statusAction = startNode.getAction(StatusAction.class); + return statusAction != null && statusAction.isPropagate() ? Status.combine(nodeStatus, statusAction.getStatus()) : nodeStatus; + } else { // StepAtomNode + if (nodeStatus == Status.ERROR) { + // nextNode is the next node in the execution graph. + // If node is a step, the next node might be the end of the "script" block that wraps this step. 
+ // We check if this is the case (display function name is "{") + if (nextNode != null && "}".equals(nextNode.getDisplayFunctionName()) && nextNode.getError() == null) { + // Status is ERROR, but wrapping script block has no error object, + // most likely there is a "catch" block in a scripted pipeline, + // do not propagate status. + return Status.UNKNOWN; + } } - - final EnvVars envVars = new EnvVars(pipelineNode.getEnvVars()); - return GitUtils.buildGitRepositoryAction(run, gitClient, envVars, gitRepositoryURL); - } catch (Exception e) { - logger.fine("Unable to build GitRepositoryAction. Error: " + e); - return null; + return nodeStatus; } } - private void updateStageBreakdown(final Run run, BuildPipelineNode pipelineNode) { - if (!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) { - return; - } - + private BuildPipelineNode buildPipelineNode(WorkflowRun run, FlowNode node, FlowNode nextNode) { long start = System.currentTimeMillis(); try { - final StageBreakdownAction stageBreakdownAction = run.getAction(StageBreakdownAction.class); - if(stageBreakdownAction == null){ - return; - } - - if(pipelineNode == null){ - return; - } - - if(!BuildPipelineNode.NodeType.STAGE.equals(pipelineNode.getType())){ - return; + if (node instanceof StepAtomNode) { + return new BuildPipelineNode(run, (StepAtomNode) node, nextNode); + } else if (node instanceof BlockEndNode) { + BlockEndNode endNode = (BlockEndNode) node; + return new BuildPipelineNode(run, endNode.getStartNode(), endNode); + } else { + throw new IllegalArgumentException("Unexpected flow node type: " + node); } - final StageData stageData = StageData.builder() - .withName(pipelineNode.getName()) - .withStartTimeInMicros(pipelineNode.getStartTimeMicros()) - .withEndTimeInMicros(pipelineNode.getEndTimeMicros()) - .build(); - - stageBreakdownAction.put(stageData.getName(), stageData); } finally { long end = System.currentTimeMillis(); - 
DatadogAudit.log("DatadogTracePipelineLogic.updateStageBreakdown", start, end); + DatadogAudit.log("DatadogTracePipelineLogic.buildPipelineNode", start, end); } } @@ -452,24 +304,10 @@ private long getPauseDurationMillis(@Nonnull FlowNode startNode) { private boolean isMonitored(FlowNode flowNode) { // Filter the node out if it is not the end of step // Timing information is only available once the step has completed. - if (!(flowNode instanceof StepEndNode)) { - return false; - } - - // Filter the node if the job has been excluded from the Datadog plugin configuration. - WorkflowRun run = getRun(flowNode); - if (run == null || !DatadogUtilities.isJobTracked(run.getParent().getFullName())) { - return false; - } // Filter the node out if it is not the end of a stage. // The plugin only monitors timing information of stages - if (!DatadogUtilities.isStageNode(((StepEndNode) flowNode).getStartNode())) { - return false; - } - - // Finally return true as this node is the end of a monitored stage. - return true; + return flowNode instanceof StepEndNode && DatadogUtilities.isStageNode(((StepEndNode) flowNode).getStartNode()); } @CheckForNull @@ -505,25 +343,4 @@ long getTime(FlowNode startNode, FlowNode endNode) { } return 0; } - - /** - * Gets the jenkins run object of the specified executing workflow. 
- * - * @param exec execution of a workflow - * @return jenkins run object of a job - */ - private static @CheckForNull Run runFor(FlowExecution exec) { - Queue.Executable executable; - try { - executable = exec.getOwner().getExecutable(); - } catch (IOException x) { - DatadogUtilities.severe(logger, x, "Failed to get Jenkins executable"); - return null; - } - if (executable instanceof Run) { - return (Run) executable; - } else { - return null; - } - } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogQueueListener.java b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogQueueListener.java index 6e3722623..366572ba2 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogQueueListener.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogQueueListener.java @@ -4,19 +4,24 @@ import hudson.model.Queue; import hudson.model.Run; import hudson.model.queue.QueueListener; -import org.datadog.jenkins.plugins.datadog.model.FlowNodeQueueData; -import org.datadog.jenkins.plugins.datadog.model.PipelineQueueInfoAction; -import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings; -import org.jenkinsci.plugins.workflow.flow.FlowExecution; -import org.jenkinsci.plugins.workflow.graph.FlowNode; -import org.jenkinsci.plugins.workflow.support.steps.ExecutorStepExecution; - -import javax.annotation.CheckForNull; import java.io.IOException; +import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.logging.Logger; +import javax.annotation.CheckForNull; +import org.datadog.jenkins.plugins.datadog.DatadogUtilities; +import org.datadog.jenkins.plugins.datadog.model.PipelineQueueInfoAction; +import org.datadog.jenkins.plugins.datadog.model.node.DequeueAction; +import org.datadog.jenkins.plugins.datadog.model.node.EnqueueAction; +import 
org.datadog.jenkins.plugins.datadog.model.node.QueueInfoAction; +import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings; +import org.jenkinsci.plugins.workflow.flow.FlowExecution; +import org.jenkinsci.plugins.workflow.graph.BlockStartNode; +import org.jenkinsci.plugins.workflow.graph.FlowNode; +import org.jenkinsci.plugins.workflow.graph.FlowStartNode; +import org.jenkinsci.plugins.workflow.support.steps.ExecutorStepExecution; @Extension public class DatadogQueueListener extends QueueListener { @@ -26,6 +31,10 @@ public class DatadogQueueListener extends QueueListener { @Override public void onEnterBuildable(Queue.BuildableItem item) { try { + if (!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) { + return; + } + final Queue.Task task = item.task; if(task == null) { logger.fine("onEnterBuildable: item: " + item + ", task is null"); @@ -47,26 +56,7 @@ public void onEnterBuildable(Queue.BuildableItem item) { return; } - final Run run = runFor(flowNode.getExecution()); - if(run == null) { - logger.fine("onEnterBuildable FlowNode: " + flowNode + ", run is null."); - return; - } - - final PipelineQueueInfoAction queueAction = run.getAction(PipelineQueueInfoAction.class); - if(queueAction == null){ - logger.fine("onEnterBuildable: queueAction: is null"); - return; - } - - final FlowNodeQueueData flowNodeData = queueAction.get(run, flowNode.getId()); - if(flowNodeData != null) { - flowNodeData.setEnterBuildableNanos(System.nanoTime()); - } else { - final FlowNodeQueueData data = new FlowNodeQueueData(flowNode.getId()); - data.setEnterBuildableNanos(System.nanoTime()); - queueAction.put(run, flowNode.getId(), data); - } + flowNode.addOrReplaceAction(new EnqueueAction(System.nanoTime())); } catch (Exception e){ logger.severe("Error onEnterBuildable: item:" + item + ", exception: " + e); @@ -76,6 +66,10 @@ public void onEnterBuildable(Queue.BuildableItem item) { @Override public void onLeaveBuildable(Queue.BuildableItem item) { try { + if 
(!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) { + return; + } + final Queue.Task task = item.task; if(task == null) { logger.fine("onLeaveBuildable: item: " + item + ", task is null"); @@ -96,27 +90,61 @@ public void onLeaveBuildable(Queue.BuildableItem item) { return; } - Run run = runFor(flowNode.getExecution()); - if(run == null) { - logger.fine("onLeaveBuildable FlowNode: " + flowNode + ", run is null."); + EnqueueAction enqueueAction = flowNode.getAction(EnqueueAction.class); + if (enqueueAction == null) { + logger.fine("onLeaveBuildable FlowNode: " + flowNode + ", enqueueAction is null."); return; } - PipelineQueueInfoAction queueAction = run.getAction(PipelineQueueInfoAction.class); - if(queueAction == null){ - logger.fine("onLeaveBuildable: queueAction: is null"); - return; - } + long queueDurationNanos = System.nanoTime() - enqueueAction.getTimestampNanos(); + DequeueAction queueInfoAction = new DequeueAction(queueDurationNanos); + + // Replace enqueue action with dequeue action in one call to avoid writing to disk twice + flowNode.replaceActions(QueueInfoAction.class, queueInfoAction); + + propagateQueueTime(flowNode, queueInfoAction); - final FlowNodeQueueData flowNodeData = queueAction.get(run, flowNode.getId()); - if(flowNodeData != null) { - flowNodeData.setLeaveBuildableNanos(System.nanoTime()); - } } catch (Exception e){ logger.severe("Error onLeaveBuildable: item:" + item + ", exception: " + e); } } + private static void propagateQueueTime(FlowNode flowNode, DequeueAction queueInfoAction) { + if (flowNode.getDisplayName().contains("Allocate node")) { + BlockStartNode enclosingNode = DatadogUtilities.getEnclosingStageNode(flowNode); + if (enclosingNode != null) { + // propagate queue duration + enclosingNode.addOrReplaceAction(queueInfoAction); + } + } + + List parents = flowNode.getParents(); + if (parents.size() != 1) { + return; + } + + FlowNode parent = parents.iterator().next(); + if (!(parent instanceof 
FlowStartNode)) { + // propagate queue time to build level only if dequeued node is the direct child of pipeline node + return; + } + + Run run = runFor(flowNode.getExecution()); + if (run == null) { + logger.fine("onLeaveBuildable FlowNode: " + flowNode + ", run is null."); + return; + } + + PipelineQueueInfoAction pipelineQueueInfoAction = run.getAction(PipelineQueueInfoAction.class); + if (pipelineQueueInfoAction == null) { + logger.fine("onLeaveBuildable FlowNode: " + flowNode + ", pipelineQueueInfoAction is null."); + return; + } + + long queueTimeMillis = TimeUnit.NANOSECONDS.toMillis(queueInfoAction.getQueueTimeNanos()); + pipelineQueueInfoAction.setPropagatedQueueTimeMillis(queueTimeMillis); + } + /** * Gets the FlowNode from the PlaceholderTask asynchronous. * diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogSCMListener.java b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogSCMListener.java index 9e34a4af6..40b1dee16 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogSCMListener.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogSCMListener.java @@ -26,7 +26,9 @@ of this software and associated documentation files (the "Software"), to deal package org.datadog.jenkins.plugins.datadog.listeners; import static org.datadog.jenkins.plugins.datadog.events.SCMCheckoutCompletedEventImpl.SCM_CHECKOUT_COMPLETED_EVENT_NAME; +import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_DEFAULT_BRANCH; +import hudson.EnvVars; import hudson.Extension; import hudson.FilePath; import hudson.model.Run; @@ -34,19 +36,28 @@ of this software and associated documentation files (the "Software"), to deal import hudson.model.listeners.SCMListener; import hudson.scm.SCM; import hudson.scm.SCMRevisionState; +import java.io.File; +import java.io.IOException; +import java.util.Map; +import java.util.Set; +import java.util.logging.Logger; +import 
org.apache.commons.lang.StringUtils; import org.datadog.jenkins.plugins.datadog.DatadogClient; import org.datadog.jenkins.plugins.datadog.DatadogEvent; import org.datadog.jenkins.plugins.datadog.DatadogJobProperty; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; +import org.datadog.jenkins.plugins.datadog.audit.DatadogAudit; import org.datadog.jenkins.plugins.datadog.clients.ClientFactory; import org.datadog.jenkins.plugins.datadog.events.SCMCheckoutCompletedEventImpl; import org.datadog.jenkins.plugins.datadog.model.BuildData; - -import java.io.File; -import java.io.IOException; -import java.util.Map; -import java.util.Set; -import java.util.logging.Logger; +import org.datadog.jenkins.plugins.datadog.model.GitCommitAction; +import org.datadog.jenkins.plugins.datadog.model.GitRepositoryAction; +import org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils; +import org.datadog.jenkins.plugins.datadog.util.git.GitUtils; +import org.datadog.jenkins.plugins.datadog.util.git.RepositoryInfo; +import org.eclipse.jgit.lib.PersonIdent; +import org.eclipse.jgit.revwalk.RevCommit; +import org.jenkinsci.plugins.gitclient.GitClient; /** * This class registers an {@link SCMListener} with Jenkins which allows us to create @@ -74,16 +85,29 @@ public void onCheckout(Run build, SCM scm, FilePath workspace, TaskListene File changelogFile, SCMRevisionState pollingBaseline) throws Exception { try { // Process only if job is NOT in excluded and is in included + if (!DatadogUtilities.isJobTracked(build.getParent().getFullName())) { + return; + } + + if (isGit(scm)) { + EnvVars environment = build.getEnvironment(listener); + GitClient gitClient = GitUtils.newGitClient(listener, environment, workspace); + if (gitClient != null) { + populateCommitInfo(build, gitClient); + populateRepositoryInfo(build, gitClient, environment); + } + } + DatadogJobProperty prop = DatadogUtilities.getDatadogJobProperties(build); - if 
(!(DatadogUtilities.isJobTracked(build.getParent().getFullName()) - && prop != null && prop.isEmitSCMEvents())) { + if (prop == null || !prop.isEmitSCMEvents()) { return; } + logger.fine("Start DatadogSCMListener#onCheckout"); // Get Datadog Client Instance DatadogClient client = ClientFactory.getClient(); - if(client == null){ + if (client == null) { return; } @@ -114,4 +138,127 @@ public void onCheckout(Run build, SCM scm, FilePath workspace, TaskListene } } + private boolean isGit(SCM scm) { + if (scm == null) { + return false; + } + String scmType = scm.getType(); + return scmType != null && scmType.toLowerCase().contains("git"); + } + + private void populateCommitInfo(final Run run, final GitClient gitClient) { + long start = System.currentTimeMillis(); + try { + GitCommitAction commitAction = run.getAction(GitCommitAction.class); + if (commitAction == null) { + logger.fine("Unable to get git commit information. GitCommitAction is null."); + return; + } + + String gitCommit = commitAction.getCommit(); + if (gitCommit == null) { + gitCommit = "HEAD"; + } + + final RevCommit revCommit = GitUtils.searchRevCommit(gitClient, gitCommit); + if (revCommit == null) { + logger.fine("Unable to get git commit information. RevCommit is null. [gitCommit: " + gitCommit + "]"); + return; + } + + commitAction.setCommit(revCommit.getName()); + + String message; + try { + message = StringUtils.abbreviate(revCommit.getFullMessage(), 1500); + } catch (Exception e) { + logger.fine("Unable to obtain git commit full message. Selecting short message. 
Error: " + e); + message = revCommit.getShortMessage(); + } + commitAction.setMessage(message); + + final PersonIdent authorIdent = revCommit.getAuthorIdent(); + if (authorIdent != null) { + commitAction.setAuthorName(authorIdent.getName()); + commitAction.setAuthorEmail(authorIdent.getEmailAddress()); + commitAction.setAuthorDate(DatadogUtilities.toISO8601(authorIdent.getWhen())); + } + + final PersonIdent committerIdent = revCommit.getCommitterIdent(); + if (committerIdent != null) { + commitAction.setCommitterName(committerIdent.getName()); + commitAction.setCommitterEmail(committerIdent.getEmailAddress()); + commitAction.setCommitterDate(DatadogUtilities.toISO8601(committerIdent.getWhen())); + } + + } catch (Exception e) { + logger.fine("Unable to get git commit information. Error: " + e); + + } finally { + long end = System.currentTimeMillis(); + DatadogAudit.log("GitUtils.buildGitCommitAction", start, end); + } + } + + private void populateRepositoryInfo(final Run run, final GitClient gitClient, final EnvVars environment) { + long start = System.currentTimeMillis(); + try { + GitRepositoryAction repoAction = run.getAction(GitRepositoryAction.class); + if (repoAction == null) { + logger.fine("Unable to get git repo information. GitCommitAction is null."); + return; + } + + populateRepositoryInfoFromEnvVars(environment, repoAction); + + RepositoryInfo repositoryInfo = GitUtils.searchRepositoryInfo(gitClient); + if (repositoryInfo == null) { + logger.fine("Unable to build GitRepositoryAction. 
RepositoryInfo is null"); + return; + } + + if (repoAction.getRepositoryURL() != null && !repoAction.getRepositoryURL().equals(repositoryInfo.getRepoUrl())) { + logger.fine("Git repo URL differs, stored in action: " + repoAction.getRepositoryURL() + ", found: " + repositoryInfo.getRepoUrl()); + return; + } + + if (repoAction.getRepositoryURL() == null) { + repoAction.setRepositoryURL(repositoryInfo.getRepoUrl()); + } + + if (repoAction.getBranch() == null) { + repoAction.setBranch(repositoryInfo.getBranch()); + } + + if (repoAction.getDefaultBranch() == null) { + repoAction.setDefaultBranch(repositoryInfo.getDefaultBranch()); + } + + } catch (Exception e) { + logger.fine("Unable to get git repo information. Error: " + e); + + } finally { + long end = System.currentTimeMillis(); + DatadogAudit.log("GitUtils.buildGitRepositoryAction", start, end); + } + } + + /** This duplicates logic available in step listener, because step listener is not called for freestyle jobs. */ + private static void populateRepositoryInfoFromEnvVars(EnvVars environment, GitRepositoryAction repoAction) { + final String gitUrl = GitUtils.resolveGitRepositoryUrl(environment); + if (gitUrl != null && !gitUrl.isEmpty()) { + repoAction.setRepositoryURL(gitUrl); + } + + final String defaultBranch = GitInfoUtils.normalizeBranch(environment.get(DD_GIT_DEFAULT_BRANCH)); + if (defaultBranch != null && !defaultBranch.isEmpty()) { + repoAction.setDefaultBranch(defaultBranch); + } + + final String gitBranch = GitUtils.resolveGitBranch(environment); + if(gitBranch != null && !gitBranch.isEmpty()) { + repoAction.setBranch(gitBranch); + } + } + } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java index 403dfe4ad..be217cc22 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java +++ 
b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java @@ -1,12 +1,37 @@ package org.datadog.jenkins.plugins.datadog.listeners; +import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_AUTHOR_DATE; +import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_AUTHOR_EMAIL; +import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_AUTHOR_NAME; +import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_COMMITTER_DATE; +import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_COMMITTER_EMAIL; +import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_COMMITTER_NAME; +import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_MESSAGE; +import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_DEFAULT_BRANCH; +import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_TAG; + +import hudson.EnvVars; import hudson.Extension; +import hudson.FilePath; +import hudson.model.Computer; import hudson.model.Run; +import java.util.Collections; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; +import java.util.logging.Logger; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import org.datadog.jenkins.plugins.datadog.DatadogGlobalConfiguration; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.audit.DatadogAudit; +import org.datadog.jenkins.plugins.datadog.model.GitCommitAction; +import org.datadog.jenkins.plugins.datadog.model.GitRepositoryAction; import org.datadog.jenkins.plugins.datadog.model.PipelineNodeInfoAction; -import org.datadog.jenkins.plugins.datadog.model.StepData; -import org.datadog.jenkins.plugins.datadog.traces.StepDataAction; +import org.datadog.jenkins.plugins.datadog.model.node.NodeInfoAction; 
+import org.datadog.jenkins.plugins.datadog.traces.BuildSpanAction; +import org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils; +import org.datadog.jenkins.plugins.datadog.util.git.GitUtils; import org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode; import org.jenkinsci.plugins.workflow.flow.StepListener; import org.jenkinsci.plugins.workflow.graph.BlockStartNode; @@ -14,10 +39,6 @@ import org.jenkinsci.plugins.workflow.steps.Step; import org.jenkinsci.plugins.workflow.steps.StepContext; -import javax.annotation.Nonnull; -import java.util.Iterator; -import java.util.logging.Logger; - @Extension public class DatadogStepListener implements StepListener { @@ -27,9 +48,8 @@ public class DatadogStepListener implements StepListener { public void notifyOfNewStep(@Nonnull Step step, @Nonnull StepContext context) { try { final Run run = context.get(Run.class); - final StepDataAction stepDataAction = run.getAction(StepDataAction.class); - if(stepDataAction == null) { - logger.fine("Unable to store Step data in Run '"+run.getFullDisplayName()+"'. StepDataAction is null"); + if (run == null) { + logger.severe("Unable to store Step data for step '" + step + "'. 
Run is null"); return; } @@ -43,11 +63,22 @@ public void notifyOfNewStep(@Nonnull Step step, @Nonnull StepContext context) { return; } - final StepData stepData = new StepData(context); - stepDataAction.put(run, flowNode, stepData); + Map envVars = getEnvVars(context); + updateGitData(run, envVars); + updateBuildData(run, envVars); - if (!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) { - return; + String nodeName = getNodeName(context); + String nodeHostname = getNodeHostname(context); + Set nodeLabels = getNodeLabels(context); + String nodeWorkspace = getNodeWorkspace(context); + NodeInfoAction nodeInfoAction = new NodeInfoAction(nodeName, nodeHostname, nodeLabels, nodeWorkspace); + + if (DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) { + // propagate node info to stage node + BlockStartNode stageNode = DatadogUtilities.getEnclosingStageNode(flowNode); + if (stageNode != null) { + stageNode.addOrReplaceAction(nodeInfoAction); + } } // We use the PipelineNodeInfoAction to propagate @@ -74,21 +105,206 @@ public void notifyOfNewStep(@Nonnull Step step, @Nonnull StepContext context) { // If the parent block from the first 'Allocate node : Start' node is the 'Start of Pipeline' node // the worker node where this Step was executed will be the worker node for the pipeline. - findStartOfPipeline(run, stepData, firstAllocateNodeStart); + findStartOfPipeline(run, nodeInfoAction, firstAllocateNodeStart); } catch (Exception ex) { logger.severe("Unable to extract Run information of the StepContext. " + ex); } } - private void findStartOfPipeline(final Run run, final StepData stepData, final FlowNode firstAllocateNodeStart) { + /** + * Returns the nodeName of the remote node which is executing a determined {@code Step} + * @param stepContext + * @return node name of the remote node. 
+ */ + private static String getNodeName(StepContext stepContext) { + try { + Computer computer = stepContext.get(Computer.class); + return DatadogUtilities.getNodeName(computer); + } catch (Exception e){ + logger.fine("Unable to extract the node name from StepContext."); + return null; + } + } + + /** + * Returns the hostname of the remote node which is executing a determined {@code Step} + * See {@code Computer.getHostName()} + * @param stepContext + * @return hostname of the remote node. + */ + private static String getNodeHostname(final StepContext stepContext) { + try { + EnvVars envVars = stepContext.get(EnvVars.class); + if (envVars != null) { + String ddHostname = envVars.get(DatadogGlobalConfiguration.DD_CI_HOSTNAME); + if (ddHostname != null) { + return ddHostname; + } + String hostname = envVars.get("HOSTNAME"); + if (hostname != null) { + return hostname; + } + } + } catch (Exception e){ + logger.fine("Unable to extract environment variables from StepContext."); + } + + try { + Computer computer = stepContext.get(Computer.class); + if(computer != null) { + return computer.getHostName(); + } + } catch (Exception e){ + logger.fine("Unable to extract hostname from StepContext."); + } + + return null; + } + + /** + * Returns the nodeLabels of the remote node which is executing a determined {@code Step} + * @param stepContext + * @return node labels of the remote node. + */ + private static Set getNodeLabels(StepContext stepContext) { + try { + Computer computer = stepContext.get(Computer.class); + return DatadogUtilities.getNodeLabels(computer); + } catch (Exception e) { + logger.fine("Unable to extract the node labels from StepContext."); + return Collections.emptySet(); + } + } + + /** + * Returns the workspace filepath of the remote node which is executing a determined {@code Step} + * @return absolute filepath of the workspace of the remote node. 
+ */ + private static String getNodeWorkspace(final StepContext stepContext) { + FilePath filePath = null; + try { + filePath = stepContext.get(FilePath.class); + } catch (Exception e){ + logger.fine("Unable to extract FilePath information of the StepContext."); + } + + if(filePath == null) { + return null; + } + + return filePath.getRemote(); + } + + /** + * Returns {@code Map} with environment variables of a certain {@code StepContext} + * @return map with environment variables of a stepContext. + */ + private static Map getEnvVars(StepContext stepContext) { + EnvVars envVarsObj = null; + try { + envVarsObj = stepContext.get(EnvVars.class); + } catch (Exception e){ + logger.fine("Unable to extract environment variables from StepContext."); + } + + if(envVarsObj == null) { + return Collections.emptyMap(); + } + return envVarsObj.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + private void updateBuildData(Run run, Map envVars) { + BuildSpanAction buildSpanAction = run.getAction(BuildSpanAction.class); + if (buildSpanAction == null) { + return; + } + String buildUrl = envVars.get("BUILD_URL"); + if (buildUrl != null) { + buildSpanAction.setBuildUrl(buildUrl); + } + } + + private static void updateGitData(Run run, Map envVars) { + GitCommitAction commitAction = run.getAction(GitCommitAction.class); + if (commitAction != null) { + // Git tag can only be set manually by the user. + // Otherwise, Jenkins reports it in the branch. + final String gitTag = envVars.get(DD_GIT_TAG); + if(gitTag != null){ + commitAction.setTag(gitTag); + } + + final String gitCommit = GitUtils.resolveGitCommit(envVars); + if(gitCommit != null) { + commitAction.setCommit(gitCommit); + } + + // Git data supplied by the user has prevalence. We set them first. + // Only the data that has not been set will be updated later. 
+ final String ddGitMessage = envVars.get(DD_GIT_COMMIT_MESSAGE); + if(ddGitMessage != null) { + commitAction.setMessage(ddGitMessage); + } + + final String ddGitAuthorName = envVars.get(DD_GIT_COMMIT_AUTHOR_NAME); + if(ddGitAuthorName != null) { + commitAction.setAuthorName(ddGitAuthorName); + } + + final String ddGitAuthorEmail = envVars.get(DD_GIT_COMMIT_AUTHOR_EMAIL); + if(ddGitAuthorEmail != null) { + commitAction.setAuthorEmail(ddGitAuthorEmail); + } + + final String ddGitAuthorDate = envVars.get(DD_GIT_COMMIT_AUTHOR_DATE); + if(ddGitAuthorDate != null) { + commitAction.setAuthorDate(ddGitAuthorDate); + } + + final String ddGitCommitterName = envVars.get(DD_GIT_COMMIT_COMMITTER_NAME); + if(ddGitCommitterName != null) { + commitAction.setCommitterName(ddGitCommitterName); + } + + final String ddGitCommitterEmail = envVars.get(DD_GIT_COMMIT_COMMITTER_EMAIL); + if(ddGitCommitterEmail != null) { + commitAction.setCommitterEmail(ddGitCommitterEmail); + } + + final String ddGitCommitterDate = envVars.get(DD_GIT_COMMIT_COMMITTER_DATE); + if(ddGitCommitterDate != null) { + commitAction.setCommitterDate(ddGitCommitterDate); + } + } + + GitRepositoryAction repositoryAction = run.getAction(GitRepositoryAction.class); + if (repositoryAction != null) { + final String gitUrl = GitUtils.resolveGitRepositoryUrl(envVars); + if (gitUrl != null && !gitUrl.isEmpty()) { + repositoryAction.setRepositoryURL(gitUrl); + } + + final String defaultBranch = GitInfoUtils.normalizeBranch(envVars.get(DD_GIT_DEFAULT_BRANCH)); + if (defaultBranch != null && !defaultBranch.isEmpty()) { + repositoryAction.setDefaultBranch(defaultBranch); + } + + final String gitBranch = GitUtils.resolveGitBranch(envVars); + if(gitBranch != null && !gitBranch.isEmpty()) { + repositoryAction.setBranch(gitBranch); + } + } + } + + private void findStartOfPipeline(final Run run, final NodeInfoAction nodeInfoAction, final FlowNode firstAllocateNodeStart) { long start = System.currentTimeMillis(); try { final 
Iterator blockStartNodes = firstAllocateNodeStart.iterateEnclosingBlocks().iterator(); if(blockStartNodes.hasNext()) { final FlowNode candidate = blockStartNodes.next(); if("Start of Pipeline".equals(candidate.getDisplayName())) { - run.addAction(new PipelineNodeInfoAction(stepData.getNodeName() != null ? stepData.getNodeName() : "master", stepData.getNodeLabels(), stepData.getNodeHostname())); + run.addOrReplaceAction(new PipelineNodeInfoAction(nodeInfoAction.getNodeName() != null ? nodeInfoAction.getNodeName() : "master", nodeInfoAction.getNodeLabels(), nodeInfoAction.getNodeHostname(), nodeInfoAction.getNodeWorkspace())); } } } finally { diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java index 1f016a157..be705e267 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java @@ -33,11 +33,7 @@ of this software and associated documentation files (the "Software"), to deal import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_COMMITTER_NAME; import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_MESSAGE; import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.GIT_BRANCH; -import static org.datadog.jenkins.plugins.datadog.util.git.GitUtils.isCommitInfoAlreadyCreated; -import static org.datadog.jenkins.plugins.datadog.util.git.GitUtils.isRepositoryInfoAlreadyCreated; import static org.datadog.jenkins.plugins.datadog.util.git.GitUtils.isUserSuppliedGit; -import static org.datadog.jenkins.plugins.datadog.util.git.GitUtils.isValidCommit; -import static org.datadog.jenkins.plugins.datadog.util.git.GitUtils.isValidRepositoryURL; import com.cloudbees.plugins.credentials.CredentialsParameterValue; import hudson.EnvVars; @@ -66,16 +62,17 @@ of this software and associated documentation files (the 
"Software"), to deal import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; +import javax.annotation.Nullable; import net.sf.json.JSONObject; import org.apache.commons.lang.StringUtils; +import org.datadog.jenkins.plugins.datadog.DatadogGlobalConfiguration; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.traces.BuildSpanAction; import org.datadog.jenkins.plugins.datadog.traces.BuildSpanManager; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; -import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings; import org.datadog.jenkins.plugins.datadog.util.TagsUtil; import org.datadog.jenkins.plugins.datadog.util.git.GitUtils; -import org.jenkinsci.plugins.gitclient.GitClient; +import org.jenkinsci.plugins.workflow.cps.EnvActionImpl; public class BuildData implements Serializable { @@ -138,68 +135,100 @@ public class BuildData implements Serializable { private String traceId; private String spanId; - @SuppressFBWarnings("NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE") - public BuildData(Run run, TaskListener listener) throws IOException, InterruptedException { + public BuildData(Run run, @Nullable TaskListener listener) throws IOException, InterruptedException { if (run == null) { return; } - EnvVars envVars; - if(listener != null){ - envVars = run.getEnvironment(listener); - }else{ - envVars = run.getEnvironment(new LogTaskListener(LOGGER, Level.INFO)); - } + EnvVars envVars = getEnvVars(run, listener); - setTags(DatadogUtilities.getBuildTags(run, envVars)); + this.tags = DatadogUtilities.getBuildTags(run, envVars); + + this.buildUrl = envVars.get("BUILD_URL"); + if (buildUrl == null) { + BuildSpanAction buildSpanAction = run.getAction(BuildSpanAction.class); + if (buildSpanAction != null) { + buildUrl = buildSpanAction.getBuildUrl(); + } + } // Populate instance using environment variables. 
populateEnvVariables(envVars); // Populate instance using Git info if possible. // Set all Git commit related variables. - if(isGit(envVars)){ - populateGitVariables(run, listener, envVars); - } + populateGitVariables(run); // Populate instance using run instance // Set StartTime, EndTime and Duration - long startTimeInMs = run.getStartTimeInMillis(); - setStartTime(startTimeInMs); + this.startTime = run.getStartTimeInMillis(); long durationInMs = run.getDuration(); - if (durationInMs == 0 && startTimeInMs != 0) { - durationInMs = System.currentTimeMillis() - startTimeInMs; + if (durationInMs == 0 && run.getStartTimeInMillis() != 0) { + durationInMs = System.currentTimeMillis() - run.getStartTimeInMillis(); } - setDuration(durationInMs); - if (durationInMs != 0 && startTimeInMs != 0) { - Long endTimeInMs = startTimeInMs + durationInMs; - setEndTime(endTimeInMs); + this.duration = durationInMs; + if (durationInMs != 0 && run.getStartTimeInMillis() != 0) { + this.endTime = run.getStartTimeInMillis() + durationInMs; } // Set Jenkins Url - setJenkinsUrl(DatadogUtilities.getJenkinsUrl()); + this.jenkinsUrl = DatadogUtilities.getJenkinsUrl(); // Set UserId - setUserId(getUserId(run)); + this.userId = getUserId(run); // Set UserEmail if(StringUtils.isEmpty(getUserEmail(""))){ - setUserEmail(getUserEmailByUserId(getUserId())); + this.userEmail = getUserEmailByUserId(getUserId()); } // Set Result and completed status - setResult(run.getResult() == null ? null : run.getResult().toString()); - setCompleted(run.getResult() != null && run.getResult().completeBuild); + this.result = run.getResult() == null ? 
null : run.getResult().toString(); + this.isCompleted = run.getResult() != null && run.getResult().completeBuild; // Set Build Number - setBuildNumber(String.valueOf(run.getNumber())); - // Set Hostname - setHostname(DatadogUtilities.getHostname(envVars)); + this.buildNumber = String.valueOf(run.getNumber()); + + final PipelineNodeInfoAction pipelineInfo = run.getAction(PipelineNodeInfoAction.class); + if (pipelineInfo != null && pipelineInfo.getNodeName() != null) { + this.nodeName = pipelineInfo.getNodeName(); + } else { + this.nodeName = envVars.get("NODE_NAME"); + } + + if (pipelineInfo != null && pipelineInfo.getNodeHostname() != null) { + // using the hostname determined during a pipeline step execution + // (this option is only available for pipelines, and not for freestyle builds) + this.hostname = pipelineInfo.getNodeHostname(); + } else if (DatadogUtilities.isMainNode(nodeName)) { + // the job is run on the master node, checking plugin config and locally available info. + // (nodeName == null) condition is there to preserve existing behavior + this.hostname = DatadogUtilities.getHostname(envVars); + } else if (envVars.containsKey(DatadogGlobalConfiguration.DD_CI_HOSTNAME)) { + // the job is run on an agent node, querying DD_CI_HOSTNAME set explicitly on agent + this.hostname = envVars.get(DatadogGlobalConfiguration.DD_CI_HOSTNAME); + } else { + // the job is run on an agent node, querying HOSTNAME set implicitly on agent + this.hostname = envVars.get("HOSTNAME"); + } + + if (pipelineInfo != null && pipelineInfo.getWorkspace() != null) { + this.workspace = pipelineInfo.getWorkspace(); + } else { + this.workspace = envVars.get("WORKSPACE"); + } + + PipelineQueueInfoAction action = run.getAction(PipelineQueueInfoAction.class); + if (action != null) { + this.millisInQueue = action.getQueueTimeMillis(); + this.propagatedMillisInQueue = action.getPropagatedQueueTimeMillis(); + } + // Save charset canonical name - setCharset(run.getCharset()); + this.charsetName 
= run.getCharset().name(); String baseJobName = getBaseJobName(run, envVars); - setBaseJobName(normalizeJobName(baseJobName)); + this.baseJobName = normalizeJobName(baseJobName); String jobNameWithConfiguration = getJobName(run, envVars); - setJobName(normalizeJobName(jobNameWithConfiguration)); + this.jobName = normalizeJobName(jobNameWithConfiguration); // Set Jenkins Url String jenkinsUrl = DatadogUtilities.getJenkinsUrl(); @@ -207,7 +236,7 @@ public BuildData(Run run, TaskListener listener) throws IOException, Interrupted && !envVars.get("JENKINS_URL").isEmpty()) { jenkinsUrl = envVars.get("JENKINS_URL"); } - setJenkinsUrl(jenkinsUrl); + this.jenkinsUrl = jenkinsUrl; // Build parameters populateBuildParameters(run); @@ -215,14 +244,29 @@ public BuildData(Run run, TaskListener listener) throws IOException, Interrupted // Set Tracing IDs final TraceSpan buildSpan = BuildSpanManager.get().get(getBuildTag("")); if(buildSpan !=null) { - setTraceId(Long.toUnsignedString(buildSpan.context().getTraceId())); - setSpanId(Long.toUnsignedString(buildSpan.context().getSpanId())); + this.traceId = Long.toUnsignedString(buildSpan.context().getTraceId()); + this.spanId = Long.toUnsignedString(buildSpan.context().getSpanId()); + } + } + + private static EnvVars getEnvVars(Run run, TaskListener listener) throws IOException, InterruptedException { + EnvVars mergedVars = new EnvVars(); + + List envActions = run.getActions(EnvActionImpl.class); + for (EnvActionImpl envAction : envActions) { + EnvVars environment = envAction.getEnvironment(); + mergedVars.putAll(environment); } - BuildSpanAction buildSpanAction = run.getAction(BuildSpanAction.class); - if (buildSpanAction != null) { - getMissingGitValuesFrom(buildSpanAction.getBuildData()); + Map envVars; + if(listener != null){ + envVars = run.getEnvironment(listener); + }else{ + envVars = run.getEnvironment(new LogTaskListener(LOGGER, Level.INFO)); } + mergedVars.putAll(envVars); + + return mergedVars; } private static String 
getBaseJobName(Run run, EnvVars envVars) { @@ -270,45 +314,6 @@ private static String getJobName(Run run, EnvVars envVars) { return "unknown"; } - private void getMissingGitValuesFrom(BuildData previousData) { - if (branch == null) { - branch = previousData.branch; - } - if (gitUrl == null) { - gitUrl = previousData.gitUrl; - } - if (gitCommit == null) { - gitCommit = previousData.gitCommit; - } - if (gitMessage == null) { - gitMessage = previousData.gitMessage; - } - if (gitAuthorName == null) { - gitAuthorName = previousData.gitAuthorName; - } - if (gitAuthorEmail == null) { - gitAuthorEmail = previousData.gitAuthorEmail; - } - if (gitAuthorDate == null) { - gitAuthorDate = previousData.gitAuthorDate; - } - if (gitCommitterName == null) { - gitCommitterName = previousData.gitCommitterName; - } - if (gitCommitterEmail == null) { - gitCommitterEmail = previousData.gitCommitterEmail; - } - if (gitCommitterDate == null) { - gitCommitterDate = previousData.gitCommitterDate; - } - if (gitDefaultBranch == null) { - gitDefaultBranch = previousData.gitDefaultBranch; - } - if (gitTag == null) { - gitTag = previousData.gitTag; - } - } - private void populateBuildParameters(Run run) { // Build parameters can be defined via Jenkins UI // or via Jenkinsfile (https://www.jenkins.io/doc/book/pipeline/syntax/#parameters) @@ -342,100 +347,58 @@ private void populateEnvVariables(EnvVars envVars){ if (envVars == null) { return; } - setBuildId(envVars.get("BUILD_ID")); - setBuildUrl(envVars.get("BUILD_URL")); - setNodeName(envVars.get("NODE_NAME")); + this.buildId = envVars.get("BUILD_ID"); String envBuildTag = envVars.get("BUILD_TAG"); if (StringUtils.isNotBlank(envBuildTag)) { - setBuildTag(envBuildTag); + this.buildTag = envBuildTag; } else { - setBuildTag("jenkins-" + envVars.get("JOB_NAME") + "-" + envVars.get("BUILD_NUMBER")); + this.buildTag = "jenkins-" + envVars.get("JOB_NAME") + "-" + envVars.get("BUILD_NUMBER"); } - setExecutorNumber(envVars.get("EXECUTOR_NUMBER")); - 
setJavaHome(envVars.get("JAVA_HOME")); - setWorkspace(envVars.get("WORKSPACE")); + this.executorNumber = envVars.get("EXECUTOR_NUMBER"); + this.javaHome = envVars.get("JAVA_HOME"); if (isGit(envVars)) { - setBranch(GitUtils.resolveGitBranch(envVars, null)); - setGitUrl(GitUtils.resolveGitRepositoryUrl(envVars, null)); - setGitCommit(GitUtils.resolveGitCommit(envVars, null)); - setGitTag(GitUtils.resolveGitTag(envVars, null)); + this.branch = GitUtils.resolveGitBranch(envVars); + this.gitUrl = GitUtils.resolveGitRepositoryUrl(envVars); + this.gitCommit = GitUtils.resolveGitCommit(envVars); + this.gitTag = GitUtils.resolveGitTag(envVars); // Git data supplied by the user has prevalence. We set them first. // Only the data that has not been set will be updated later. // If any value is not provided, we maintained the original value if any. - setGitMessage(envVars.get(DD_GIT_COMMIT_MESSAGE, this.gitMessage)); - setGitAuthorName(envVars.get(DD_GIT_COMMIT_AUTHOR_NAME, this.gitAuthorName)); - setGitAuthorEmail(envVars.get(DD_GIT_COMMIT_AUTHOR_EMAIL, this.gitAuthorEmail)); - setGitAuthorDate(envVars.get(DD_GIT_COMMIT_AUTHOR_DATE, this.gitAuthorDate)); - setGitCommitterName(envVars.get(DD_GIT_COMMIT_COMMITTER_NAME, this.gitCommitterName)); - setGitCommitterEmail(envVars.get(DD_GIT_COMMIT_COMMITTER_EMAIL, this.gitCommitterEmail)); - setGitCommitterDate(envVars.get(DD_GIT_COMMIT_COMMITTER_DATE, this.gitCommitterDate)); + this.gitMessage = envVars.get(DD_GIT_COMMIT_MESSAGE, this.gitMessage); + this.gitAuthorName = envVars.get(DD_GIT_COMMIT_AUTHOR_NAME, this.gitAuthorName); + this.gitAuthorEmail = envVars.get(DD_GIT_COMMIT_AUTHOR_EMAIL, this.gitAuthorEmail); + this.gitAuthorDate = envVars.get(DD_GIT_COMMIT_AUTHOR_DATE, this.gitAuthorDate); + this.gitCommitterName = envVars.get(DD_GIT_COMMIT_COMMITTER_NAME, this.gitCommitterName); + this.gitCommitterEmail = envVars.get(DD_GIT_COMMIT_COMMITTER_EMAIL, this.gitCommitterEmail); + this.gitCommitterDate = 
envVars.get(DD_GIT_COMMIT_COMMITTER_DATE, this.gitCommitterDate); } else if (envVars.get("CVS_BRANCH") != null) { - setBranch(envVars.get("CVS_BRANCH")); + this.branch = envVars.get("CVS_BRANCH"); } - setPromotedUrl(envVars.get("PROMOTED_URL")); - setPromotedJobName(envVars.get("PROMOTED_JOB_NAME")); - setPromotedNumber(envVars.get("PROMOTED_NUMBER")); - setPromotedId(envVars.get("PROMOTED_ID")); - setPromotedTimestamp(envVars.get("PROMOTED_TIMESTAMP")); - setPromotedUserName(envVars.get("PROMOTED_USER_NAME")); - setPromotedUserId(envVars.get("PROMOTED_USER_ID")); - setPromotedJobFullName(envVars.get("PROMOTED_JOB_FULL_NAME")); + this.promotedUrl = envVars.get("PROMOTED_URL"); + this.promotedJobName = envVars.get("PROMOTED_JOB_NAME"); + this.promotedNumber = envVars.get("PROMOTED_NUMBER"); + this.promotedId = envVars.get("PROMOTED_ID"); + this.promotedTimestamp = envVars.get("PROMOTED_TIMESTAMP"); + this.promotedUserName = envVars.get("PROMOTED_USER_NAME"); + this.promotedUserId = envVars.get("PROMOTED_USER_ID"); + this.promotedJobFullName = envVars.get("PROMOTED_JOB_FULL_NAME"); } /** * Populate git commit related information in the BuildData instance. * @param run - * @param listener - * @param envVars */ - private void populateGitVariables(Run run, TaskListener listener, EnvVars envVars) { - // First we obtain the actions to check if the Git information was already calculated. - // If so, we want to use this information to avoid creating a new Git client instance - // to calculate the same information. 
- - boolean commitInfoAlreadyCreated = isCommitInfoAlreadyCreated(run, this.gitCommit); - boolean repositoryInfoAlreadyCreated = isRepositoryInfoAlreadyCreated(run, this.gitUrl); - + private void populateGitVariables(Run run) { GitRepositoryAction gitRepositoryAction = run.getAction(GitRepositoryAction.class); - if(repositoryInfoAlreadyCreated){ - this.gitDefaultBranch = gitRepositoryAction.getDefaultBranch(); - } + populateRepositoryInfo(gitRepositoryAction); - final GitCommitAction gitCommitAction = run.getAction(GitCommitAction.class); - if(commitInfoAlreadyCreated){ - populateCommitInfo(gitCommitAction); - } - - // If all Git info was already calculated, we finish the method here. - if(repositoryInfoAlreadyCreated && commitInfoAlreadyCreated) { - return; - } - - // At this point, there is some Git information that we need to calculate. - // We use the same Git client instance to calculate all git information - // because creating a Git client is a very expensive operation. - // Create a new Git client is a very expensive operation. - // Avoid creating Git clients as much as possible. - if(!isValidCommit(gitCommit) && !isValidRepositoryURL(this.gitUrl)) { - return; - } - - final GitClient gitClient = GitUtils.newGitClient(run, listener, envVars, this.nodeName, this.workspace); - if(isValidCommit(this.gitCommit)){ - populateCommitInfo(GitUtils.buildGitCommitAction(run, gitClient, this.gitCommit)); - } - - if(isValidRepositoryURL(this.gitUrl)){ - gitRepositoryAction = GitUtils.buildGitRepositoryAction(run, gitClient, envVars, this.gitUrl); - if(gitRepositoryAction != null) { - this.gitDefaultBranch = gitRepositoryAction.getDefaultBranch(); - } - } + GitCommitAction gitCommitAction = run.getAction(GitCommitAction.class); + populateCommitInfo(gitCommitAction); } /** @@ -449,32 +412,67 @@ private void populateCommitInfo(GitCommitAction gitCommitAction) { // the user supplied the value manually // via environment variables. 
+ String existingCommit = getGitCommit(""); + if (!existingCommit.isEmpty() && !existingCommit.equals(gitCommitAction.getCommit())) { + // user-supplied commit is different + return; + } + + if(existingCommit.isEmpty()){ + this.gitCommit = gitCommitAction.getCommit(); + } + + if(getGitTag("").isEmpty()){ + this.gitTag = gitCommitAction.getTag(); + } + if(getGitMessage("").isEmpty()){ - setGitMessage(gitCommitAction.getMessage()); + this.gitMessage = gitCommitAction.getMessage(); } if(getGitAuthorName("").isEmpty()){ - setGitAuthorName(gitCommitAction.getAuthorName()); + this.gitAuthorName = gitCommitAction.getAuthorName(); } if(getGitAuthorEmail("").isEmpty()) { - setGitAuthorEmail(gitCommitAction.getAuthorEmail()); + this.gitAuthorEmail = gitCommitAction.getAuthorEmail(); } if(getGitAuthorDate("").isEmpty()){ - setGitAuthorDate(gitCommitAction.getAuthorDate()); + this.gitAuthorDate = gitCommitAction.getAuthorDate(); } if(getGitCommitterName("").isEmpty()){ - setGitCommitterName(gitCommitAction.getCommitterName()); + this.gitCommitterName = gitCommitAction.getCommitterName(); } if(getGitCommitterEmail("").isEmpty()){ - setGitCommitterEmail(gitCommitAction.getCommitterEmail()); + this.gitCommitterEmail = gitCommitAction.getCommitterEmail(); } if(getGitCommitterDate("").isEmpty()){ - setGitCommitterDate(gitCommitAction.getCommitterDate()); + this.gitCommitterDate = gitCommitAction.getCommitterDate(); + } + } + } + + private void populateRepositoryInfo(GitRepositoryAction gitRepositoryAction) { + if (gitRepositoryAction != null) { + if (gitUrl != null && !gitUrl.isEmpty() && !gitUrl.equals(gitRepositoryAction.getRepositoryURL())) { + // user-supplied URL is different + return; + } + + if (gitUrl == null || gitUrl.isEmpty()) { + gitUrl = gitRepositoryAction.getRepositoryURL(); + } + + if (gitDefaultBranch == null || gitDefaultBranch.isEmpty()) { + gitDefaultBranch = gitRepositoryAction.getDefaultBranch(); + } + + if (branch == null || branch.isEmpty()) { + this.branch 
= gitRepositoryAction.getBranch(); } } } @@ -558,35 +556,18 @@ public String getJobName(String value) { return defaultIfNull(jobName, value); } - public void setJobName(String jobName) { - this.jobName = jobName; - } - public String getBaseJobName(String value) { return defaultIfNull(baseJobName, value); } - public void setBaseJobName(String baseJobName) { - this.baseJobName = baseJobName; - } - public String getResult(String value) { return defaultIfNull(result, value); } - public void setResult(String result) { - this.result = result; - } - public boolean isCompleted() { return isCompleted; } - public void setCompleted(boolean completed) { - this.isCompleted = completed; - } - - public String getHostname(String value) { return defaultIfNull(hostname, value); } @@ -599,10 +580,6 @@ public String getBuildUrl(String value) { return defaultIfNull(buildUrl, value); } - public void setBuildUrl(String buildUrl) { - this.buildUrl = buildUrl; - } - public Charset getCharset() { if (charsetName != null) { // Will throw an exception if there is an issue with @@ -612,12 +589,6 @@ public Charset getCharset() { return Charset.defaultCharset(); } - public void setCharset(Charset charset) { - if (charset != null) { - this.charsetName = charset.name(); - } - } - public Map getBuildParameters() { return this.buildParameters; } @@ -626,82 +597,38 @@ public String getNodeName(String value) { return defaultIfNull(nodeName, value); } - public void setNodeName(String nodeName) { - this.nodeName = nodeName; - } - public String getBranch(String value) { return defaultIfNull(branch, value); } - public void setBranch(String branch) { - this.branch = branch; - } - public String getBuildNumber(String value) { return defaultIfNull(buildNumber, value); } - public void setBuildNumber(String buildNumber) { - this.buildNumber = buildNumber; - } - public Long getDuration(Long value) { return defaultIfNull(duration, value); } - public void setDuration(Long duration) { - this.duration = duration; - 
} - public Long getEndTime(Long value) { return defaultIfNull(endTime, value); } - public void setEndTime(Long endTime) { - this.endTime = endTime; - } - public Long getStartTime(Long value) { return defaultIfNull(startTime, value); } - public void setStartTime(Long startTime) { - this.startTime = startTime; - } - public Long getMillisInQueue(Long value) { return defaultIfNull(millisInQueue, value); } - public void setMillisInQueue(Long millisInQueue) { - this.millisInQueue = millisInQueue; - } - public Long getPropagatedMillisInQueue(Long value) { return defaultIfNull(propagatedMillisInQueue, value); } - public void setPropagatedMillisInQueue(Long propagatedMillisInQueue) { - this.propagatedMillisInQueue = propagatedMillisInQueue; - } - - public String getBuildId(String value) { - return defaultIfNull(buildId, value); - } - - public void setBuildId(String buildId) { - this.buildId = buildId; - } - public String getBuildTag(String value) { return defaultIfNull(buildTag, value); } - public void setBuildTag(String buildTag) { - this.buildTag = buildTag; - } - public String getJenkinsUrl(String value) { return defaultIfNull(jenkinsUrl, value); } @@ -714,42 +641,22 @@ public String getExecutorNumber(String value) { return defaultIfNull(executorNumber, value); } - public void setExecutorNumber(String executorNumber) { - this.executorNumber = executorNumber; - } - public String getJavaHome(String value) { return defaultIfNull(javaHome, value); } - public void setJavaHome(String javaHome) { - this.javaHome = javaHome; - } - public String getWorkspace(String value) { return defaultIfNull(workspace, value); } - public void setWorkspace(String workspace) { - this.workspace = workspace; - } - public String getGitUrl(String value) { return defaultIfNull(gitUrl, value); } - public void setGitUrl(String gitUrl) { - this.gitUrl = gitUrl; - } - public String getGitCommit(String value) { return defaultIfNull(gitCommit, value); } - public void setGitCommit(String gitCommit) { - 
this.gitCommit = gitCommit; - } - public String getGitMessage(String value) { return defaultIfNull(gitMessage, value); } @@ -818,82 +725,10 @@ public String getGitTag(String value) { return defaultIfNull(gitTag, value); } - public void setGitTag(String gitTag) { - this.gitTag = gitTag; - } - - public String getPromotedUrl(String value) { - return defaultIfNull(promotedUrl, value); - } - - public void setPromotedUrl(String promotedUrl) { - this.promotedUrl = promotedUrl; - } - - public String getPromotedJobName(String value) { - return defaultIfNull(promotedJobName, value); - } - - public void setPromotedJobName(String promotedJobName) { - this.promotedJobName = promotedJobName; - } - - public String getPromotedNumber(String value) { - return defaultIfNull(promotedNumber, value); - } - - public void setPromotedNumber(String promotedNumber) { - this.promotedNumber = promotedNumber; - } - - public String getPromotedId(String value) { - return defaultIfNull(promotedId, value); - } - - public void setPromotedId(String promotedId) { - this.promotedId = promotedId; - } - - public String getPromotedTimestamp(String value) { - return defaultIfNull(promotedTimestamp, value); - } - - public void setPromotedTimestamp(String promotedTimestamp) { - this.promotedTimestamp = promotedTimestamp; - } - - public String getPromotedUserName(String value) { - return defaultIfNull(promotedUserName, value); - } - - public void setPromotedUserName(String promotedUserName) { - this.promotedUserName = promotedUserName; - } - - public String getPromotedUserId(String value) { - return defaultIfNull(promotedUserId, value); - } - - public void setPromotedUserId(String promotedUserId) { - this.promotedUserId = promotedUserId; - } - - public String getPromotedJobFullName(String value) { - return defaultIfNull(promotedJobFullName, value); - } - - public void setPromotedJobFullName(String promotedJobFullName) { - this.promotedJobFullName = promotedJobFullName; - } - public String getUserId() { return 
userId; } - public void setUserId(String userId) { - this.userId = userId; - } - private String getUserId(Run run) { if (promotedUserId != null){ return promotedUserId; @@ -972,18 +807,6 @@ private String getUserEmailByUserId(String userId) { } } - public void setUserEmail(final String userEmail) { - this.userEmail = userEmail; - } - - public void setTraceId(String traceId) { - this.traceId = traceId; - } - - public void setSpanId(String spanId) { - this.spanId = spanId; - } - public JSONObject addLogAttributes(){ JSONObject payload = new JSONObject(); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildPipeline.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildPipeline.java deleted file mode 100644 index c13f7ea95..000000000 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildPipeline.java +++ /dev/null @@ -1,230 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.model; - -import static org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode.BuildPipelineNodeKey; - -import org.datadog.jenkins.plugins.datadog.traces.CITags; -import org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode; -import org.jenkinsci.plugins.workflow.graph.BlockEndNode; -import org.jenkinsci.plugins.workflow.graph.BlockStartNode; -import org.jenkinsci.plugins.workflow.graph.FlowNode; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * Represents a Jenkins Pipeline. - * The stages are represented using an n-ary tree. 
- */ -public class BuildPipeline { - - private final Map, BuildPipelineNode> stagesByPath; - private BuildPipelineNode root; - - public BuildPipeline() { - this.stagesByPath = new HashMap<>(); - this.root = new BuildPipelineNode("initial", "initial"); - } - - public BuildPipelineNode add(final FlowNode node) { - final BuildPipelineNode buildNode = buildPipelineNode(node); - if(buildNode == null) { - return null; - } - - final List buildNodeRelations = new ArrayList<>(); - buildNodeRelations.add(buildNode.getKey()); - for (final BlockStartNode startNode : node.iterateEnclosingBlocks()) { - buildNodeRelations.add(new BuildPipelineNodeKey(startNode.getId(), startNode.getDisplayName())); - } - - Collections.reverse(buildNodeRelations); - return stagesByPath.put(buildNodeRelations, buildNode); - } - - private BuildPipelineNode buildPipelineNode(FlowNode node) { - if(node instanceof BlockEndNode) { - return new BuildPipelineNode((BlockEndNode) node); - } else if(node instanceof StepAtomNode) { - return new BuildPipelineNode((StepAtomNode) node); - } - return null; - } - - /** - * Reconstruct a Jenkins pipeline tree from the info gathered in the {@code DatadogGraphListener}. - * Example: - * Starting from the stagesByPath: - * Key: (Stage1, Stage2) - Value: (Stage2) - * Key: (Stage1, Stage2, Stage3) - Value: (Stage3) - * Key: (Stage1) - Value: (Stage1) - * it will be returned the following tree: - * root - * -- stage1 - * -- stage2 - * -- stage3 - * @return the build pipeline tree. 
- **/ - public BuildPipelineNode buildTree() { - for(Map.Entry, BuildPipelineNode> entry : stagesByPath.entrySet()){ - final List pathStages = entry.getKey(); - final BuildPipelineNode stage = entry.getValue(); - buildTree(pathStages, root, stage); - } - - sortSiblingsByStartTime(root.getChildren()); - completeInformation(root.getChildren(), root); - assignPipelineToRootNode(root); - return root; - } - - private void assignPipelineToRootNode(BuildPipelineNode root) { - final List children = root.getChildren(); - if(children.size() == 1) { - this.root = children.get(0); - } - } - - private void sortSiblingsByStartTime(List stages) { - for(BuildPipelineNode stage : stages) { - sortSiblingsByStartTime(stage.getChildren()); - } - stages.sort(new BuildPipelineNode.BuildPipelineNodeComparator()); - } - - private void completeInformation(final List nodes, final BuildPipelineNode parent) { - for(int i = 0; i < nodes.size(); i++) { - final BuildPipelineNode node = nodes.get(i); - final Long endTime = node.getEndTime(); - if(endTime == -1L) { - if(i + 1 < nodes.size()) { - final BuildPipelineNode sibling = nodes.get(i + 1); - node.setEndTime(sibling.getStartTime()); - } else { - node.setEndTime(parent.getEndTime()); - } - } - - // Propagate Stage Name to its children - if(!BuildPipelineNode.NodeType.STAGE.equals(node.getType())) { - if(BuildPipelineNode.NodeType.STAGE.equals(parent.getType())) { - node.setStageName(parent.getName()); - node.setStageId(parent.getId()); - } else if(parent.getStageName() != null){ - node.setStageName(parent.getStageName()); - node.setStageId(parent.getStageId()); - } - } - - // Propagate queue time from "Allocate node" child: - // If the node is the initial (Start of Pipeline) or is a Stage, - // we need to propagate the queue time stored in its child node ("Allocate node"). - // This is necessary because the stage/pipeline node does not have the queue time itself, - // but it's stored in the "Allocate node" which is its child. 
- if((node.isInitial() || BuildPipelineNode.NodeType.STAGE.equals(node.getType())) && node.getChildren().size() == 1){ - BuildPipelineNode child = node.getChildren().get(0); - if(child.getName().contains("Allocate node")) { - node.setPropagatedNanosInQueue(child.getNanosInQueue()); - } - } - - - // Propagate worker node name from the executable child node - // (where the worker node info is available) to its stage. - if(BuildPipelineNode.NodeType.STAGE.equals(node.getType())) { - final BuildPipelineNode executableChildNode = searchExecutableChildNode(node); - if(executableChildNode != null) { - node.setNodeName(executableChildNode.getNodeName()); - node.setNodeLabels(executableChildNode.getNodeLabels()); - node.setNodeHostname(executableChildNode.getNodeHostname()); - } - } - - // Propagate error to all parent stages - if(node.isError() && !parent.isError()) { - propagateResultToAllParents(node, CITags.STATUS_ERROR, false); - } else if(node.isUnstable() && !parent.isUnstable()) { - propagateResultToAllParents(node, CITags.STATUS_UNSTABLE, false); - } - - // Notice we cannot propagate the worker node info - // to the root span at this point, because this method is executed - // after the root span is sent. To propagate worker node info - // to the root span, we use the PipelineNodeInfoAction that is populated - // in the DatadogStepListener class. 
- - completeInformation(node.getChildren(), node); - } - } - - private void propagateResultToAllParents(BuildPipelineNode node, String result, boolean stopAtFirstNonInternalNode) { - // propagating "error" status is different from propagating "unstable", - // since error can be caught and suppressed - if (CITags.STATUS_ERROR.equals(result)) { - if (node.getCatchErrorResult() != null) { - // encountered a "catchError" or a "warnError" block; - // will propagate the updated result to the first visible (non-internal) node, and then stop - result = node.getCatchErrorResult(); - stopAtFirstNonInternalNode = true; - } else if (node.getErrorObj() == null && !stopAtFirstNonInternalNode) { - // most likely a "catch" block in a scripted pipeline - return; - } - } - - node.setResult(result); - - if (!node.isInternal() && stopAtFirstNonInternalNode) { - return; - } - - for(BuildPipelineNode parent : node.getParents()) { - propagateResultToAllParents(parent, result, stopAtFirstNonInternalNode); - } - } - - private BuildPipelineNode searchExecutableChildNode(BuildPipelineNode node) { - if(!node.isInternal() && BuildPipelineNode.NodeType.STEP.equals(node.getType())){ - return node; - }else if ("Stage : Start".equalsIgnoreCase(node.getName())) { - // If we find a "Stage : Start" as child, we need to stop searching - // because we're changing the Stage, so the executable child node - // will not belong to the required stage. 
- return null; - } else { - for(BuildPipelineNode child : node.getChildren()){ - final BuildPipelineNode found = searchExecutableChildNode(child); - if(found != null) { - return found; - } - } - } - return null; - } - - private void buildTree(List pathStages, BuildPipelineNode parent, BuildPipelineNode stage) { - if(pathStages.isEmpty()) { - return; - } - - final BuildPipelineNodeKey buildNodeKey = pathStages.get(0); - if(pathStages.size() == 1){ - final BuildPipelineNode child = parent.getChild(buildNodeKey); - if (child == null) { - parent.addChild(stage); - } else { - child.updateData(stage); - } - - } else { - BuildPipelineNode child = parent.getChild(buildNodeKey); - if(child == null) { - child = new BuildPipelineNode(buildNodeKey); - parent.addChild(child); - } - buildTree(pathStages.subList(1, pathStages.size()), child, stage); - } - } -} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildPipelineNode.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildPipelineNode.java index e77973bb2..479f0bdb8 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildPipelineNode.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildPipelineNode.java @@ -1,41 +1,27 @@ package org.datadog.jenkins.plugins.datadog.model; -import hudson.console.AnnotatedLargeText; import hudson.model.Run; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; -import org.datadog.jenkins.plugins.datadog.traces.CITags; -import org.datadog.jenkins.plugins.datadog.traces.StepDataAction; -import org.datadog.jenkins.plugins.datadog.traces.StepTraceDataAction; -import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings; +import org.datadog.jenkins.plugins.datadog.model.node.DequeueAction; +import org.datadog.jenkins.plugins.datadog.model.node.NodeInfoAction; +import org.datadog.jenkins.plugins.datadog.model.node.StatusAction; 
+import org.datadog.jenkins.plugins.datadog.traces.BuildSpanAction; +import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; import org.jenkinsci.plugins.workflow.actions.ArgumentsAction; -import org.jenkinsci.plugins.workflow.actions.ErrorAction; -import org.jenkinsci.plugins.workflow.actions.LogAction; -import org.jenkinsci.plugins.workflow.actions.TimingAction; import org.jenkinsci.plugins.workflow.actions.WarningAction; import org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode; import org.jenkinsci.plugins.workflow.graph.BlockEndNode; import org.jenkinsci.plugins.workflow.graph.BlockStartNode; import org.jenkinsci.plugins.workflow.graph.FlowNode; -import org.jenkinsci.plugins.workflow.graph.StepNode; - -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.concurrent.TimeUnit; -import java.util.logging.Logger; /** * Represent a stage of the Jenkins Pipeline. 
*/ public class BuildPipelineNode { - private static final Logger logger = Logger.getLogger(BuildPipelineNode.class.getName()); - public enum NodeType { PIPELINE("ci.pipeline", "pipeline"), STAGE("ci.stage", "stage"), @@ -58,148 +44,137 @@ public String getBuildLevel() { } } - private final BuildPipelineNodeKey key; - private final List parents; - private final List children; - private final String id; - private final String name; + private String id; + private String name; private String stageId; private String stageName; private NodeType type; - private boolean internal; - private boolean initial; - private Map args = new HashMap<>(); - private Map envVars = new HashMap<>(); + private Map args; private String workspace; private String nodeName; private Set nodeLabels; private String nodeHostname; - private long startTime; private long startTimeMicros; - private long endTime; private long endTimeMicros; - private long nanosInQueue = -1L; - private long propagatedNanosInQueue = -1L; - private String result; + private long nanosInQueue; + private String jenkinsResult; + private Status status; - // If the node is a `catchError` block, this field will contain the `stageResult` parameter - private String catchErrorResult; // Throwable of the node. // Although the error flag was true, this can be null. 
private Throwable errorObj; private String unstableMessage; //Tracing - private long spanId = -1L; - - public BuildPipelineNode(final String id, final String name) { - this(new BuildPipelineNodeKey(id, name)); - } + private long spanId; + private long parentSpanId = -1; + private long traceId; - public BuildPipelineNode(final BuildPipelineNodeKey key) { - this.key = key; - this.parents = new ArrayList<>(); - this.children = new ArrayList<>(); - this.id = key.id; - this.name = key.name; - } + public BuildPipelineNode(final Run run, final BlockStartNode startNode, final BlockEndNode endNode) { + this(run, startNode); - public BuildPipelineNode(final BlockEndNode endNode) { - final BlockStartNode startNode = endNode.getStartNode(); - this.key = new BuildPipelineNodeKey(startNode.getId(), startNode.getDisplayName()); - this.parents = new ArrayList<>(); - this.children = new ArrayList<>(); + this.type = NodeType.STAGE; - this.id = startNode.getId(); - this.name = startNode.getDisplayName(); - if(DatadogUtilities.isPipelineNode(endNode)) { - // The pipeline node must be treated as Step. - // Only root span must have ci.pipeline.* tags. - // https://datadoghq.atlassian.net/browse/CIAPP-190 - // The pipeline node is not the root span. - // In Jenkins, the build span is the root span, and - // the pipeline node span is a child of the build span. 
- this.type = NodeType.STEP; - this.internal = true; - this.initial = true; - } else if(DatadogUtilities.isStageNode(startNode)){ - this.type = NodeType.STAGE; - this.internal = false; - } else{ - this.type = NodeType.STEP; - this.internal = true; - } - - this.catchErrorResult = DatadogUtilities.getCatchErrorResult(startNode); - this.args = ArgumentsAction.getFilteredArguments(startNode); - - if(endNode instanceof StepNode){ - final StepData stepData = getStepData(startNode); - if(stepData != null) { - this.envVars = stepData.getEnvVars(); - this.workspace = stepData.getWorkspace(); - this.nodeName = stepData.getNodeName(); - this.nodeHostname = stepData.getNodeHostname(); - this.nodeLabels = stepData.getNodeLabels(); - } + this.startTimeMicros = TimeUnit.MILLISECONDS.toMicros(DatadogUtilities.getTimeMillis(startNode)); + if (startTimeMicros < 0) { + throw new IllegalStateException("Step " + startNode.getId() + " (" + startNode.getDisplayName() + ") has no start time info"); } - final FlowNodeQueueData queueData = getQueueData(startNode); - if(queueData != null) { - this.nanosInQueue = queueData.getNanosInQueue(); + this.endTimeMicros = TimeUnit.MILLISECONDS.toMicros(DatadogUtilities.getTimeMillis(endNode)); + if (endTimeMicros < 0) { + throw new IllegalStateException("Step " + endNode.getId() + " (" + endNode.getDisplayName() + ") has no end time info"); } - this.startTime = getTime(startNode); - this.startTimeMicros = this.startTime * 1000; - this.endTime = getTime(endNode); - this.endTimeMicros = this.endTime * 1000; - this.result = DatadogUtilities.getResultTag(startNode); - this.errorObj = getErrorObj(endNode); + this.jenkinsResult = DatadogUtilities.getResultTag(endNode); + this.status = getStatus(startNode, jenkinsResult); + this.errorObj = DatadogUtilities.getErrorObj(endNode); this.unstableMessage = getUnstableMessage(startNode); + + NodeInfoAction nodeInfoAction = startNode.getAction(NodeInfoAction.class); + if (nodeInfoAction != null) { + this.nodeName = 
nodeInfoAction.getNodeName(); + this.nodeHostname = nodeInfoAction.getNodeHostname(); + this.nodeLabels = nodeInfoAction.getNodeLabels(); + this.workspace = nodeInfoAction.getNodeWorkspace(); + } } - public BuildPipelineNode(final StepAtomNode stepNode) { - this.key = new BuildPipelineNodeKey(stepNode.getId(), stepNode.getDisplayName()); - this.parents = new ArrayList<>(); - this.children = new ArrayList<>(); - this.internal = false; - this.id = stepNode.getId(); - this.name = stepNode.getDisplayName(); + public BuildPipelineNode(final Run run, final StepAtomNode stepNode, final FlowNode nextNode) { + this(run, stepNode); + this.type = NodeType.STEP; - this.args = ArgumentsAction.getFilteredArguments(stepNode); - - final StepData stepData = getStepData(stepNode); - if(stepData != null) { - this.envVars = stepData.getEnvVars(); - this.workspace = stepData.getWorkspace(); - this.nodeName = stepData.getNodeName(); - this.nodeHostname = stepData.getNodeHostname(); - this.nodeLabels = stepData.getNodeLabels(); - } - final StepTraceData stepTraceData = getStepTraceData(stepNode); - if(stepTraceData != null) { - this.spanId = stepTraceData.getSpanId(); + this.startTimeMicros = TimeUnit.MILLISECONDS.toMicros(DatadogUtilities.getTimeMillis(stepNode)); + if (startTimeMicros < 0) { + throw new IllegalStateException("Step " + stepNode.getId() + " (" + stepNode.getDisplayName() + ") has no start time info"); } - final FlowNodeQueueData queueData = getQueueData(stepNode); - if(queueData != null) { - this.nanosInQueue = queueData.getNanosInQueue(); + this.endTimeMicros = TimeUnit.MILLISECONDS.toMicros(DatadogUtilities.getTimeMillis(nextNode)); + if (endTimeMicros < 0) { + throw new IllegalStateException("Step " + nextNode.getId() + " (" + nextNode.getDisplayName() + ") has no time info"); } - this.startTime = getTime(stepNode); - this.startTimeMicros = this.startTime * 1000; - this.endTime = -1L; - this.endTimeMicros = this.endTime * 1000; - this.result = 
DatadogUtilities.getResultTag(stepNode); - this.errorObj = getErrorObj(stepNode); + this.jenkinsResult = DatadogUtilities.getResultTag(stepNode); + this.status = getStatus(stepNode, jenkinsResult); + this.errorObj = DatadogUtilities.getErrorObj(stepNode); this.unstableMessage = getUnstableMessage(stepNode); + + BlockStartNode enclosingStage = DatadogUtilities.getEnclosingStageNode(stepNode); + if (enclosingStage != null) { + NodeInfoAction enclosingStageInfoAction = enclosingStage.getAction(NodeInfoAction.class); + if (enclosingStageInfoAction != null) { + this.nodeName = enclosingStageInfoAction.getNodeName(); + this.nodeHostname = enclosingStageInfoAction.getNodeHostname(); + this.nodeLabels = enclosingStageInfoAction.getNodeLabels(); + this.workspace = enclosingStageInfoAction.getNodeWorkspace(); + } + } } + private BuildPipelineNode(final Run run, FlowNode startNode) { + TraceInfoAction traceInfoAction = run.getAction(TraceInfoAction.class); + if (traceInfoAction != null) { + Long spanId = traceInfoAction.removeOrCreate(startNode.getId()); + if (spanId != null) { + this.spanId = spanId; + } + } else { + throw new IllegalStateException("Step " + startNode.getId() + " (" + startNode.getDisplayName() + ") has no span info." 
+ + "It is possible that CI Visibility was enabled while this step was in progress"); + } + + BlockStartNode enclosingStage = DatadogUtilities.getEnclosingStageNode(startNode); + if (enclosingStage != null) { + this.stageId = enclosingStage.getId(); + this.stageName = enclosingStage.getDisplayName(); + + Long parentSpanId = traceInfoAction.getOrCreate(enclosingStage.getId()); + if (parentSpanId != null) { + this.parentSpanId = parentSpanId; + } + } + + BuildSpanAction buildSpanAction = run.getAction(BuildSpanAction.class); + if (buildSpanAction != null) { + TraceSpan.TraceSpanContext traceContext = buildSpanAction.getBuildSpanContext(); + this.traceId = traceContext.getTraceId(); + if (this.parentSpanId == -1) { + this.parentSpanId = traceContext.getSpanId(); + } + } else { + throw new IllegalStateException("Step " + startNode.getId() + " (" + startNode.getDisplayName() + ") has no trace info." + + "It is possible that CI Visibility was enabled while this step was in progress"); + } + + DequeueAction queueInfoAction = startNode.getAction(DequeueAction.class); + if (queueInfoAction != null) { + this.nanosInQueue = queueInfoAction.getQueueTimeNanos(); + } - public BuildPipelineNodeKey getKey() { - return key; + this.id = startNode.getId(); + this.name = startNode.getDisplayName(); + this.args = ArgumentsAction.getFilteredArguments(startNode); } public String getId() { @@ -214,34 +189,14 @@ public String getStageId() { return stageId; } - public void setStageId(String stageId) { - this.stageId = stageId; - } - public String getStageName() { return stageName; } - public void setStageName(String stageName) { - this.stageName = stageName; - } - - public boolean isInternal() { - return internal; - } - - public boolean isInitial() { - return initial; - } - public Map getArgs() { return args; } - public Map getEnvVars() { - return envVars; - } - public String getWorkspace() { return workspace; } @@ -254,30 +209,10 @@ public Set getNodeLabels() { return nodeLabels; } - 
public void setNodeName(String propagatedNodeName) { - this.nodeName = propagatedNodeName; - } - - public void setNodeLabels(final Set propagatedNodeLabels) { - this.nodeLabels = propagatedNodeLabels; - } - public String getNodeHostname() { return nodeHostname; } - public void setNodeHostname(final String propagatedNodeHostname) { - this.nodeHostname = propagatedNodeHostname; - } - - public long getStartTime() { - return startTime; - } - - public long getEndTime() { - return endTime; - } - public long getStartTimeMicros() { return startTimeMicros; } @@ -290,29 +225,12 @@ public long getNanosInQueue() { return nanosInQueue; } - public void setEndTime(long endTime) { - this.endTime = endTime; - this.endTimeMicros = TimeUnit.MILLISECONDS.toMicros(this.endTime); - } - - public void setNanosInQueue(long nanosInQueue) { - this.nanosInQueue = nanosInQueue; + public String getJenkinsResult() { + return jenkinsResult; } - public long getPropagatedNanosInQueue() { - return propagatedNanosInQueue; - } - - public void setPropagatedNanosInQueue(long propagatedNanosInQueue) { - this.propagatedNanosInQueue = propagatedNanosInQueue; - } - - public String getResult() { - return result; - } - - public void setResult(final String propagatedResult) { - this.result = propagatedResult; + public Status getStatus() { + return status; } public Throwable getErrorObj() { @@ -324,245 +242,42 @@ public String getUnstableMessage() { } public boolean isError() { - return CITags.STATUS_ERROR.equalsIgnoreCase(this.result); + return status == Status.ERROR; } public boolean isUnstable() { - return CITags.STATUS_UNSTABLE.equalsIgnoreCase(this.result); + return status == Status.UNSTABLE; } public long getSpanId() { return spanId; } - public List getParents(){ return parents; } - - public List getChildren() { - return children; + public long getParentSpanId() { + return parentSpanId; } - public BuildPipelineNode getChild(final BuildPipelineNodeKey id) { - if(children.isEmpty()) { - return null; - } - - 
for(final BuildPipelineNode child : children) { - if(id.equals(child.getKey())){ - return child; - } - } - - return null; + public long getTraceId() { + return traceId; } public NodeType getType() { return type; } - public String getCatchErrorResult() { - return catchErrorResult; - } - - // Used during the tree is being built in BuildPipeline class. - public void updateData(final BuildPipelineNode buildNode) { - this.stageName = buildNode.stageName; - this.stageId = buildNode.stageId; - this.type = buildNode.type; - this.internal = buildNode.internal; - this.initial = buildNode.initial; - this.args = buildNode.args; - this.envVars = buildNode.envVars; - this.workspace = buildNode.workspace; - this.nodeName = buildNode.nodeName; - this.nodeHostname = buildNode.nodeHostname; - this.nodeLabels = buildNode.nodeLabels; - this.startTime = buildNode.startTime; - this.startTimeMicros = buildNode.startTimeMicros; - this.endTime = buildNode.endTime; - this.endTimeMicros = buildNode.endTimeMicros; - this.nanosInQueue = buildNode.nanosInQueue; - this.result = buildNode.result; - this.catchErrorResult = buildNode.catchErrorResult; - this.errorObj = buildNode.errorObj; - this.unstableMessage = buildNode.unstableMessage; - this.parents.addAll(buildNode.parents); - this.spanId = buildNode.spanId; - } - - public void addChild(final BuildPipelineNode child) { - children.add(child); - child.parents.add(this); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - BuildPipelineNode that = (BuildPipelineNode) o; - return Objects.equals(key, that.key); - } - - @Override - public int hashCode() { - return Objects.hash(key); - } - - - /** - * Returns the startTime of a certain {@code FlowNode}, if it has time information. - * @param flowNode - * @return startTime of the flowNode in milliseconds. 
- */ - private static long getTime(FlowNode flowNode) { - TimingAction time = flowNode.getAction(TimingAction.class); - if(time != null) { - return time.getStartTime(); - } - return -1L; - } - - /** - * Returns the accessor to the logs of a certain {@code FlowNode}, if it has logs. - * @param flowNode - * @return accessor to the flowNode logs. - */ - private static AnnotatedLargeText getLogText(FlowNode flowNode) { - final LogAction logAction = flowNode.getAction(LogAction.class); - if(logAction != null) { - return logAction.getLogText(); - } - return null; - } - - /** - * Returns the {@code Throwable} of a certain {@code FlowNode}, if it has errors. - * @param flowNode - * @return throwable associated with a certain flowNode. - */ - private static Throwable getErrorObj(FlowNode flowNode) { - final ErrorAction errorAction = flowNode.getAction(ErrorAction.class); - return (errorAction != null) ? errorAction.getError() : null; + private static Status getStatus(FlowNode node, String jenkinsResult) { + Status nodeStatus = Status.fromJenkinsResult(jenkinsResult); + StatusAction statusAction = node.getAction(StatusAction.class); + return statusAction != null ? Status.combine(nodeStatus, statusAction.getStatus()) : nodeStatus; } /** * Returns the error message for unstable pipelines - * @param flowNode + * * @return error message */ private static String getUnstableMessage(FlowNode flowNode) { final WarningAction warningAction = flowNode.getAction(WarningAction.class); return (warningAction != null) ? warningAction.getMessage() : null; } - - @SuppressFBWarnings("NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE") - private StepData getStepData(final FlowNode flowNode) { - final Run run = getRun(flowNode); - if(run == null) { - logger.fine("Unable to get StepData from flowNode '"+flowNode.getDisplayName()+"'. 
Run is null"); - return null; - } - - final StepDataAction stepDataAction = run.getAction(StepDataAction.class); - if(stepDataAction == null) { - logger.fine("Unable to get StepData from flowNode '"+flowNode.getDisplayName()+"'. StepDataAction is null"); - return null; - } - - return stepDataAction.get(run, flowNode); - } - - @SuppressFBWarnings("NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE") - private StepTraceData getStepTraceData(FlowNode flowNode) { - final Run run = getRun(flowNode); - if(run == null) { - logger.fine("Unable to get StepTraceData from flowNode '"+flowNode.getDisplayName()+"'. Run is null"); - return null; - } - - final StepTraceDataAction stepTraceDataAction = run.getAction(StepTraceDataAction.class); - if(stepTraceDataAction == null) { - logger.fine("Unable to get StepTraceData from flowNode '"+flowNode.getDisplayName()+"'. StepTraceDataAction is null"); - return null; - } - - return stepTraceDataAction.get(run, flowNode); - } - - private FlowNodeQueueData getQueueData(FlowNode node) { - final Run run = getRun(node); - if(run == null) { - logger.fine("Unable to get QueueData from node '"+node.getDisplayName()+"'. Run is null"); - return null; - } - - PipelineQueueInfoAction pipelineQueueInfoAction = run.getAction(PipelineQueueInfoAction.class); - if (pipelineQueueInfoAction == null) { - logger.fine("Unable to get QueueInfoAction from node '"+node.getDisplayName()+"'. 
QueueInfoAction is null"); - return null; - } - - return pipelineQueueInfoAction.get(run, node.getId()); - } - - private Run getRun(final FlowNode node) { - if(node == null || node.getExecution() == null || node.getExecution().getOwner() == null) { - return null; - } - - try { - return (Run) node.getExecution().getOwner().getExecutable(); - } catch (Exception e){ - return null; - } - } - - public static class BuildPipelineNodeKey { - private final String id; - private final String name; - - public BuildPipelineNodeKey(final String stageId, final String stageName) { - this.id = stageId; - this.name = stageName; - } - - public String getId() { - return id; - } - - public String getName() { - return name; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - BuildPipelineNodeKey that = (BuildPipelineNodeKey) o; - return Objects.equals(id, that.id) && - Objects.equals(name, that.name); - } - - @Override - public int hashCode() { - return Objects.hash(id, name); - } - } - - - static class BuildPipelineNodeComparator implements Comparator, Serializable { - - @Override - public int compare(BuildPipelineNode o1, BuildPipelineNode o2) { - if(o1.getStartTime() == -1L || o2.getStartTime() == -1L) { - return 0; - } - - if(o1.getStartTime() < o2.getStartTime()) { - return -1; - } else if (o1.getStartTime() > o2.getStartTime()){ - return 1; - } - return 0; - } - } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/CIGlobalTagsAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/CIGlobalTagsAction.java deleted file mode 100644 index 986496ec6..000000000 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/CIGlobalTagsAction.java +++ /dev/null @@ -1,24 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.model; - -import hudson.model.InvisibleAction; - -import java.io.Serializable; -import java.util.HashMap; -import java.util.Map; - -public 
class CIGlobalTagsAction extends InvisibleAction implements Serializable { - - private final Map tags; - - public CIGlobalTagsAction(final Map tags) { - this.tags = tags != null ? tags : new HashMap<>(); - } - - public Map getTags() { - return tags; - } - - public void putAll(Map tags) { - this.tags.putAll(tags); - } -} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/DatadogPluginAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/DatadogPluginAction.java new file mode 100644 index 000000000..88cb56657 --- /dev/null +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/DatadogPluginAction.java @@ -0,0 +1,10 @@ +package org.datadog.jenkins.plugins.datadog.model; + +import hudson.model.InvisibleAction; +import java.io.Serializable; + +/** + * Marker interface for all actions that are added by the plugin + */ +public abstract class DatadogPluginAction extends InvisibleAction implements Serializable { +} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/FlowNodeQueueData.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/FlowNodeQueueData.java deleted file mode 100644 index b9f697030..000000000 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/FlowNodeQueueData.java +++ /dev/null @@ -1,44 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.model; - -import java.io.Serializable; - -/** - * Keeps the timestamps of a certain FlowNode based on the onEnterBuildable and onLeaveBuildable callbacks. 
- */ -public class FlowNodeQueueData implements Serializable { - - private static final long serialVersionUID = 1L; - - private final String nodeId; - private long enterBuildableNanos; - private long leaveBuildableNanos; - private long queueTimeNanos = -1L; - - public FlowNodeQueueData(final String nodeId) { - this.nodeId = nodeId; - } - - public void setEnterBuildableNanos(long timestampNanos) { - this.enterBuildableNanos = timestampNanos; - } - - public void setLeaveBuildableNanos(long timestampNanos) { - this.leaveBuildableNanos = timestampNanos; - this.queueTimeNanos = this.leaveBuildableNanos - this.enterBuildableNanos; - } - - public long getNanosInQueue() { - return this.queueTimeNanos; - } - - @Override - public String toString() { - final StringBuilder sb = new StringBuilder("FlowNodeQueueData{"); - sb.append("nodeId='").append(nodeId).append('\''); - sb.append(", enterBuildableNanos=").append(enterBuildableNanos); - sb.append(", leaveBuildableNanos=").append(leaveBuildableNanos); - sb.append(", queueTimeNanos=").append(queueTimeNanos); - sb.append('}'); - return sb.toString(); - } -} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitCommitAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitCommitAction.java index b516cf6a0..5311d2442 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitCommitAction.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitCommitAction.java @@ -1,126 +1,234 @@ package org.datadog.jenkins.plugins.datadog.model; -import hudson.model.InvisibleAction; - -import java.io.Serializable; +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.MarshallingContext; +import com.thoughtworks.xstream.converters.UnmarshallingContext; +import com.thoughtworks.xstream.io.HierarchicalStreamReader; +import com.thoughtworks.xstream.io.HierarchicalStreamWriter; +import java.util.Objects; +import 
org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter; /** * Keeps the Git commit related information. */ -public class GitCommitAction extends InvisibleAction implements Serializable { +public class GitCommitAction extends DatadogPluginAction { private static final long serialVersionUID = 1L; - private final String commit; - private final String message; - private final String authorName; - private final String authorEmail; - private final String authorDate; - private final String committerName; - private final String committerEmail; - private final String committerDate; - - private GitCommitAction(Builder builder) { - this.commit = builder.commit; - this.message = builder.message; - this.authorName = builder.authorName; - this.authorEmail = builder.authorEmail; - this.authorDate = builder.authorDate; - this.committerName = builder.committerName; - this.committerEmail = builder.committerEmail; - this.committerDate = builder.committerDate; - } - - public static Builder newBuilder() { - return new Builder(); - } - - public static class Builder { - private String commit; - private String message; - private String authorName; - private String authorEmail; - private String authorDate; - private String committerName; - private String committerEmail; - private String committerDate; - - private Builder(){} - - public Builder withCommit(final String commit) { - this.commit = commit; - return this; - } - - public Builder withMessage(final String message) { - this.message = message; - return this; - } - - public Builder withAuthorName(final String authorName) { - this.authorName = authorName; - return this; - } - - public Builder withAuthorEmail(final String authorEmail) { - this.authorEmail = authorEmail; - return this; - } - - public Builder withAuthorDate(final String authorDate) { - this.authorDate = authorDate; - return this; - } - - public Builder withCommitterName(final String committerName){ - this.committerName = committerName; - return this; - } + private 
volatile String tag; + private volatile String commit; + private volatile String message; + private volatile String authorName; + private volatile String authorEmail; + private volatile String authorDate; + private volatile String committerName; + private volatile String committerEmail; + private volatile String committerDate; + + public GitCommitAction() { + } - public Builder withCommitterEmail(final String committerEmail) { - this.committerEmail = committerEmail; - return this; - } + public GitCommitAction(String tag, String commit, String message, String authorName, String authorEmail, String authorDate, String committerName, String committerEmail, String committerDate) { + this.tag = tag; + this.commit = commit; + this.message = message; + this.authorName = authorName; + this.authorEmail = authorEmail; + this.authorDate = authorDate; + this.committerName = committerName; + this.committerEmail = committerEmail; + this.committerDate = committerDate; + } - public Builder withCommitterDate(final String committerDate) { - this.committerDate = committerDate; - return this; - } + public String getTag() { + return tag; + } - public GitCommitAction build() { - return new GitCommitAction(this); - } + public void setTag(String tag) { + this.tag = tag; } public String getCommit() { return commit; } + public void setCommit(String commit) { + this.commit = commit; + } + public String getMessage() { return message; } + public void setMessage(String message) { + this.message = message; + } + public String getAuthorName() { return authorName; } + public void setAuthorName(String authorName) { + this.authorName = authorName; + } + public String getAuthorEmail() { return authorEmail; } + public void setAuthorEmail(String authorEmail) { + this.authorEmail = authorEmail; + } + public String getAuthorDate() { return authorDate; } + public void setAuthorDate(String authorDate) { + this.authorDate = authorDate; + } + public String getCommitterName() { return committerName; } + public 
void setCommitterName(String committerName) { + this.committerName = committerName; + } + public String getCommitterEmail() { return committerEmail; } + public void setCommitterEmail(String committerEmail) { + this.committerEmail = committerEmail; + } + public String getCommitterDate() { return committerDate; } + + public void setCommitterDate(String committerDate) { + this.committerDate = committerDate; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GitCommitAction that = (GitCommitAction) o; + return Objects.equals(tag, that.tag) && Objects.equals(commit, that.commit) && Objects.equals(message, that.message) && Objects.equals(authorName, that.authorName) && Objects.equals(authorEmail, that.authorEmail) && Objects.equals(authorDate, that.authorDate) && Objects.equals(committerName, that.committerName) && Objects.equals(committerEmail, that.committerEmail) && Objects.equals(committerDate, that.committerDate); + } + + @Override + public int hashCode() { + return Objects.hash(tag, commit, message, authorName, authorEmail, authorDate, committerName, committerEmail, committerDate); + } + + @Override + public String toString() { + return "GitCommitAction{" + + "tag='" + tag + '\'' + + ", commit='" + commit + '\'' + + ", message='" + message + '\'' + + ", authorName='" + authorName + '\'' + + ", authorEmail='" + authorEmail + '\'' + + ", authorDate='" + authorDate + '\'' + + ", committerName='" + committerName + '\'' + + ", committerEmail='" + committerEmail + '\'' + + ", committerDate='" + committerDate + '\'' + + '}'; + } + + public static final class ConverterImpl extends DatadogActionConverter { + public ConverterImpl(XStream xs) { + } + + @Override + public boolean canConvert(Class type) { + return GitCommitAction.class == type; + } + + @Override + public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) { + GitCommitAction 
action = (GitCommitAction) source; + if (action.tag != null) { + writeField("tag", action.tag, writer, context); + } + if (action.commit != null) { + writeField("commit", action.commit, writer, context); + } + if (action.message != null) { + writeField("message", action.message, writer, context); + } + if (action.authorName != null) { + writeField("authorName", action.authorName, writer, context); + } + if (action.authorEmail != null) { + writeField("authorEmail", action.authorEmail, writer, context); + } + if (action.authorDate != null) { + writeField("authorDate", action.authorDate, writer, context); + } + if (action.committerName != null) { + writeField("committerName", action.committerName, writer, context); + } + if (action.committerEmail != null) { + writeField("committerEmail", action.committerEmail, writer, context); + } + if (action.committerDate != null) { + writeField("committerDate", action.committerDate, writer, context); + } + } + + @Override + public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) { + String tag = null; + String commit = null; + String message = null; + String authorName = null; + String authorEmail = null; + String authorDate = null; + String committerName = null; + String committerEmail = null; + String committerDate = null; + + while (reader.hasMoreChildren()) { + reader.moveDown(); + String fieldName = reader.getNodeName(); + switch (fieldName) { + case "tag": + tag = (String) context.convertAnother(null, String.class); + break; + case "commit": + commit = (String) context.convertAnother(null, String.class); + break; + case "message": + message = (String) context.convertAnother(null, String.class); + break; + case "authorName": + authorName = (String) context.convertAnother(null, String.class); + break; + case "authorEmail": + authorEmail = (String) context.convertAnother(null, String.class); + break; + case "authorDate": + authorDate = (String) context.convertAnother(null, String.class); + break; 
+ case "committerName": + committerName = (String) context.convertAnother(null, String.class); + break; + case "committerEmail": + committerEmail = (String) context.convertAnother(null, String.class); + break; + case "committerDate": + committerDate = (String) context.convertAnother(null, String.class); + break; + } + reader.moveUp(); + } + + return new GitCommitAction(tag, commit, message, authorName, authorEmail, authorDate, committerName, committerEmail, committerDate); + } + } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryAction.java index 29e586ca3..f99ccd078 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryAction.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryAction.java @@ -1,54 +1,122 @@ package org.datadog.jenkins.plugins.datadog.model; -import hudson.model.InvisibleAction; - -import java.io.Serializable; +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.MarshallingContext; +import com.thoughtworks.xstream.converters.UnmarshallingContext; +import com.thoughtworks.xstream.io.HierarchicalStreamReader; +import com.thoughtworks.xstream.io.HierarchicalStreamWriter; +import java.util.Objects; +import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter; /** * Keeps the Git repository related information. 
*/ -public class GitRepositoryAction extends InvisibleAction implements Serializable { +public class GitRepositoryAction extends DatadogPluginAction { private static final long serialVersionUID = 1L; - private final String repositoryURL; - private final String defaultBranch; + private volatile String repositoryURL; + private volatile String defaultBranch; + private volatile String branch; + + public GitRepositoryAction() { + } - private GitRepositoryAction(final Builder builder) { - this.repositoryURL = builder.repositoryURL; - this.defaultBranch = builder.defaultBranch; + public GitRepositoryAction(String repositoryURL, String defaultBranch, String branch) { + this.repositoryURL = repositoryURL; + this.defaultBranch = defaultBranch; + this.branch = branch; } - public String getRepositoryURL(){ + public String getRepositoryURL() { return repositoryURL; } + public void setRepositoryURL(String repositoryURL) { + this.repositoryURL = repositoryURL; + } + public String getDefaultBranch() { return defaultBranch; } - public static Builder newBuilder() { - return new Builder(); + public void setDefaultBranch(String defaultBranch) { + this.defaultBranch = defaultBranch; + } + + public String getBranch() { + return branch; } - public static class Builder { - private String repositoryURL; - private String defaultBranch; + public void setBranch(String branch) { + this.branch = branch; + } - private Builder(){} + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GitRepositoryAction that = (GitRepositoryAction) o; + return Objects.equals(repositoryURL, that.repositoryURL) && Objects.equals(defaultBranch, that.defaultBranch) && Objects.equals(branch, that.branch); + } + + @Override + public int hashCode() { + return Objects.hash(repositoryURL, defaultBranch, branch); + } + + @Override + public String toString() { + return "GitRepositoryAction{" + + "repositoryURL='" + repositoryURL + '\'' + 
+ ", defaultBranch='" + defaultBranch + '\'' + + ", branch='" + branch + '\'' + + '}'; + } + + public static final class ConverterImpl extends DatadogActionConverter { + public ConverterImpl(XStream xs) { + } - public Builder withRepositoryURL(final String repositoryURL) { - this.repositoryURL = repositoryURL; - return this; + @Override + public boolean canConvert(Class type) { + return GitRepositoryAction.class == type; } - public Builder withDefaultBranch(final String defaultBranch) { - this.defaultBranch = defaultBranch; - return this; + @Override + public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) { + GitRepositoryAction action = (GitRepositoryAction) source; + if (action.repositoryURL != null) { + writeField("repositoryURL", action.repositoryURL, writer, context); + } + if (action.defaultBranch != null) { + writeField("defaultBranch", action.defaultBranch, writer, context); + } + if (action.branch != null) { + writeField("branch", action.branch, writer, context); + } } - public GitRepositoryAction build(){ - return new GitRepositoryAction(this); + @Override + public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) { + GitRepositoryAction gitRepositoryAction = new GitRepositoryAction(); + while (reader.hasMoreChildren()) { + reader.moveDown(); + String fieldName = reader.getNodeName(); + switch (fieldName) { + case "repositoryURL": + gitRepositoryAction.setRepositoryURL((String) context.convertAnother(null, String.class)); + break; + case "defaultBranch": + gitRepositoryAction.setDefaultBranch((String) context.convertAnother(null, String.class)); + break; + case "branch": + gitRepositoryAction.setBranch((String) context.convertAnother(null, String.class)); + break; + } + reader.moveUp(); + } + return gitRepositoryAction; } } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineNodeInfoAction.java 
b/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineNodeInfoAction.java index 052d4edd3..8ec2cd3ad 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineNodeInfoAction.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineNodeInfoAction.java @@ -1,21 +1,27 @@ package org.datadog.jenkins.plugins.datadog.model; -import hudson.model.InvisibleAction; - -import java.io.Serializable; +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.MarshallingContext; +import com.thoughtworks.xstream.converters.UnmarshallingContext; +import com.thoughtworks.xstream.io.HierarchicalStreamReader; +import com.thoughtworks.xstream.io.HierarchicalStreamWriter; +import java.util.Collections; +import java.util.Objects; import java.util.Set; +import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter; -public class PipelineNodeInfoAction extends InvisibleAction implements Serializable { +public class PipelineNodeInfoAction extends DatadogPluginAction { private final String nodeName; private final Set nodeLabels; - private final String nodeHostname; + private final String workspace; - public PipelineNodeInfoAction(final String nodeName, final Set nodeLabels, final String nodeHostname) { + public PipelineNodeInfoAction(final String nodeName, final Set nodeLabels, final String nodeHostname, String workspace) { this.nodeName = nodeName; this.nodeLabels = nodeLabels; this.nodeHostname = nodeHostname; + this.workspace = workspace; } public String getNodeName() { @@ -29,4 +35,88 @@ public Set getNodeLabels() { public String getNodeHostname() { return nodeHostname; } + + public String getWorkspace() { + return workspace; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PipelineNodeInfoAction that = (PipelineNodeInfoAction) o; + return Objects.equals(nodeName, that.nodeName) && Objects.equals(nodeLabels, 
that.nodeLabels) && Objects.equals(nodeHostname, that.nodeHostname) && Objects.equals(workspace, that.workspace); + } + + @Override + public int hashCode() { + return Objects.hash(nodeName, nodeLabels, nodeHostname, workspace); + } + + @Override + public String toString() { + return "PipelineNodeInfoAction{" + + "nodeName='" + nodeName + '\'' + + ", nodeLabels=" + nodeLabels + + ", nodeHostname='" + nodeHostname + '\'' + + ", workspace='" + workspace + '\'' + + '}'; + } + + public static final class ConverterImpl extends DatadogActionConverter { + public ConverterImpl(XStream xs) { + } + + @Override + public boolean canConvert(Class type) { + return PipelineNodeInfoAction.class == type; + } + + @Override + public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) { + PipelineNodeInfoAction action = (PipelineNodeInfoAction) source; + if (action.nodeName != null) { + writeField("nodeName", action.nodeName, writer, context); + } + if (action.nodeHostname != null) { + writeField("nodeHostname", action.nodeHostname, writer, context); + } + if (action.nodeLabels != null && !action.nodeLabels.isEmpty()) { + writeField("nodeLabels", action.nodeLabels, writer, context); + } + if (action.workspace != null) { + writeField("workspace", action.workspace, writer, context); + } + } + + @Override + public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) { + String nodeName = null; + String nodeHostname = null; + Set nodeLabels = Collections.emptySet(); + String workspace = null; + + while (reader.hasMoreChildren()) { + reader.moveDown(); + String fieldName = reader.getNodeName(); + switch (fieldName) { + case "nodeName": + nodeName = (String) context.convertAnother(null, String.class); + break; + case "nodeHostname": + nodeHostname = (String) context.convertAnother(null, String.class); + break; + case "nodeLabels": + nodeLabels = (Set) context.convertAnother(null, Set.class); + break; + case "workspace": + 
workspace = (String) context.convertAnother(null, String.class); + break; + } + reader.moveUp(); + } + + return new PipelineNodeInfoAction(nodeName, nodeLabels, nodeHostname, workspace); + } + } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineQueueInfoAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineQueueInfoAction.java index 5f5599cc1..343dfbf47 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineQueueInfoAction.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineQueueInfoAction.java @@ -1,40 +1,106 @@ package org.datadog.jenkins.plugins.datadog.model; -import hudson.model.InvisibleAction; -import hudson.model.Run; +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.MarshallingContext; +import com.thoughtworks.xstream.converters.UnmarshallingContext; +import com.thoughtworks.xstream.io.HierarchicalStreamReader; +import com.thoughtworks.xstream.io.HierarchicalStreamWriter; +import java.util.Objects; +import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter; -import java.io.Serializable; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; - -/** - * Keeps the Queue Info related to the FlowNode scheduled to be executed. 
- */ -public class PipelineQueueInfoAction extends InvisibleAction implements Serializable { +public class PipelineQueueInfoAction extends DatadogPluginAction { private static final long serialVersionUID = 1L; - private final ConcurrentMap queueDataByFlowNode; + private volatile long queueTimeMillis = -1; + private volatile long propagatedQueueTimeMillis = -1; + + public PipelineQueueInfoAction() {} + + public PipelineQueueInfoAction(long queueTimeMillis, long propagatedQueueTimeMillis) { + this.queueTimeMillis = queueTimeMillis; + this.propagatedQueueTimeMillis = propagatedQueueTimeMillis; + } + + public long getQueueTimeMillis() { + return queueTimeMillis; + } - public PipelineQueueInfoAction() { - this.queueDataByFlowNode = new ConcurrentHashMap<>(); + public PipelineQueueInfoAction setQueueTimeMillis(long queueTimeMillis) { + this.queueTimeMillis = queueTimeMillis; + return this; } - public FlowNodeQueueData get(final Run run, String flowNodeId) { - return this.queueDataByFlowNode.get(flowNodeId); + public long getPropagatedQueueTimeMillis() { + return propagatedQueueTimeMillis; } - public void put(final Run run, String flowNodeId, FlowNodeQueueData data) { - this.queueDataByFlowNode.put(flowNodeId, data); + public PipelineQueueInfoAction setPropagatedQueueTimeMillis(long propagatedQueueTimeMillis) { + this.propagatedQueueTimeMillis = propagatedQueueTimeMillis; + return this; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PipelineQueueInfoAction that = (PipelineQueueInfoAction) o; + return queueTimeMillis == that.queueTimeMillis && propagatedQueueTimeMillis == that.propagatedQueueTimeMillis; + } + + @Override + public int hashCode() { + return Objects.hash(queueTimeMillis, propagatedQueueTimeMillis); } @Override public String toString() { - final StringBuilder sb = new StringBuilder("PipelineQueueInfoAction{"); - 
sb.append("queueDataByFlowNode=").append(queueDataByFlowNode); - sb.append('}'); - return sb.toString(); + return "QueueInfoAction{" + + "queueTimeMillis=" + queueTimeMillis + + ", propagatedQueueTimeMillis=" + propagatedQueueTimeMillis + + '}'; + } + + public static final class ConverterImpl extends DatadogActionConverter { + public ConverterImpl(XStream xs) { + } + + @Override + public boolean canConvert(Class type) { + return PipelineQueueInfoAction.class == type; + } + + @Override + public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) { + PipelineQueueInfoAction action = (PipelineQueueInfoAction) source; + if (action.queueTimeMillis != -1) { + writeField("queueTimeMillis", action.queueTimeMillis, writer, context); + } + if (action.propagatedQueueTimeMillis != -1) { + writeField("propagatedQueueTimeMillis", action.propagatedQueueTimeMillis, writer, context); + } + } + + @Override + public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) { + long queueTimeMillis = -1; + long propagatedQueueTimeMillis = -1; + + while (reader.hasMoreChildren()) { + reader.moveDown(); + String fieldName = reader.getNodeName(); + switch (fieldName) { + case "queueTimeMillis": + queueTimeMillis = (long) context.convertAnother(null, long.class); + break; + case "propagatedQueueTimeMillis": + propagatedQueueTimeMillis = (long) context.convertAnother(null, long.class); + break; + } + reader.moveUp(); + } + + return new PipelineQueueInfoAction(queueTimeMillis, propagatedQueueTimeMillis); + } } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/StageBreakdownAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/StageBreakdownAction.java deleted file mode 100644 index bc80aa1f2..000000000 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/StageBreakdownAction.java +++ /dev/null @@ -1,29 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.model; - -import 
hudson.model.InvisibleAction; - -import java.io.Serializable; -import java.util.HashMap; -import java.util.Map; - -/** - * Keeps the Stage breakdown related information. - */ -public class StageBreakdownAction extends InvisibleAction implements Serializable { - - private static final long serialVersionUID = 1L; - - private final Map stageDataByName; - - public StageBreakdownAction() { - this.stageDataByName = new HashMap<>(); - } - - public Map getStageDataByName() { - return stageDataByName; - } - - public void put(String name, StageData stageData) { - this.stageDataByName.put(name, stageData); - } -} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/Status.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/Status.java new file mode 100644 index 000000000..7d293d111 --- /dev/null +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/Status.java @@ -0,0 +1,35 @@ +package org.datadog.jenkins.plugins.datadog.model; + +import org.datadog.jenkins.plugins.datadog.DatadogUtilities; + +public enum Status { + UNKNOWN((byte) 0), SUCCESS((byte) 1), UNSTABLE((byte) 2), ERROR((byte) 3), SKIPPED((byte) 4), CANCELED((byte) 5); + + private final byte weight; + + Status(byte weight) { + this.weight = weight; + } + + public String toTag() { + return toString().toLowerCase(); + } + + public static Status fromJenkinsResult(String status) { + return valueOf(DatadogUtilities.statusFromResult(status).toUpperCase()); + } + + /** + * Combines two statuses, returning the worst one + * (based on {@link hudson.model.Result#combine(hudson.model.Result, hudson.model.Result)}). + */ + public static Status combine(Status a, Status b) { + if (a == null) { + return b; + } + if (b == null) { + return a; + } + return a.weight > b.weight ? 
a : b; + } +} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/StepData.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/StepData.java deleted file mode 100644 index 4745aba2e..000000000 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/StepData.java +++ /dev/null @@ -1,157 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.model; - -import hudson.EnvVars; -import hudson.FilePath; -import hudson.model.Computer; -import org.datadog.jenkins.plugins.datadog.DatadogGlobalConfiguration; -import org.datadog.jenkins.plugins.datadog.DatadogUtilities; -import org.datadog.jenkins.plugins.datadog.audit.DatadogAudit; -import org.jenkinsci.plugins.workflow.steps.StepContext; - -import java.io.Serializable; -import java.util.Collections; -import java.util.Map; -import java.util.Set; -import java.util.logging.Logger; -import java.util.stream.Collectors; - -public class StepData implements Serializable { - - private static final long serialVersionUID = 1L; - - private static transient final Logger logger = Logger.getLogger(StepData.class.getName()); - - private final Map envVars; - private final String nodeName; - private final String nodeHostname; - private final String workspace; - private final Set nodeLabels; - - public StepData(final StepContext stepContext){ - long start = System.currentTimeMillis(); - try { - this.envVars = getEnvVars(stepContext); - this.nodeName = getNodeName(stepContext); - this.nodeHostname = getNodeHostname(stepContext, this.envVars); - this.workspace = getNodeWorkspace(stepContext); - this.nodeLabels = getNodeLabels(stepContext); - } finally { - long end = System.currentTimeMillis(); - DatadogAudit.log("StepData.ctor", start, end); - } - } - - public Map getEnvVars() { - return envVars; - } - - public String getNodeName() { - return nodeName; - } - - public String getNodeHostname() { - return nodeHostname; - } - - public String getWorkspace() { - return workspace; - } - - public Set getNodeLabels() { - 
return nodeLabels; - } - - /** - * Returns the workspace filepath of the remote node which is executing a determined {@code Step} - * @param stepContext - * @return absolute filepath of the workspace of the remote node. - */ - private String getNodeWorkspace(final StepContext stepContext) { - FilePath filePath = null; - try { - filePath = stepContext.get(FilePath.class); - } catch (Exception e){ - logger.fine("Unable to extract FilePath information of the StepContext."); - } - - if(filePath == null) { - return null; - } - - return filePath.getRemote(); - } - - /** - * Returns the hostname of the remote node which is executing a determined {@code Step} - * See {@code Computer.getHostName()} - * @param stepContext - * @return hostname of the remote node. - */ - private String getNodeHostname(final StepContext stepContext, Map envVars) { - String hostname = envVars.get(DatadogGlobalConfiguration.DD_CI_HOSTNAME); - if (hostname == null) { - Computer computer; - try { - computer = stepContext.get(Computer.class); - if(computer != null) { - hostname = computer.getHostName(); - } - } catch (Exception e){ - logger.fine("Unable to extract hostname from StepContext."); - } - } - return hostname; - } - - - /** - * Returns the nodeName of the remote node which is executing a determined {@code Step} - * @param stepContext - * @return node name of the remote node. - */ - private String getNodeName(StepContext stepContext) { - try { - Computer computer = stepContext.get(Computer.class); - return DatadogUtilities.getNodeName(computer); - } catch (Exception e){ - logger.fine("Unable to extract the node name from StepContext."); - return null; - } - } - - - /** - * Returns the nodeLabels of the remote node which is executing a determined {@code Step} - * @param stepContext - * @return node labels of the remote node. 
- */ - private Set getNodeLabels(StepContext stepContext) { - try { - Computer computer = stepContext.get(Computer.class); - return DatadogUtilities.getNodeLabels(computer); - } catch (Exception e) { - logger.fine("Unable to extract the node labels from StepContext."); - return Collections.emptySet(); - } - } - - - /** - * Returns {@code Map} with environment variables of a certain {@code StepContext} - * @param stepContext - * @return map with environment variables of a stepContext. - */ - private Map getEnvVars(StepContext stepContext) { - EnvVars envVarsObj = null; - try { - envVarsObj = stepContext.get(EnvVars.class); - } catch (Exception e){ - logger.fine("Unable to extract environment variables from StepContext."); - } - - if(envVarsObj == null) { - return Collections.emptyMap(); - } - return envVarsObj.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - } -} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/StepTraceData.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/StepTraceData.java deleted file mode 100644 index 82f38f194..000000000 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/StepTraceData.java +++ /dev/null @@ -1,16 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.model; - -import java.io.Serializable; - -public class StepTraceData implements Serializable { - - private final long spanId; - - public StepTraceData(final long spanId) { - this.spanId = spanId; - } - - public long getSpanId() { - return spanId; - } -} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/TraceInfoAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/TraceInfoAction.java new file mode 100644 index 000000000..e398b9146 --- /dev/null +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/TraceInfoAction.java @@ -0,0 +1,82 @@ +package org.datadog.jenkins.plugins.datadog.model; + +import com.thoughtworks.xstream.XStream; +import 
com.thoughtworks.xstream.converters.MarshallingContext; +import com.thoughtworks.xstream.converters.UnmarshallingContext; +import com.thoughtworks.xstream.io.HierarchicalStreamReader; +import com.thoughtworks.xstream.io.HierarchicalStreamWriter; +import java.util.Collections; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import org.datadog.jenkins.plugins.datadog.traces.IdGenerator; +import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter; + +public class TraceInfoAction extends DatadogPluginAction { + + private final ConcurrentMap spanIdByNodeId; + + public TraceInfoAction() { + this(Collections.emptyMap()); + } + + public TraceInfoAction(Map spanIdByNodeId) { + this.spanIdByNodeId = new ConcurrentHashMap<>(spanIdByNodeId); + } + + public Long getOrCreate(String flowNodeId) { + return spanIdByNodeId.computeIfAbsent(flowNodeId, k -> IdGenerator.generate()); + } + + public Long removeOrCreate(String flowNodeId) { + Long existingId = spanIdByNodeId.remove(flowNodeId); + if (existingId != null) { + return existingId; + } else { + return IdGenerator.generate(); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TraceInfoAction that = (TraceInfoAction) o; + return Objects.equals(spanIdByNodeId, that.spanIdByNodeId); + } + + @Override + public int hashCode() { + return Objects.hash(spanIdByNodeId); + } + + @Override + public String toString() { + return "TraceInfoAction{" + + "infoByFlowNodeId=" + spanIdByNodeId + + '}'; + } + + public static final class ConverterImpl extends DatadogActionConverter { + public ConverterImpl(XStream xs) { + } + + @Override + public boolean canConvert(Class type) { + return TraceInfoAction.class == type; + } + + @Override + public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) { + 
TraceInfoAction action = (TraceInfoAction) source; + writeField("infoByFlowNodeId", action.spanIdByNodeId, writer, context); + } + + @Override + public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) { + Map infoByFlowNodeId = readField(reader, context, Map.class); + return new TraceInfoAction(infoByFlowNodeId); + } + } +} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/DequeueAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/DequeueAction.java new file mode 100644 index 000000000..dbd2aa832 --- /dev/null +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/DequeueAction.java @@ -0,0 +1,65 @@ +package org.datadog.jenkins.plugins.datadog.model.node; + +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.MarshallingContext; +import com.thoughtworks.xstream.converters.UnmarshallingContext; +import com.thoughtworks.xstream.io.HierarchicalStreamReader; +import com.thoughtworks.xstream.io.HierarchicalStreamWriter; +import java.util.Objects; +import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter; + +public class DequeueAction extends QueueInfoAction { + + private static final long serialVersionUID = 1L; + + private final long queueTimeNanos; + + public DequeueAction(long queueTimeNanos) { + this.queueTimeNanos = queueTimeNanos; + } + + public long getQueueTimeNanos() { + return queueTimeNanos; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DequeueAction action = (DequeueAction) o; + return queueTimeNanos == action.queueTimeNanos; + } + + @Override + public int hashCode() { + return Objects.hash(queueTimeNanos); + } + + @Override + public String toString() { + return "DequeueAction{queueTimeNanos=" + queueTimeNanos + '}'; + } + + public static final class ConverterImpl extends DatadogActionConverter { + public 
ConverterImpl(XStream xs) { + } + + @Override + public boolean canConvert(Class type) { + return DequeueAction.class == type; + } + + @Override + public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) { + DequeueAction action = (DequeueAction) source; + writeField("queueTimeNanos", action.queueTimeNanos, writer, context); + context.convertAnother(action.queueTimeNanos); + } + + @Override + public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) { + long queueTimeNanos = readField(reader, context, long.class); + return new DequeueAction(queueTimeNanos); + } + } +} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/EnqueueAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/EnqueueAction.java new file mode 100644 index 000000000..433c22c33 --- /dev/null +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/EnqueueAction.java @@ -0,0 +1,64 @@ +package org.datadog.jenkins.plugins.datadog.model.node; + +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.MarshallingContext; +import com.thoughtworks.xstream.converters.UnmarshallingContext; +import com.thoughtworks.xstream.io.HierarchicalStreamReader; +import com.thoughtworks.xstream.io.HierarchicalStreamWriter; +import java.util.Objects; +import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter; + +public class EnqueueAction extends QueueInfoAction { + + private static final long serialVersionUID = 1L; + + private final long timestampNanos; + + public EnqueueAction(long timestampNanos) { + this.timestampNanos = timestampNanos; + } + + public long getTimestampNanos() { + return timestampNanos; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EnqueueAction that = (EnqueueAction) o; + return timestampNanos == that.timestampNanos; + } + + @Override + 
public int hashCode() { + return Objects.hash(timestampNanos); + } + + @Override + public String toString() { + return "EnqueueAction{timestampNanos=" + timestampNanos + '}'; + } + + public static final class ConverterImpl extends DatadogActionConverter { + public ConverterImpl(XStream xs) { + } + + @Override + public boolean canConvert(Class type) { + return EnqueueAction.class == type; + } + + @Override + public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) { + EnqueueAction action = (EnqueueAction) source; + writeField("timestampNanos", action.timestampNanos, writer, context); + } + + @Override + public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) { + long timestampNanos = readField(reader, context, long.class); + return new EnqueueAction(timestampNanos); + } + } +} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/NodeInfoAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/NodeInfoAction.java new file mode 100644 index 000000000..fcda54a34 --- /dev/null +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/NodeInfoAction.java @@ -0,0 +1,125 @@ +package org.datadog.jenkins.plugins.datadog.model.node; + +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.MarshallingContext; +import com.thoughtworks.xstream.converters.UnmarshallingContext; +import com.thoughtworks.xstream.io.HierarchicalStreamReader; +import com.thoughtworks.xstream.io.HierarchicalStreamWriter; +import java.util.Collections; +import java.util.Objects; +import java.util.Set; +import org.datadog.jenkins.plugins.datadog.model.DatadogPluginAction; +import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter; + +public class NodeInfoAction extends DatadogPluginAction { + + private static final long serialVersionUID = 1L; + + private final String nodeName; + private final String nodeHostname; + private final Set nodeLabels; 
+ private final String nodeWorkspace; + + public NodeInfoAction(String nodeName, String nodeHostname, Set nodeLabels, String nodeWorkspace) { + this.nodeName = nodeName; + this.nodeHostname = nodeHostname; + this.nodeLabels = nodeLabels; + this.nodeWorkspace = nodeWorkspace; + } + + public String getNodeName() { + return nodeName; + } + + public String getNodeHostname() { + return nodeHostname; + } + + public Set getNodeLabels() { + return nodeLabels; + } + + public String getNodeWorkspace() { + return nodeWorkspace; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + NodeInfoAction that = (NodeInfoAction) o; + return Objects.equals(nodeName, that.nodeName) && Objects.equals(nodeHostname, that.nodeHostname) && Objects.equals(nodeLabels, that.nodeLabels) && Objects.equals(nodeWorkspace, that.nodeWorkspace); + } + + @Override + public int hashCode() { + return Objects.hash(nodeName, nodeHostname, nodeLabels, nodeWorkspace); + } + + @Override + public String toString() { + return "NodeInfoAction{" + + "nodeName='" + nodeName + '\'' + + ", nodeHostname='" + nodeHostname + '\'' + + ", nodeLabels=" + nodeLabels + + ", nodeWorkspace=" + nodeWorkspace + + '}'; + } + + public static final class ConverterImpl extends DatadogActionConverter { + public ConverterImpl(XStream xs) { + } + + @Override + public boolean canConvert(Class type) { + return NodeInfoAction.class == type; + } + + @Override + public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) { + NodeInfoAction action = (NodeInfoAction) source; + if (action.nodeName != null) { + writeField("nodeName", action.nodeName, writer, context); + } + if (action.nodeHostname != null) { + writeField("nodeHostname", action.nodeHostname, writer, context); + } + if (action.nodeLabels != null && !action.nodeLabels.isEmpty()) { + writeField("nodeLabels", action.nodeLabels, writer, context); + } + if 
(action.nodeWorkspace != null && !action.nodeWorkspace.isEmpty()) { + writeField("nodeWorkspace", action.nodeWorkspace, writer, context); + } + } + + @Override + public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) { + String nodeName = null; + String nodeHostname = null; + Set nodeLabels = Collections.emptySet(); + String nodeWorkspace = null; + + while (reader.hasMoreChildren()) { + reader.moveDown(); + String fieldName = reader.getNodeName(); + switch (fieldName) { + case "nodeName": + nodeName = (String) context.convertAnother(null, String.class); + break; + case "nodeHostname": + nodeHostname = (String) context.convertAnother(null, String.class); + break; + case "nodeLabels": + nodeLabels = (Set) context.convertAnother(null, Set.class); + break; + case "nodeWorkspace": + nodeWorkspace = (String) context.convertAnother(null, String.class); + break; + } + reader.moveUp(); + } + return new NodeInfoAction(nodeName, nodeHostname, nodeLabels, nodeWorkspace); + } + } + +} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/QueueInfoAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/QueueInfoAction.java new file mode 100644 index 000000000..9fdbbb533 --- /dev/null +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/QueueInfoAction.java @@ -0,0 +1,10 @@ +package org.datadog.jenkins.plugins.datadog.model.node; + +import org.datadog.jenkins.plugins.datadog.model.DatadogPluginAction; + +/** + * A marker interface for enqueue and dequeue actions. + * Allows to replace an enqueue action with a dequeue action in one call, avoiding writing to disk twice. 
+ */ +public class QueueInfoAction extends DatadogPluginAction { +} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/StatusAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/StatusAction.java new file mode 100644 index 000000000..cad240997 --- /dev/null +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/StatusAction.java @@ -0,0 +1,77 @@ +package org.datadog.jenkins.plugins.datadog.model.node; + +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.MarshallingContext; +import com.thoughtworks.xstream.converters.UnmarshallingContext; +import com.thoughtworks.xstream.io.HierarchicalStreamReader; +import com.thoughtworks.xstream.io.HierarchicalStreamWriter; +import java.util.Objects; +import org.datadog.jenkins.plugins.datadog.model.DatadogPluginAction; +import org.datadog.jenkins.plugins.datadog.model.Status; +import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter; + +public class StatusAction extends DatadogPluginAction { + + private static final long serialVersionUID = 1L; + + private final Status status; + private final boolean propagate; + + public StatusAction(Status status, boolean propagate) { + this.status = status; + this.propagate = propagate; + } + + public Status getStatus() { + return status; + } + + public boolean isPropagate() { + return propagate; + } + + @Override + public String toString() { + return "StatusAction{" + + "status='" + status + '\'' + + ", propagate=" + propagate + + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StatusAction that = (StatusAction) o; + return propagate == that.propagate && status == that.status; + } + + @Override + public int hashCode() { + return Objects.hash(status, propagate); + } + + public static final class ConverterImpl extends DatadogActionConverter { + public ConverterImpl(XStream xs) { + } + + 
@Override + public boolean canConvert(Class type) { + return StatusAction.class == type; + } + + @Override + public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) { + StatusAction action = (StatusAction) source; + writeField("status", action.status, writer, context); + writeField("propagate", action.propagate, writer, context); + } + + @Override + public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) { + Status status = readField(reader, context, Status.class); + boolean propagate = readField(reader, context, boolean.class); + return new StatusAction(status, propagate); + } + } +} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/steps/DatadogOptions.java b/src/main/java/org/datadog/jenkins/plugins/datadog/steps/DatadogOptions.java index 24f3e2468..e2e056930 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/steps/DatadogOptions.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/steps/DatadogOptions.java @@ -14,7 +14,9 @@ import jenkins.YesNoMaybe; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.logs.DatadogTaskListenerDecorator; +import org.datadog.jenkins.plugins.datadog.model.BuildData; import org.datadog.jenkins.plugins.datadog.tracer.DatadogTracerJobProperty; +import org.datadog.jenkins.plugins.datadog.traces.BuildSpanAction; import org.jenkinsci.plugins.workflow.job.WorkflowRun; import org.jenkinsci.plugins.workflow.log.TaskListenerDecorator; import org.jenkinsci.plugins.workflow.steps.BodyExecutionCallback; diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/BuildSpanAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/BuildSpanAction.java index 05a05da32..fd5a35285 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/BuildSpanAction.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/BuildSpanAction.java @@ -1,31 +1,101 @@ 
package org.datadog.jenkins.plugins.datadog.traces; -import hudson.model.InvisibleAction; -import org.datadog.jenkins.plugins.datadog.model.BuildData; +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.MarshallingContext; +import com.thoughtworks.xstream.converters.UnmarshallingContext; +import com.thoughtworks.xstream.io.HierarchicalStreamReader; +import com.thoughtworks.xstream.io.HierarchicalStreamWriter; +import java.util.Objects; +import org.datadog.jenkins.plugins.datadog.model.DatadogPluginAction; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; - -import java.io.Serializable; +import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter; /** * Keeps build span propagation */ -public class BuildSpanAction extends InvisibleAction implements Serializable { +public class BuildSpanAction extends DatadogPluginAction { private static final long serialVersionUID = 1L; - private final BuildData buildData; private final TraceSpan.TraceSpanContext buildSpanContext; + private volatile String buildUrl; - public BuildSpanAction(final BuildData buildData, final TraceSpan.TraceSpanContext buildSpanContext){ - this.buildData = buildData; + public BuildSpanAction(final TraceSpan.TraceSpanContext buildSpanContext){ this.buildSpanContext = buildSpanContext; } - public BuildData getBuildData() { - return buildData; + public BuildSpanAction(TraceSpan.TraceSpanContext buildSpanContext, String buildUrl) { + this.buildSpanContext = buildSpanContext; + this.buildUrl = buildUrl; } public TraceSpan.TraceSpanContext getBuildSpanContext() { return buildSpanContext; } + + public String getBuildUrl() { + return buildUrl; + } + + public BuildSpanAction setBuildUrl(String buildUrl) { + this.buildUrl = buildUrl; + return this; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BuildSpanAction that = (BuildSpanAction) o; + return 
Objects.equals(buildSpanContext, that.buildSpanContext) && Objects.equals(buildUrl, that.buildUrl); + } + + @Override + public int hashCode() { + return Objects.hash(buildSpanContext, buildUrl); + } + + @Override + public String toString() { + return "BuildSpanAction{" + + "buildSpanContext=" + buildSpanContext + + ", buildUrl=" + buildUrl + + '}'; + } + + public static final class ConverterImpl extends DatadogActionConverter { + public ConverterImpl(XStream xs) { + } + + @Override + public boolean canConvert(Class type) { + return BuildSpanAction.class == type; + } + + @Override + public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) { + BuildSpanAction action = (BuildSpanAction) source; + writeField("spanContext", action.buildSpanContext, writer, context); + if (action.buildUrl != null) { + writeField("buildUrl", action.buildUrl, writer, context); + } + } + + @Override + public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) { + TraceSpan.TraceSpanContext spanContext = readField(reader, context, TraceSpan.TraceSpanContext.class); + + String buildUrl = null; + while (reader.hasMoreChildren()) { + reader.moveDown(); + String fieldName = reader.getNodeName(); + if ("buildUrl".equals(fieldName)) { + buildUrl = (String) context.convertAnother(null, String.class); + } + reader.moveUp(); + } + + return new BuildSpanAction(spanContext, buildUrl); + } + } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBaseBuildLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBaseBuildLogic.java index d5852d3d2..7dfd2003d 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBaseBuildLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBaseBuildLogic.java @@ -2,22 +2,28 @@ import hudson.model.Cause; import hudson.model.Run; +import java.util.ArrayDeque; import java.util.ArrayList; import 
java.util.Collections; import java.util.HashSet; import java.util.List; -import java.util.Map; +import java.util.Queue; import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.logging.Logger; import javax.annotation.Nullable; import net.sf.json.JSONObject; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; import org.datadog.jenkins.plugins.datadog.model.PipelineNodeInfoAction; -import org.datadog.jenkins.plugins.datadog.model.StageBreakdownAction; import org.datadog.jenkins.plugins.datadog.model.StageData; import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings; import org.datadog.jenkins.plugins.datadog.util.json.JsonUtils; +import org.jenkinsci.plugins.workflow.flow.FlowExecution; +import org.jenkinsci.plugins.workflow.graph.BlockEndNode; +import org.jenkinsci.plugins.workflow.graph.BlockStartNode; +import org.jenkinsci.plugins.workflow.graph.FlowNode; +import org.jenkinsci.plugins.workflow.job.WorkflowRun; /** * Base class for DatadogTraceBuildLogic and DatadogPipelineBuildLogic @@ -29,26 +35,7 @@ public abstract class DatadogBaseBuildLogic { private static final Logger logger = Logger.getLogger(DatadogBaseBuildLogic.class.getName()); @Nullable - public abstract JSONObject finishBuildTrace(final BuildData buildData, final Run run); - - protected String getNodeName(Run run, BuildData buildData, BuildData updatedBuildData) { - final PipelineNodeInfoAction pipelineNodeInfoAction = run.getAction(PipelineNodeInfoAction.class); - if(pipelineNodeInfoAction != null){ - return pipelineNodeInfoAction.getNodeName(); - } - - return buildData.getNodeName("").isEmpty() ? 
updatedBuildData.getNodeName("") : buildData.getNodeName(""); - } - - protected String getNodeHostname(Run run, BuildData updatedBuildData) { - final PipelineNodeInfoAction pipelineNodeInfoAction = run.getAction(PipelineNodeInfoAction.class); - if(pipelineNodeInfoAction != null){ - return pipelineNodeInfoAction.getNodeHostname(); - } else if (!updatedBuildData.getHostname("").isEmpty()) { - return updatedBuildData.getHostname(""); - } - return null; - } + public abstract JSONObject toJson(final BuildData buildData, final Run run); @SuppressFBWarnings("NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE") protected Set getNodeLabels(Run run, final String nodeName) { @@ -96,13 +83,22 @@ protected long getMillisInQueue(BuildData buildData) { } protected String getStageBreakdown(Run run) { - final StageBreakdownAction stageBreakdownAction = run.getAction(StageBreakdownAction.class); - if(stageBreakdownAction == null) { + if (!(run instanceof WorkflowRun)) { + return null; + } + + WorkflowRun workflowRun = (WorkflowRun) run; + FlowExecution execution = workflowRun.getExecution(); + if (execution == null) { return null; } - final Map stageDataByName = stageBreakdownAction.getStageDataByName(); - final List stages = new ArrayList<>(stageDataByName.values()); + List currentHeads = execution.getCurrentHeads(); + if (currentHeads == null || currentHeads.isEmpty()) { + return null; + } + + final List stages = traverseStages(currentHeads); Collections.sort(stages); final String stagesJson = JsonUtils.toJson(new ArrayList<>(stages)); @@ -114,6 +110,41 @@ protected String getStageBreakdown(Run run) { return stagesJson; } + private List traverseStages(List heads) { + List stages = new ArrayList<>(); + Queue nodes = new ArrayDeque<>(heads); + while (!nodes.isEmpty()) { + FlowNode node = nodes.poll(); + nodes.addAll(node.getParents()); + + if (!(node instanceof BlockEndNode)) { + continue; + } + + BlockEndNode endNode = (BlockEndNode) node; + BlockStartNode startNode = endNode.getStartNode(); + 
if (!DatadogUtilities.isStageNode(startNode)) { + continue; + } + + long startTimeMicros = TimeUnit.MILLISECONDS.toMicros(DatadogUtilities.getTimeMillis(startNode)); + long endTimeMicros = TimeUnit.MILLISECONDS.toMicros(DatadogUtilities.getTimeMillis(endNode)); + if (startTimeMicros <= 0 || endTimeMicros <= 0) { + logger.fine("Skipping stage " + startNode.getDisplayName() + " because it has no time info " + + "(start: " + startTimeMicros + ", end: " + endTimeMicros + ")"); + continue; + } + + StageData stageData = new StageData.Builder() + .withName(startNode.getDisplayName()) + .withStartTimeInMicros(startTimeMicros) + .withEndTimeInMicros(endTimeMicros) + .build(); + stages.add(stageData); + } + return stages; + } + // Returns true if the run causes contains a Cause.UserIdCause public boolean isTriggeredManually(Run run) { final List causes = run.getCauses(); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java index 54526597d..88730b628 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java @@ -1,28 +1,17 @@ package org.datadog.jenkins.plugins.datadog.traces; import hudson.model.Run; -import java.util.Collection; +import java.io.IOException; import java.util.Collections; import java.util.HashSet; -import java.util.List; -import java.util.Map; import java.util.Set; -import java.util.concurrent.TimeoutException; -import java.util.logging.Logger; -import javax.annotation.Nonnull; import net.sf.json.JSONObject; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; -import org.datadog.jenkins.plugins.datadog.audit.DatadogAudit; import org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.datadog.jenkins.plugins.datadog.model.BuildPipeline; import 
org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; -import org.datadog.jenkins.plugins.datadog.model.CIGlobalTagsAction; import org.datadog.jenkins.plugins.datadog.model.PipelineNodeInfoAction; import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings; -import org.datadog.jenkins.plugins.datadog.util.TagsUtil; -import org.jenkinsci.plugins.workflow.graph.FlowEndNode; -import org.jenkinsci.plugins.workflow.graph.FlowNode; -import org.jenkinsci.plugins.workflow.graphanalysis.DepthFirstScanner; + /** * Base class with shared code for DatadogTracePipelineLogic and DatadogWebhookPipelineLogic @@ -31,29 +20,8 @@ public abstract class DatadogBasePipelineLogic { protected static final String CI_PROVIDER = "jenkins"; protected static final String HOSTNAME_NONE = "none"; - private static final Logger logger = Logger.getLogger(DatadogBasePipelineLogic.class.getName()); - - @Nonnull - public abstract Collection execute(FlowNode flowNode, Run run); - - protected BuildPipelineNode buildPipelineTree(FlowEndNode flowEndNode) { - - final BuildPipeline pipeline = new BuildPipeline(); - - // As this logic is evaluated in the last node of the graph, - // getCurrentHeads() method returns all nodes as a plain list. - final List currentHeads = flowEndNode.getExecution().getCurrentHeads(); - - // Provided that plain list of nodes, the DepthFirstScanner algorithm - // is used to visit efficiently every node in form of a DAG. - final DepthFirstScanner scanner = new DepthFirstScanner(); - scanner.setup(currentHeads); - - // Every found flow node of the DAG is added to the BuildPipeline instance. 
- scanner.forEach(pipeline::add); - return pipeline.buildTree(); // returns the root node - } + public abstract JSONObject toJson(BuildPipelineNode current, Run run) throws IOException, InterruptedException; @SuppressFBWarnings("NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE") protected Set getNodeLabels(Run run, BuildPipelineNode current, String nodeName) { @@ -64,16 +32,17 @@ protected Set getNodeLabels(Run run, BuildPipelineNode current, String n return pipelineNodeInfoAction.getNodeLabels(); } - if (run.getExecutor() != null && run.getExecutor().getOwner() != null) { - Set nodeLabels = DatadogUtilities.getNodeLabels(run.getExecutor().getOwner()); - if (nodeLabels != null && !nodeLabels.isEmpty()) { - return nodeLabels; + if (DatadogUtilities.isMainNode(nodeName)) { + // executor owner is the master node even if the pipeline contains an "agent" block + if (run.getExecutor() != null) { + Set nodeLabels = DatadogUtilities.getNodeLabels(run.getExecutor().getOwner()); + if (!nodeLabels.isEmpty()) { + return nodeLabels; + } } - } - // If there is no labels and the node name is master, - // we force the label "master". - if (DatadogUtilities.isMainNode(nodeName)) { + // If there is no labels and the node name is master, + // we force the label "master". 
final Set masterLabels = new HashSet<>(); masterLabels.add("master"); return masterLabels; @@ -82,64 +51,21 @@ protected Set getNodeLabels(Run run, BuildPipelineNode current, String n return Collections.emptySet(); } - protected String getNodeName(Run run, BuildPipelineNode current, BuildData buildData) { - final PipelineNodeInfoAction pipelineNodeInfoAction = run.getAction(PipelineNodeInfoAction.class); - - if(current.getNodeName() != null) { + protected String getNodeName(BuildPipelineNode current, BuildData buildData) { + if (current.getNodeName() != null) { return current.getNodeName(); - } else if (pipelineNodeInfoAction != null) { - return pipelineNodeInfoAction.getNodeName(); } - return buildData.getNodeName(""); } - protected String getNodeHostname(Run run, BuildPipelineNode current) { - final PipelineNodeInfoAction pipelineNodeInfoAction = run.getAction(PipelineNodeInfoAction.class); - if(current.getNodeHostname() != null) { + protected String getNodeHostname(BuildPipelineNode current, BuildData buildData) { + if (current.getNodeHostname() != null) { return current.getNodeHostname(); - } else if (pipelineNodeInfoAction != null) { - return pipelineNodeInfoAction.getNodeHostname(); - } - return null; - } - - protected boolean isTraceable(BuildPipelineNode node) { - if (node.getStartTimeMicros() == -1L) { - logger.severe("Unable to send trace of node: " + node.getName() + ". Start Time is not set"); - return false; - } - - if(node.getEndTimeMicros() == -1L) { - logger.severe("Unable to send trace of node: " + node.getName() + ". End Time is not set"); - return false; - } - - if(node.isInternal()){ - logger.fine("Node: " + node.getName() + " is Jenkins internal. 
We skip it."); - return false; - } - - return true; - } - - protected void updateCIGlobalTags(Run run) { - long start = System.currentTimeMillis(); - try { - final CIGlobalTagsAction ciGlobalTagsAction = run.getAction(CIGlobalTagsAction.class); - if(ciGlobalTagsAction == null) { - return; - } - - final Map tags = TagsUtil.convertTagsToMapSingleValues(DatadogUtilities.getTagsFromPipelineAction(run)); - ciGlobalTagsAction.putAll(tags); - } finally { - long end = System.currentTimeMillis(); - DatadogAudit.log("DatadogTracePipelineLogic.updateCIGlobalTags", start, end); } + return buildData.getHostname(""); } protected String buildOperationName(BuildPipelineNode current) { - return CI_PROVIDER + "." + current.getType().name().toLowerCase() + ((current.isInternal()) ? ".internal" : ""); + return CI_PROVIDER + "." + current.getType().name().toLowerCase(); } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java index 518abceda..4cf0a3346 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java @@ -1,7 +1,6 @@ package org.datadog.jenkins.plugins.datadog.traces; import static org.datadog.jenkins.plugins.datadog.DatadogUtilities.statusFromResult; -import static org.datadog.jenkins.plugins.datadog.DatadogUtilities.toJson; import static org.datadog.jenkins.plugins.datadog.traces.CITags.Values.ORIGIN_CIAPP_PIPELINE; import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.filterSensitiveInfo; import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.normalizeBranch; @@ -10,21 +9,23 @@ import hudson.model.Result; import hudson.model.Run; +import java.util.HashMap; import java.util.Map; import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.logging.Logger; +import 
javax.annotation.Nullable; import net.sf.json.JSONObject; import org.apache.commons.lang.StringUtils; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; -import org.datadog.jenkins.plugins.datadog.model.CIGlobalTagsAction; import org.datadog.jenkins.plugins.datadog.traces.mapper.JsonTraceSpanMapper; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; +import org.datadog.jenkins.plugins.datadog.util.TagsUtil; /** - * Keeps the logic to send traces related to Jenkins Build. + * Keeps the logic to create traces related to Jenkins Build. * This gets called once per job (datadog level: pipeline) */ public class DatadogTraceBuildLogic extends DatadogBaseBuildLogic { @@ -33,14 +34,16 @@ public class DatadogTraceBuildLogic extends DatadogBaseBuildLogic { private final JsonTraceSpanMapper jsonTraceSpanMapper = new JsonTraceSpanMapper(); + @Nullable @Override - public JSONObject finishBuildTrace(final BuildData buildData, final Run run) { - TraceSpan span = createSpan(buildData, run); + public JSONObject toJson(final BuildData buildData, final Run run) { + TraceSpan span = toSpan(buildData, run); return span != null ? jsonTraceSpanMapper.map(span) : null; } + @Nullable // hook for tests - public TraceSpan createSpan(final BuildData buildData, final Run run) { + public TraceSpan toSpan(final BuildData buildData, final Run run) { if (!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) { return null; } @@ -55,12 +58,6 @@ public TraceSpan createSpan(final BuildData buildData, final Run run) { return null; } - // In this point of the execution, the BuildData stored within - // BuildSpanAction has been updated by the information available - // inside the Pipeline steps. (Only applicable if the build is - // based on Jenkins Pipelines). 
- final BuildData updatedBuildData = buildSpanAction.getBuildData(); - final String prefix = BuildPipelineNode.NodeType.PIPELINE.getTagName(); final String buildLevel = BuildPipelineNode.NodeType.PIPELINE.getBuildLevel(); final long endTimeMicros = buildData.getEndTime(0L) * 1000; @@ -80,44 +77,36 @@ public TraceSpan createSpan(final BuildData buildData, final Run run) { buildSpan.putMeta(prefix + CITags._ID, buildData.getBuildTag("")); buildSpan.putMeta(prefix + CITags._NUMBER, buildData.getBuildNumber("")); buildSpan.putMeta(prefix + CITags._URL, buildData.getBuildUrl("")); - buildSpan.putMetric(CITags.QUEUE_TIME, TimeUnit.MILLISECONDS.toSeconds(getMillisInQueue(updatedBuildData))); + buildSpan.putMetric(CITags.QUEUE_TIME, TimeUnit.MILLISECONDS.toSeconds(getMillisInQueue(buildData))); // Pipeline Parameters if(!buildData.getBuildParameters().isEmpty()) { - buildSpan.putMeta(CITags.CI_PARAMETERS, toJson(buildData.getBuildParameters())); + buildSpan.putMeta(CITags.CI_PARAMETERS, DatadogUtilities.toJson(buildData.getBuildParameters())); } - final String workspace = buildData.getWorkspace("").isEmpty() ? updatedBuildData.getWorkspace("") : buildData.getWorkspace(""); - buildSpan.putMeta(CITags.WORKSPACE_PATH, workspace); - - final String nodeName = getNodeName(run, buildData, updatedBuildData); - buildSpan.putMeta(CITags.NODE_NAME, nodeName); + buildSpan.putMeta(CITags.WORKSPACE_PATH, buildData.getWorkspace("")); + buildSpan.putMeta(CITags.NODE_NAME, buildData.getNodeName("")); - final String nodeLabelsJson = toJson(getNodeLabels(run, nodeName)); - if(!nodeLabelsJson.isEmpty()){ + final String nodeLabelsJson = DatadogUtilities.toJson(getNodeLabels(run, buildData.getNodeName(""))); + if(nodeLabelsJson != null && !nodeLabelsJson.isEmpty()){ buildSpan.putMeta(CITags.NODE_LABELS, nodeLabelsJson); + } else { + buildSpan.putMeta(CITags.NODE_LABELS, "[]"); } // If the NodeName == "master", we don't set _dd.hostname. It will be overridden by the Datadog Agent. 
(Traces are only available using Datadog Agent) - if(!DatadogUtilities.isMainNode(nodeName)) { - final String workerHostname = getNodeHostname(run, updatedBuildData); - // If the worker hostname is equals to controller hostname but the node name is not master/built-in then we - // could not detect the worker hostname properly. Check if it's set in the environment, otherwise set to none. - if(buildData.getHostname("").equalsIgnoreCase(workerHostname)) { - String envHostnameOrNone = DatadogUtilities.getHostnameFromWorkerEnv(run).orElse(HOSTNAME_NONE); - buildSpan.putMeta(CITags._DD_HOSTNAME, envHostnameOrNone); - } else { - buildSpan.putMeta(CITags._DD_HOSTNAME, (workerHostname != null) ? workerHostname : HOSTNAME_NONE); - } + if(!DatadogUtilities.isMainNode(buildData.getNodeName(""))) { + final String workerHostname = buildData.getHostname(""); + buildSpan.putMeta(CITags._DD_HOSTNAME, !workerHostname.isEmpty() ? workerHostname : HOSTNAME_NONE); } // Git Info - final String gitUrl = buildData.getGitUrl("").isEmpty() ? updatedBuildData.getGitUrl("") : buildData.getGitUrl(""); + final String gitUrl = buildData.getGitUrl(""); if(StringUtils.isNotEmpty(gitUrl)){ buildSpan.putMeta(CITags.GIT_REPOSITORY_URL, filterSensitiveInfo(gitUrl)); } - final String gitCommit = buildData.getGitCommit("").isEmpty() ? updatedBuildData.getGitCommit("") : buildData.getGitCommit(""); + final String gitCommit = buildData.getGitCommit(""); if(!isValidCommit(gitCommit)) { logger.warning("Couldn't find a valid commit for pipelineID '"+buildData.getBuildTag("")+"'. GIT_COMMIT environment variable was not found or has invalid SHA1 string: " + gitCommit); } @@ -127,47 +116,47 @@ public TraceSpan createSpan(final BuildData buildData, final Run run) { buildSpan.putMeta(CITags.GIT_COMMIT_SHA, gitCommit); } - final String gitMessage = buildData.getGitMessage("").isEmpty() ? 
updatedBuildData.getGitMessage("") : buildData.getGitMessage(""); + final String gitMessage = buildData.getGitMessage(""); if(StringUtils.isNotEmpty(gitMessage)){ buildSpan.putMeta(CITags.GIT_COMMIT_MESSAGE, gitMessage); } - final String gitAuthor = buildData.getGitAuthorName("").isEmpty() ? updatedBuildData.getGitAuthorName("") : buildData.getGitAuthorName(""); + final String gitAuthor = buildData.getGitAuthorName(""); if(StringUtils.isNotEmpty(gitAuthor)){ buildSpan.putMeta(CITags.GIT_COMMIT_AUTHOR_NAME, gitAuthor); } - final String gitAuthorEmail = buildData.getGitAuthorEmail("").isEmpty() ? updatedBuildData.getGitAuthorEmail("") : buildData.getGitAuthorEmail(""); + final String gitAuthorEmail = buildData.getGitAuthorEmail(""); if(StringUtils.isNotEmpty(gitAuthorEmail)){ buildSpan.putMeta(CITags.GIT_COMMIT_AUTHOR_EMAIL, gitAuthorEmail); } - final String gitAuthorDate = buildData.getGitAuthorDate("").isEmpty() ? updatedBuildData.getGitAuthorDate("") : buildData.getGitAuthorDate(""); + final String gitAuthorDate = buildData.getGitAuthorDate(""); if(StringUtils.isNotEmpty(gitAuthorDate)){ buildSpan.putMeta(CITags.GIT_COMMIT_AUTHOR_DATE, gitAuthorDate); } - final String gitCommitter = buildData.getGitCommitterName("").isEmpty() ? updatedBuildData.getGitCommitterName("") : buildData.getGitCommitterName(""); + final String gitCommitter = buildData.getGitCommitterName(""); if(StringUtils.isNotEmpty(gitCommitter)){ buildSpan.putMeta(CITags.GIT_COMMIT_COMMITTER_NAME, gitCommitter); } - final String gitCommitterEmail = buildData.getGitCommitterEmail("").isEmpty() ? updatedBuildData.getGitCommitterEmail("") : buildData.getGitCommitterEmail(""); + final String gitCommitterEmail = buildData.getGitCommitterEmail(""); if(StringUtils.isNotEmpty(gitCommitterEmail)){ buildSpan.putMeta(CITags.GIT_COMMIT_COMMITTER_EMAIL, gitCommitterEmail); } - final String gitCommitterDate = buildData.getGitCommitterDate("").isEmpty() ? 
updatedBuildData.getGitCommitterDate("") : buildData.getGitCommitterDate(""); + final String gitCommitterDate = buildData.getGitCommitterDate(""); if(StringUtils.isNotEmpty(gitCommitterDate)){ buildSpan.putMeta(CITags.GIT_COMMIT_COMMITTER_DATE, gitCommitterDate); } - final String gitDefaultBranch = buildData.getGitDefaultBranch("").isEmpty() ? updatedBuildData.getGitDefaultBranch("") : buildData.getGitDefaultBranch(""); + final String gitDefaultBranch = buildData.getGitDefaultBranch(""); if(StringUtils.isNotEmpty(gitDefaultBranch)){ buildSpan.putMeta(CITags.GIT_DEFAULT_BRANCH, gitDefaultBranch); } - final String rawGitBranch = buildData.getBranch("").isEmpty() ? updatedBuildData.getBranch("") : buildData.getBranch(""); + final String rawGitBranch = buildData.getBranch(""); final String gitBranch = normalizeBranch(rawGitBranch); if(StringUtils.isNotEmpty(gitBranch)) { buildSpan.putMeta(CITags.GIT_BRANCH, gitBranch); @@ -175,7 +164,7 @@ public TraceSpan createSpan(final BuildData buildData, final Run run) { // Check if the user set manually the DD_GIT_TAG environment variable. // Otherwise, Jenkins reports the tag in the Git branch information. (e.g. origin/tags/0.1.0) - final String gitTag = Optional.of(buildData.getGitTag("").isEmpty() ? 
updatedBuildData.getGitTag("") : buildData.getGitTag("")) + final String gitTag = Optional.of(buildData.getGitTag("")) .filter(tag -> !tag.isEmpty()) .orElse(normalizeTag(rawGitBranch)); if(StringUtils.isNotEmpty(gitTag)) { @@ -214,20 +203,18 @@ public TraceSpan createSpan(final BuildData buildData, final Run run) { buildSpan.setError(true); } - // CI Tags propagation - final CIGlobalTagsAction ciGlobalTagsAction = run.getAction(CIGlobalTagsAction.class); - if(ciGlobalTagsAction != null) { - final Map tags = ciGlobalTagsAction.getTags(); - for(Map.Entry tagEntry : tags.entrySet()) { - buildSpan.putMeta(tagEntry.getKey(), tagEntry.getValue()); - } + Map globalTags = new HashMap<>(buildData.getTagsForTraces()); + globalTags.putAll(TagsUtil.convertTagsToMapSingleValues(DatadogUtilities.getTagsFromPipelineAction(run))); + + for(Map.Entry tagEntry : globalTags.entrySet()) { + buildSpan.putMeta(tagEntry.getKey(), tagEntry.getValue()); } // If the build is a Jenkins Pipeline, the queue time is included in the root span duration. // We need to adjust the endTime of the root span subtracting the queue time reported by its child span. // The propagated queue time is set DatadogTracePipelineLogic#updateBuildData method. // The queue time reported by DatadogBuildListener#onStarted method is not included in the root span duration. - final long propagatedMillisInQueue = Math.max(updatedBuildData.getPropagatedMillisInQueue(-1L), 0); + final long propagatedMillisInQueue = Math.max(buildData.getPropagatedMillisInQueue(-1L), 0); // Although the queue time happens before the span startTime, we cannot remove it from the startTime // because there is no API to do it at the end of the trace. 
Additionally, we cannot create the root span diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTracePipelineLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTracePipelineLogic.java index cc6ae4596..1c1e414f0 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTracePipelineLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTracePipelineLogic.java @@ -1,7 +1,5 @@ package org.datadog.jenkins.plugins.datadog.traces; -import static org.datadog.jenkins.plugins.datadog.DatadogUtilities.cleanUpTraceActions; -import static org.datadog.jenkins.plugins.datadog.DatadogUtilities.statusFromResult; import static org.datadog.jenkins.plugins.datadog.DatadogUtilities.toJson; import static org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode.NodeType.PIPELINE; import static org.datadog.jenkins.plugins.datadog.traces.CITags.Values.ORIGIN_CIAPP_PIPELINE; @@ -10,27 +8,22 @@ import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.normalizeTag; import hudson.model.Run; +import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; import javax.annotation.Nonnull; import net.sf.json.JSONObject; import org.apache.commons.lang.StringUtils; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; -import org.datadog.jenkins.plugins.datadog.model.CIGlobalTagsAction; +import org.datadog.jenkins.plugins.datadog.model.Status; import org.datadog.jenkins.plugins.datadog.traces.mapper.JsonTraceSpanMapper; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; -import 
org.datadog.jenkins.plugins.datadog.util.git.GitUtils; -import org.jenkinsci.plugins.workflow.graph.FlowEndNode; -import org.jenkinsci.plugins.workflow.graph.FlowNode; +import org.datadog.jenkins.plugins.datadog.util.TagsUtil; /** @@ -43,53 +36,15 @@ public class DatadogTracePipelineLogic extends DatadogBasePipelineLogic { @Nonnull @Override - public Collection execute(FlowNode flowNode, Run run) { - Collection traces = collectTraces(flowNode, run); - return traces.stream().map(jsonTraceSpanMapper::map).collect(Collectors.toList()); + public JSONObject toJson(BuildPipelineNode flowNode, Run run) throws IOException, InterruptedException { + TraceSpan span = toSpan(flowNode, run); + return jsonTraceSpanMapper.map(span); } // hook for tests - public Collection collectTraces(FlowNode flowNode, Run run) { - if (!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) { - return Collections.emptySet(); - } - - final IsPipelineAction isPipelineAction = run.getAction(IsPipelineAction.class); - if(isPipelineAction == null) { - run.addAction(new IsPipelineAction()); - } - - final BuildSpanAction buildSpanAction = run.getAction(BuildSpanAction.class); - if(buildSpanAction == null) { - return Collections.emptySet(); - } - - final BuildData buildData = buildSpanAction.getBuildData(); - if(!DatadogUtilities.isLastNode(flowNode)){ - updateCIGlobalTags(run); - return Collections.emptySet(); - } - - final TraceSpan.TraceSpanContext traceSpanContext = buildSpanAction.getBuildSpanContext(); - final BuildPipelineNode root = buildPipelineTree((FlowEndNode) flowNode); - - try { - return collectTraces(run, buildData, root, traceSpanContext); - } finally { - // Explicit removal of InvisibleActions used to collect Traces when the Run finishes. 
- cleanUpTraceActions(run); - } - } - - private Collection collectTraces(final Run run, final BuildData buildData, final BuildPipelineNode current, final TraceSpan.TraceSpanContext parentSpanContext) { - if(!isTraceable(current)) { - Collection traces = new ArrayList<>(); - // If the current node is not traceable, we continue with its children - for(final BuildPipelineNode child : current.getChildren()) { - traces.addAll(collectTraces(run, buildData, child, parentSpanContext)); - } - return traces; - } + @Nonnull + public TraceSpan toSpan(BuildPipelineNode current, Run run) throws IOException, InterruptedException { + BuildData buildData = new BuildData(run, DatadogUtilities.getTaskListener(run)); // If the root span has propagated queue time, we need to adjust all startTime and endTime from Jenkins pipelines spans // because this time will be subtracted in the root span. See DatadogTraceBuildLogic#finishBuildTrace method. @@ -98,8 +53,8 @@ private Collection collectTraces(final Run run, final BuildData final long fixedEndTimeNanos = TimeUnit.MICROSECONDS.toNanos(current.getEndTimeMicros() - TimeUnit.MILLISECONDS.toMicros(propagatedMillisInQueue)); // At this point, the current node is traceable. 
- final TraceSpan.TraceSpanContext spanContext = new TraceSpan.TraceSpanContext(parentSpanContext.getTraceId(), parentSpanContext.getSpanId(), current.getSpanId()); - final TraceSpan span = new TraceSpan(buildOperationName(current), fixedStartTimeNanos + DatadogUtilities.getNanosInQueue(current), spanContext); + final TraceSpan.TraceSpanContext spanContext = new TraceSpan.TraceSpanContext(current.getTraceId(), current.getParentSpanId(), current.getSpanId()); + final TraceSpan span = new TraceSpan(buildOperationName(current), fixedStartTimeNanos + current.getNanosInQueue(), spanContext); span.setServiceName(DatadogUtilities.getDatadogGlobalDescriptor().getCiInstanceName()); span.setResourceName(current.getName()); span.setType("ci"); @@ -126,65 +81,60 @@ private Collection collectTraces(final Run run, final BuildData } } - Collection traces = new ArrayList<>(); - for(final BuildPipelineNode child : current.getChildren()) { - traces.addAll(collectTraces(run, buildData, child, span.context())); - } - //Logs //NOTE: Implement sendNodeLogs span.setEndNano(fixedEndTimeNanos); - - traces.add(span); - return traces; + return span; } private Map buildTraceMetrics(BuildPipelineNode current) { final Map metrics = new HashMap<>(); - metrics.put(CITags.QUEUE_TIME, TimeUnit.NANOSECONDS.toSeconds(DatadogUtilities.getNanosInQueue(current))); + metrics.put(CITags.QUEUE_TIME, TimeUnit.NANOSECONDS.toSeconds(current.getNanosInQueue())); return metrics; } - private Map buildTraceTags(final Run run, final BuildPipelineNode current, final BuildData buildData) { + private Map buildTraceTags(final Run run, final BuildPipelineNode current, final BuildData buildData) { final String prefix = current.getType().getTagName(); final String buildLevel = current.getType().getBuildLevel(); - final Map envVars = current.getEnvVars(); final Map tags = new HashMap<>(); tags.put(CITags.CI_PROVIDER_NAME, CI_PROVIDER); tags.put(CITags._DD_ORIGIN, ORIGIN_CIAPP_PIPELINE); tags.put(prefix + CITags._NAME, 
current.getName()); tags.put(prefix + CITags._NUMBER, current.getId()); - final String status = statusFromResult(current.getResult()); - tags.put(prefix + CITags._RESULT, status); - tags.put(CITags.STATUS, status); + Status status = current.getStatus(); + tags.put(prefix + CITags._RESULT, status.toTag()); + tags.put(CITags.STATUS, status.toTag()); // Pipeline Parameters if(!buildData.getBuildParameters().isEmpty()) { - tags.put(CITags.CI_PARAMETERS, toJson(buildData.getBuildParameters())); + tags.put(CITags.CI_PARAMETERS, DatadogUtilities.toJson(buildData.getBuildParameters())); } - final String url = envVars.get("BUILD_URL") != null ? envVars.get("BUILD_URL") : buildData.getBuildUrl(""); + String url = buildData.getBuildUrl(""); if(StringUtils.isNotBlank(url)) { tags.put(prefix + CITags._URL, url + "execution/node/"+current.getId()+"/"); } - final String workspace = current.getWorkspace() != null ? current.getWorkspace() : buildData.getWorkspace(""); + final String workspace = firstNonNull(current.getWorkspace(), buildData.getWorkspace("")); tags.put(CITags.WORKSPACE_PATH, workspace); - tags.put(CITags._DD_CI_INTERNAL, current.isInternal()); - if(!current.isInternal()) { - tags.put(CITags._DD_CI_BUILD_LEVEL, buildLevel); - tags.put(CITags._DD_CI_LEVEL, buildLevel); + tags.put(CITags._DD_CI_INTERNAL, false); + tags.put(CITags._DD_CI_BUILD_LEVEL, buildLevel); + tags.put(CITags._DD_CI_LEVEL, buildLevel); + + String jenkinsResult = current.getJenkinsResult(); + if (jenkinsResult != null) { + tags.put(CITags.JENKINS_RESULT, jenkinsResult.toLowerCase()); } - tags.put(CITags.JENKINS_RESULT, current.getResult().toLowerCase()); + tags.put(CITags.ERROR, String.valueOf(current.isError() || current.isUnstable())); //Git Info - final String rawGitBranch = GitUtils.resolveGitBranch(envVars, buildData); - String gitBranch = null; - String gitTag = null; + String rawGitBranch = buildData.getBranch(""); + String gitBranch; + String gitTag; if(rawGitBranch != null && 
!rawGitBranch.isEmpty()) { gitBranch = normalizeBranch(rawGitBranch); if(gitBranch != null) { @@ -199,7 +149,7 @@ private Map buildTraceTags(final Run run, final BuildPipelineNod // If the user set DD_GIT_TAG manually, // we override the git.tag value. - gitTag = GitUtils.resolveGitTag(envVars, buildData); + gitTag = buildData.getGitTag(""); if(StringUtils.isNotEmpty(gitTag)){ tags.put(CITags.GIT_TAG, gitTag); } @@ -208,40 +158,36 @@ private Map buildTraceTags(final Run run, final BuildPipelineNod // If we could not detect a valid commit, that means that the GIT_COMMIT environment variable // was overridden by the user at top level, so we set the content what we have (despite it's not valid). // We will show a logger.warning at the end of the pipeline. - final String gitCommit = GitUtils.resolveGitCommit(envVars, buildData); + String gitCommit = buildData.getGitCommit(""); if(gitCommit != null && !gitCommit.isEmpty()) { tags.put(CITags.GIT_COMMIT__SHA, gitCommit); //Maintain retrocompatibility tags.put(CITags.GIT_COMMIT_SHA, gitCommit); } - final String gitRepoUrl = GitUtils.resolveGitRepositoryUrl(envVars, buildData); + String gitRepoUrl = buildData.getGitUrl(""); if (gitRepoUrl != null && !gitRepoUrl.isEmpty()) { tags.put(CITags.GIT_REPOSITORY_URL, filterSensitiveInfo(gitRepoUrl)); } // User info - final String user = envVars.get("USER") != null ? 
envVars.get("USER") : buildData.getUserId(); + String user = buildData.getUserId(); tags.put(CITags.USER_NAME, user); // Node info - final String nodeName = getNodeName(run, current, buildData); + final String nodeName = getNodeName(current, buildData); tags.put(CITags.NODE_NAME, nodeName); - final String nodeLabels = toJson(getNodeLabels(run, current, nodeName)); - if(!nodeLabels.isEmpty()){ + final String nodeLabels = DatadogUtilities.toJson(getNodeLabels(run, current, nodeName)); + if(nodeLabels != null && !nodeLabels.isEmpty()){ tags.put(CITags.NODE_LABELS, nodeLabels); + } else { + tags.put(CITags.NODE_LABELS, "[]"); } // If the NodeName == "master", we don't set _dd.hostname. It will be overridden by the Datadog Agent. (Traces are only available using Datadog Agent) if(!DatadogUtilities.isMainNode(nodeName)) { - final String workerHostname = getNodeHostname(run, current); - // If the worker hostname is equals to controller hostname but the node name is not "master" - // then we could not detect the worker hostname properly. We set _dd.hostname to 'none' explicitly. - if(buildData.getHostname("").equalsIgnoreCase(workerHostname)) { - tags.put(CITags._DD_HOSTNAME, HOSTNAME_NONE); - } else { - tags.put(CITags._DD_HOSTNAME, (workerHostname != null) ? workerHostname : HOSTNAME_NONE); - } + final String workerHostname = getNodeHostname(current, buildData); + tags.put(CITags._DD_HOSTNAME, !workerHostname.isEmpty() ? 
workerHostname : HOSTNAME_NONE); } // Arguments @@ -278,15 +224,15 @@ private Map buildTraceTags(final Run run, final BuildPipelineNod } // CI Tags propagation - final CIGlobalTagsAction ciGlobalTagsAction = run.getAction(CIGlobalTagsAction.class); - if(ciGlobalTagsAction != null) { - final Map globalTags = ciGlobalTagsAction.getTags(); - for(Map.Entry globalTagEntry : globalTags.entrySet()) { - tags.put(globalTagEntry.getKey(), globalTagEntry.getValue()); - } - } + Map globalTags = new HashMap<>(buildData.getTagsForTraces()); + globalTags.putAll(TagsUtil.convertTagsToMapSingleValues(DatadogUtilities.getTagsFromPipelineAction(run))); + tags.putAll(globalTags); return tags; } + private T firstNonNull(T first, T second) { + return first != null ? first : second; + } + } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java index 6e0cc9313..1dfc0f545 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java @@ -8,18 +8,20 @@ import hudson.model.Run; import java.util.Date; +import java.util.HashMap; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.logging.Logger; +import javax.annotation.Nullable; import net.sf.json.JSONArray; import net.sf.json.JSONObject; import org.apache.commons.lang.StringUtils; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; -import org.datadog.jenkins.plugins.datadog.model.CIGlobalTagsAction; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; +import org.datadog.jenkins.plugins.datadog.util.TagsUtil; /** * Keeps the logic to send webhooks related to Jenkins Build. 
@@ -29,8 +31,9 @@ public class DatadogWebhookBuildLogic extends DatadogBaseBuildLogic { private static final Logger logger = Logger.getLogger(DatadogWebhookBuildLogic.class.getName()); + @Nullable @Override - public JSONObject finishBuildTrace(final BuildData buildData, final Run run) { + public JSONObject toJson(final BuildData buildData, final Run run) { if (!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) { return null; } @@ -45,18 +48,12 @@ public JSONObject finishBuildTrace(final BuildData buildData, final Run run return null; } - // In this point of the execution, the BuildData stored within - // BuildSpanAction has been updated by the information available - // inside the Pipeline steps by DatadogWebhookPipelineLogic. - // (Only applicable if the build is based on Jenkins Pipelines). - final BuildData updatedBuildData = buildSpanAction.getBuildData(); - final long startTimeMillis = buildData.getStartTime(0L); // If the build is a Jenkins Pipeline, the queue time is included in the root duration. // We need to adjust the endTime of the root subtracting the queue time reported by its children. // The propagated queue time is set DatadogTracePipelineLogic#updateBuildData method. // The queue time reported by DatadogBuildListener#onStarted method is not included in the root duration. - final long propagatedMillisInQueue = Math.max(updatedBuildData.getPropagatedMillisInQueue(-1L), 0); + final long propagatedMillisInQueue = Math.max(buildData.getPropagatedMillisInQueue(-1L), 0); // Although the queue time happens before the startTime, we cannot remove it from the startTime // because there is no API to do it at the end of the trace. Additionally, we cannot create the root // at the end of the build, because we would lose the logs correlation. 
@@ -66,11 +63,11 @@ public JSONObject finishBuildTrace(final BuildData buildData, final Run run final String jenkinsResult = buildData.getResult(""); final String status = statusFromResult(jenkinsResult); final String prefix = BuildPipelineNode.NodeType.PIPELINE.getTagName(); - final String rawGitBranch = buildData.getBranch("").isEmpty() ? updatedBuildData.getBranch("") : buildData.getBranch(""); + final String rawGitBranch = buildData.getBranch(""); final String gitBranch = normalizeBranch(rawGitBranch); // Check if the user set manually the DD_GIT_TAG environment variable. // Otherwise, Jenkins reports the tag in the Git branch information. (e.g. origin/tags/0.1.0) - final String gitTag = Optional.of(buildData.getGitTag("").isEmpty() ? updatedBuildData.getGitTag("") : buildData.getGitTag("")) + final String gitTag = Optional.of(buildData.getGitTag("")) .filter(tag -> !tag.isEmpty()) .orElse(normalizeTag(rawGitBranch)); @@ -80,7 +77,7 @@ public JSONObject finishBuildTrace(final BuildData buildData, final Run run payload.put("start", DatadogUtilities.toISO8601(new Date(startTimeMillis))); payload.put("end", DatadogUtilities.toISO8601(new Date(endTimeMillis))); payload.put("partial_retry", false); - payload.put("queue_time", getMillisInQueue(updatedBuildData)); + payload.put("queue_time", getMillisInQueue(buildData)); payload.put("status", status); payload.put("is_manual", isTriggeredManually(run)); @@ -115,12 +112,11 @@ public JSONObject finishBuildTrace(final BuildData buildData, final Run run { JSONArray tagsPayload = new JSONArray(); - final CIGlobalTagsAction ciGlobalTagsAction = run.getAction(CIGlobalTagsAction.class); - if(ciGlobalTagsAction != null) { - final Map tags = ciGlobalTagsAction.getTags(); - for(Map.Entry tagEntry : tags.entrySet()) { - tagsPayload.add(tagEntry.getKey() + ":" + tagEntry.getValue()); - } + Map globalTags = new HashMap<>(buildData.getTagsForTraces()); + 
globalTags.putAll(TagsUtil.convertTagsToMapSingleValues(DatadogUtilities.getTagsFromPipelineAction(run))); + + for(Map.Entry tagEntry : globalTags.entrySet()) { + tagsPayload.add(tagEntry.getKey() + ":" + tagEntry.getValue()); } // Jenkins specific @@ -158,25 +154,16 @@ public JSONObject finishBuildTrace(final BuildData buildData, final Run run { JSONObject nodePayload = new JSONObject(); - final String nodeName = getNodeName(run, buildData, updatedBuildData); + final String nodeName = buildData.getNodeName(""); nodePayload.put("name", nodeName); if(!DatadogUtilities.isMainNode(nodeName)) { - - final String workerHostname = getNodeHostname(run, updatedBuildData); - - // If the worker hostname is equals to controller hostname but the node name is not master/built-in then we - // could not detect the worker hostname properly. Check if it's set in the environment, otherwise set to none. - if(buildData.getHostname("").equalsIgnoreCase(workerHostname)) { - String envHostnameOrNone = DatadogUtilities.getHostnameFromWorkerEnv(run).orElse(HOSTNAME_NONE); - nodePayload.put("hostname", envHostnameOrNone); - } else { - nodePayload.put("hostname", (workerHostname != null) ? workerHostname : HOSTNAME_NONE); - } + final String workerHostname = buildData.getHostname(""); + nodePayload.put("hostname", !workerHostname.isEmpty() ? workerHostname : HOSTNAME_NONE); } else { nodePayload.put("hostname", DatadogUtilities.getHostname(null)); } - final String workspace = buildData.getWorkspace("").isEmpty() ? 
updatedBuildData.getWorkspace("") : buildData.getWorkspace(""); + final String workspace = buildData.getWorkspace(""); nodePayload.put("workspace", workspace); final Set nodeLabels = getNodeLabels(run, nodeName); @@ -185,7 +172,6 @@ public JSONObject finishBuildTrace(final BuildData buildData, final Run run payload.put("node", nodePayload); } - // Git info { JSONObject gitPayload = new JSONObject(); @@ -198,54 +184,54 @@ public JSONObject finishBuildTrace(final BuildData buildData, final Run run gitPayload.put("tag", gitTag); } - final String gitCommit = buildData.getGitCommit("").isEmpty() ? updatedBuildData.getGitCommit("") : buildData.getGitCommit(""); + final String gitCommit = buildData.getGitCommit(""); if(!isValidCommit(gitCommit)) { logger.warning("Couldn't find a valid commit for pipelineID '"+buildData.getBuildTag("")+"'. GIT_COMMIT environment variable was not found or has invalid SHA1 string: " + gitCommit); } else { gitPayload.put("sha", gitCommit); } - final String gitRepoUrl = buildData.getGitUrl("").isEmpty() ? updatedBuildData.getGitUrl("") : buildData.getGitUrl(""); + final String gitRepoUrl = buildData.getGitUrl(""); if (gitRepoUrl != null && !gitRepoUrl.isEmpty()) { gitPayload.put("repository_url", filterSensitiveInfo(gitRepoUrl)); } - final String gitMessage = buildData.getGitMessage("").isEmpty() ? updatedBuildData.getGitMessage("") : buildData.getGitMessage(""); + final String gitMessage = buildData.getGitMessage(""); if (gitMessage != null && !gitMessage.isEmpty()) { gitPayload.put("message", gitMessage); } - final String gitAuthorDate = buildData.getGitAuthorDate("").isEmpty() ? updatedBuildData.getGitAuthorDate("") : buildData.getGitAuthorDate(""); + final String gitAuthorDate = buildData.getGitAuthorDate(""); if (gitAuthorDate != null && !gitAuthorDate.isEmpty()) { gitPayload.put("author_time", gitAuthorDate); } - final String gitCommitDate = buildData.getGitCommitterDate("").isEmpty() ? 
updatedBuildData.getGitCommitterDate("") : buildData.getGitCommitterDate(""); + final String gitCommitDate = buildData.getGitCommitterDate(""); if (gitCommitDate != null && !gitCommitDate.isEmpty()) { gitPayload.put("commit_time", gitCommitDate); } - final String gitCommitterName = buildData.getGitCommitterName("").isEmpty() ? updatedBuildData.getGitCommitterName("") : buildData.getGitCommitterName(""); + final String gitCommitterName = buildData.getGitCommitterName(""); if (gitCommitterName != null && !gitCommitterName.isEmpty()) { gitPayload.put("committer_name", gitCommitterName); } - final String gitCommitterEmail = buildData.getGitCommitterEmail("").isEmpty() ? updatedBuildData.getGitCommitterEmail("") : buildData.getGitCommitterEmail(""); + final String gitCommitterEmail = buildData.getGitCommitterEmail(""); if (gitCommitterEmail != null && !gitCommitterEmail.isEmpty()) { gitPayload.put("committer_email", gitCommitterEmail); } - final String gitAuthorName = buildData.getGitAuthorName("").isEmpty() ? updatedBuildData.getGitAuthorName("") : buildData.getGitAuthorName(""); + final String gitAuthorName = buildData.getGitAuthorName(""); if (gitAuthorName != null && !gitAuthorName.isEmpty()) { gitPayload.put("author_name", gitAuthorName); } - final String gitAuthorEmail = buildData.getGitAuthorEmail("").isEmpty() ? updatedBuildData.getGitAuthorEmail("") : buildData.getGitAuthorEmail(""); + final String gitAuthorEmail = buildData.getGitAuthorEmail(""); if (gitAuthorEmail != null && !gitAuthorEmail.isEmpty()) { gitPayload.put("author_email", gitAuthorEmail); } - final String gitDefaultBranch = buildData.getGitDefaultBranch("").isEmpty() ? 
updatedBuildData.getGitDefaultBranch("") : buildData.getGitDefaultBranch(""); + final String gitDefaultBranch = buildData.getGitDefaultBranch(""); if (gitDefaultBranch != null && !gitDefaultBranch.isEmpty()) { gitPayload.put("default_branch", gitDefaultBranch); } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookPipelineLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookPipelineLogic.java index 9d5e641e2..b6ea079f0 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookPipelineLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookPipelineLogic.java @@ -1,18 +1,15 @@ package org.datadog.jenkins.plugins.datadog.traces; -import static org.datadog.jenkins.plugins.datadog.DatadogUtilities.cleanUpTraceActions; -import static org.datadog.jenkins.plugins.datadog.DatadogUtilities.statusFromResult; import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.filterSensitiveInfo; import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.normalizeBranch; import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.normalizeTag; import hudson.model.Run; +import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; import java.util.Date; +import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; @@ -23,11 +20,8 @@ import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; -import org.datadog.jenkins.plugins.datadog.model.CIGlobalTagsAction; -import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; -import org.datadog.jenkins.plugins.datadog.util.git.GitUtils; -import org.jenkinsci.plugins.workflow.graph.FlowEndNode; 
-import org.jenkinsci.plugins.workflow.graph.FlowNode; +import org.datadog.jenkins.plugins.datadog.model.Status; +import org.datadog.jenkins.plugins.datadog.util.TagsUtil; /** * Keeps the logic to send webhooks related to inner jobs of Jenkins Pipelines (datadog levels: stage and job). @@ -37,88 +31,55 @@ public class DatadogWebhookPipelineLogic extends DatadogBasePipelineLogic { @Nonnull @Override - public Collection execute(FlowNode flowNode, Run run) { + public JSONObject toJson(BuildPipelineNode current, Run run) throws IOException, InterruptedException { + BuildData buildData = new BuildData(run, DatadogUtilities.getTaskListener(run)); - if (!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) { - return Collections.emptySet(); - } + JSONObject payload = new JSONObject(); + payload.put("partial_retry", false); - final IsPipelineAction isPipelineAction = run.getAction(IsPipelineAction.class); - if(isPipelineAction == null) { - run.addAction(new IsPipelineAction()); - } + long traceId = current.getTraceId(); + payload.put("trace_id", traceId); - final BuildSpanAction buildSpanAction = run.getAction(BuildSpanAction.class); - if(buildSpanAction == null) { - return Collections.emptySet(); - } + long parentSpanId = current.getParentSpanId(); + payload.put("parent_span_id", parentSpanId); - final BuildData buildData = buildSpanAction.getBuildData(); - if(!DatadogUtilities.isLastNode(flowNode)){ - updateCIGlobalTags(run); - return Collections.emptySet(); - } + long spanId = current.getSpanId(); + payload.put("span_id", spanId); - final TraceSpan.TraceSpanContext traceSpanContext = buildSpanAction.getBuildSpanContext(); - final BuildPipelineNode root = buildPipelineTree((FlowEndNode) flowNode); - try { - return collectTraces(run, buildData, root, null, traceSpanContext); - } finally { - // Explicit removal of InvisibleActions used to collect Traces when the Run finishes. 
- cleanUpTraceActions(run); - } - } + payload.put("id", current.getId()); + payload.put("name", current.getName()); - private Collection collectTraces(final Run run, final BuildData buildData, final BuildPipelineNode current, final BuildPipelineNode parent, final TraceSpan.TraceSpanContext parentSpanContext) { + final String buildLevel = current.getType().getBuildLevel(); + payload.put("level", buildLevel); - if(!isTraceable(current)) { - Collection traces = new ArrayList<>(); - // If the current node is not traceable, we continue with its children - for(final BuildPipelineNode child : current.getChildren()) { - traces.addAll(collectTraces(run, buildData, child, parent, parentSpanContext)); - } - return traces; - } // If the root has propagated queue time, we need to adjust all startTime and endTime from Jenkins pipelines // because this time will be subtracted in the root. See DatadogTraceBuildLogic#finishBuildTrace method. final long propagatedMillisInQueue = Math.max(buildData.getPropagatedMillisInQueue(-1L), 0); - final long fixedStartTimeMillis = TimeUnit.MICROSECONDS.toMillis(current.getStartTimeMicros() - TimeUnit.MILLISECONDS.toMicros(propagatedMillisInQueue)); - final long fixedEndTimeMillis = TimeUnit.MICROSECONDS.toMillis(current.getEndTimeMicros() - TimeUnit.MILLISECONDS.toMicros(propagatedMillisInQueue)); - final String jenkinsResult = current.getResult(); - final String status = statusFromResult(jenkinsResult); - final String prefix = current.getType().getTagName(); - final String buildLevel = current.getType().getBuildLevel(); - - final TraceSpan.TraceSpanContext spanContext = new TraceSpan.TraceSpanContext(parentSpanContext.getTraceId(), parentSpanContext.getSpanId(), current.getSpanId()); - final TraceSpan span = new TraceSpan(buildOperationName(current), TimeUnit.MILLISECONDS.toNanos(fixedStartTimeMillis + propagatedMillisInQueue), spanContext); - final Map envVars = current.getEnvVars(); - - JSONObject payload = new JSONObject(); - 
payload.put("level", buildLevel); - final String url = envVars.get("BUILD_URL") != null ? envVars.get("BUILD_URL") : buildData.getBuildUrl(""); - if(StringUtils.isNotBlank(url)) { - payload.put("url", url + "execution/node/"+current.getId()+"/"); - } + final long fixedStartTimeMillis = TimeUnit.MICROSECONDS.toMillis(current.getStartTimeMicros() - TimeUnit.MILLISECONDS.toMicros(propagatedMillisInQueue)); payload.put("start", DatadogUtilities.toISO8601(new Date(fixedStartTimeMillis))); + + final long fixedEndTimeMillis = TimeUnit.MICROSECONDS.toMillis(current.getEndTimeMicros() - TimeUnit.MILLISECONDS.toMicros(propagatedMillisInQueue)); payload.put("end", DatadogUtilities.toISO8601(new Date(fixedEndTimeMillis))); - payload.put("partial_retry", false); - payload.put("queue_time", TimeUnit.NANOSECONDS.toMillis(DatadogUtilities.getNanosInQueue(current))); - payload.put("status", status); - payload.put("trace_id", spanContext.getTraceId()); - payload.put("span_id", spanContext.getSpanId()); - payload.put("parent_span_id", spanContext.getParentId()); + payload.put("queue_time", TimeUnit.NANOSECONDS.toMillis(current.getNanosInQueue())); - payload.put("id", current.getId()); - payload.put("name", current.getName()); + Status status = current.getStatus(); + payload.put("status", status.toTag()); payload.put("pipeline_unique_id", buildData.getBuildTag("")); payload.put("pipeline_name", buildData.getBaseJobName("")); + + String url = buildData.getBuildUrl(""); + if (StringUtils.isNotBlank(url)) { + payload.put("url", url + "execution/node/" + current.getId() + "/"); + } + if (buildLevel.equals("stage")) { - if (parent != null && parent.getType().getBuildLevel() == "stage") { + String parentStageId = current.getStageId(); + if (parentStageId != null) { // Stage is a child of another stage - payload.put("parent_stage_id", parent.getStageId()); + payload.put("parent_stage_id", parentStageId); } } else if (buildLevel.equals("job")) { payload.put("stage_id", current.getStageId()); 
@@ -126,10 +87,8 @@ private Collection collectTraces(final Run run, final BuildData buil } // Errors - if(current.isError() && current.getErrorObj() != null) { - + if (current.isError() && current.getErrorObj() != null) { JSONObject errPayload = new JSONObject(); - final Throwable error = current.getErrorObj(); errPayload.put("message", error.getMessage()); errPayload.put("type", error.getClass().getName()); @@ -139,7 +98,7 @@ private Collection collectTraces(final Run run, final BuildData buil errPayload.put("stack", errorString.toString()); payload.put("error", errPayload); - } else if(current.isUnstable() && current.getUnstableMessage() != null){ + } else if (current.isUnstable() && current.getUnstableMessage() != null) { JSONObject errPayload = new JSONObject(); errPayload.put("message", current.getUnstableMessage()); errPayload.put("type", "unstable"); @@ -151,18 +110,12 @@ private Collection collectTraces(final Run run, final BuildData buil { JSONObject nodePayload = new JSONObject(); - final String nodeName = getNodeName(run, current, buildData); + final String nodeName = getNodeName(current, buildData); nodePayload.put("name", nodeName); - if(!DatadogUtilities.isMainNode(nodeName)) { - final String workerHostname = getNodeHostname(run, current); - // If the worker hostname is equals to controller hostname but the node name is not "master" - // then we could not detect the worker hostname properly. We set _dd.hostname to 'none' explicitly. - if(buildData.getHostname("").equalsIgnoreCase(workerHostname)) { - nodePayload.put("hostname", HOSTNAME_NONE); - } else { - nodePayload.put("hostname", (workerHostname != null) ? workerHostname : HOSTNAME_NONE); - } + if (!DatadogUtilities.isMainNode(nodeName)) { + final String workerHostname = getNodeHostname(current, buildData); + nodePayload.put("hostname", (workerHostname != null) ? 
workerHostname : HOSTNAME_NONE); } else { nodePayload.put("hostname", DatadogUtilities.getHostname(null)); } @@ -180,24 +133,24 @@ private Collection collectTraces(final Run run, final BuildData buil { JSONObject gitPayload = new JSONObject(); - final String rawGitBranch = GitUtils.resolveGitBranch(envVars, buildData); - String gitBranch = null; - String gitTag = null; - if(rawGitBranch != null && !rawGitBranch.isEmpty()) { + String rawGitBranch = buildData.getBranch(""); + String gitBranch; + String gitTag; + if (rawGitBranch != null && !rawGitBranch.isEmpty()) { gitBranch = normalizeBranch(rawGitBranch); - if(gitBranch != null) { + if (gitBranch != null) { gitPayload.put("branch", gitBranch); } gitTag = normalizeTag(rawGitBranch); - if(gitTag != null) { + if (gitTag != null) { gitPayload.put("tag", gitTag); } } // If the user set DD_GIT_TAG manually, // we override the git.tag value. - gitTag = GitUtils.resolveGitTag(envVars, buildData); - if(StringUtils.isNotEmpty(gitTag)){ + gitTag = buildData.getGitTag(""); + if (StringUtils.isNotEmpty(gitTag)) { gitPayload.put("tag", gitTag); } @@ -205,12 +158,12 @@ private Collection collectTraces(final Run run, final BuildData buil // If we could not detect a valid commit, that means that the GIT_COMMIT environment variable // was overridden by the user at top level, so we set the content what we have (despite it's not valid). // We will show a logger.warning at the end of the pipeline. 
- final String gitCommit = GitUtils.resolveGitCommit(envVars, buildData); - if(gitCommit != null && !gitCommit.isEmpty()) { + String gitCommit = buildData.getGitCommit(""); + if (gitCommit != null && !gitCommit.isEmpty()) { gitPayload.put("sha", gitCommit); } - final String gitRepoUrl = GitUtils.resolveGitRepositoryUrl(envVars, buildData); + String gitRepoUrl = buildData.getGitUrl(""); if (gitRepoUrl != null && !gitRepoUrl.isEmpty()) { gitPayload.put("repository_url", filterSensitiveInfo(gitRepoUrl)); } @@ -260,9 +213,8 @@ private Collection collectTraces(final Run run, final BuildData buil // User { - // User JSONObject userPayload = new JSONObject(); - final String user = envVars.get("USER") != null ? envVars.get("USER") : buildData.getUserId(); + String user = buildData.getUserId(); userPayload.put("name", user); if (StringUtils.isNotEmpty(buildData.getUserEmail(""))) { userPayload.put("email", buildData.getUserEmail("")); @@ -284,42 +236,39 @@ private Collection collectTraces(final Run run, final BuildData buil { JSONArray tagsPayload = new JSONArray(); - final CIGlobalTagsAction ciGlobalTagsAction = run.getAction(CIGlobalTagsAction.class); - if(ciGlobalTagsAction != null) { - final Map globalTags = ciGlobalTagsAction.getTags(); - for(Map.Entry globalTagEntry : globalTags.entrySet()) { - tagsPayload.add(globalTagEntry.getKey() + ":" + globalTagEntry.getValue()); - } + Map globalTags = new HashMap<>(buildData.getTagsForTraces()); + globalTags.putAll(TagsUtil.convertTagsToMapSingleValues(DatadogUtilities.getTagsFromPipelineAction(run))); + + for (Map.Entry globalTagEntry : globalTags.entrySet()) { + tagsPayload.add(globalTagEntry.getKey() + ":" + globalTagEntry.getValue()); } // Jenkins specific - tagsPayload.add(CITags._DD_CI_INTERNAL + ":" + current.isInternal()); + tagsPayload.add(CITags._DD_CI_INTERNAL + ":false"); + + String jenkinsResult = current.getJenkinsResult(); if (StringUtils.isNotEmpty(jenkinsResult)) { tagsPayload.add(CITags.JENKINS_RESULT + ":" + 
jenkinsResult.toLowerCase()); } + final String prefix = current.getType().getTagName(); + // For backwards compat - tagsPayload.add(prefix + CITags._RESULT + ":" + status); + tagsPayload.add(prefix + CITags._RESULT + ":" + status.toTag()); // Arguments final String nodePrefix = current.getType().name().toLowerCase(); - for(Map.Entry entry : current.getArgs().entrySet()) { - tagsPayload.add(CI_PROVIDER + "." + nodePrefix + ".args."+entry.getKey() + ":" + String.valueOf(entry.getValue())); - if("script".equals(entry.getKey())){ - tagsPayload.add(prefix + ".script" + ":" + String.valueOf(entry.getValue())); + for (Map.Entry entry : current.getArgs().entrySet()) { + tagsPayload.add(CI_PROVIDER + "." + nodePrefix + ".args." + entry.getKey() + ":" + entry.getValue()); + if ("script".equals(entry.getKey())) { + tagsPayload.add(prefix + ".script" + ":" + entry.getValue()); } } payload.put("tags", tagsPayload); } - Collection traces = new ArrayList<>(); - for(final BuildPipelineNode child : current.getChildren()) { - traces.addAll(collectTraces(run, buildData, child, current, span.context())); - } - - traces.add(payload); - return traces; + return payload; } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/IsPipelineAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/IsPipelineAction.java deleted file mode 100644 index 5c7438886..000000000 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/IsPipelineAction.java +++ /dev/null @@ -1,9 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.traces; - -import hudson.model.InvisibleAction; - -import java.io.Serializable; - -public class IsPipelineAction extends InvisibleAction implements Serializable { - private static final long serialVersionUID = 1L; -} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/StepDataAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/StepDataAction.java deleted file mode 100644 index 750f990b2..000000000 --- 
a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/StepDataAction.java +++ /dev/null @@ -1,29 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.traces; - -import hudson.model.InvisibleAction; -import hudson.model.Run; -import org.datadog.jenkins.plugins.datadog.model.StepData; -import org.jenkinsci.plugins.workflow.graph.FlowNode; - -import java.io.Serializable; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; - -/** - * Keeps the Step data during a certain Run. - */ -public class StepDataAction extends InvisibleAction implements Serializable { - - private static final long serialVersionUID = 1L; - - private final ConcurrentMap stepDataByDescriptor = new ConcurrentHashMap<>(); - - public StepData put(final Run run, final FlowNode flowNode, final StepData stepData) { - return stepDataByDescriptor.put(flowNode.getId(), stepData); - } - - public StepData get(final Run run, final FlowNode flowNode) { - return stepDataByDescriptor.get(flowNode.getId()); - } - -} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/StepTraceDataAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/StepTraceDataAction.java deleted file mode 100644 index 8734ee52a..000000000 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/StepTraceDataAction.java +++ /dev/null @@ -1,26 +0,0 @@ -package org.datadog.jenkins.plugins.datadog.traces; - -import hudson.model.InvisibleAction; -import hudson.model.Run; -import org.datadog.jenkins.plugins.datadog.model.StepTraceData; -import org.jenkinsci.plugins.workflow.graph.FlowNode; - -import java.io.Serializable; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; - -public class StepTraceDataAction extends InvisibleAction implements Serializable { - - private static final long serialVersionUID = 1L; - - private final ConcurrentMap stepTraceDataByDescriptor = new ConcurrentHashMap<>(); - - public StepTraceData put(final 
Run run, final FlowNode flowNode, final StepTraceData stepTraceData) { - return stepTraceDataByDescriptor.put(flowNode.getId(), stepTraceData); - } - - public StepTraceData get(final Run run, final FlowNode flowNode) { - return stepTraceDataByDescriptor.get(flowNode.getId()); - } - -} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/TraceStepEnvironmentContributor.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/TraceStepEnvironmentContributor.java index fb7c71bfa..70f8121fd 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/TraceStepEnvironmentContributor.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/TraceStepEnvironmentContributor.java @@ -7,16 +7,15 @@ import hudson.Extension; import hudson.model.Run; import hudson.model.TaskListener; +import java.io.IOException; +import java.util.logging.Logger; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; -import org.datadog.jenkins.plugins.datadog.model.StepTraceData; +import org.datadog.jenkins.plugins.datadog.model.TraceInfoAction; import org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode; import org.jenkinsci.plugins.workflow.graph.FlowNode; import org.jenkinsci.plugins.workflow.steps.StepContext; import org.jenkinsci.plugins.workflow.steps.StepEnvironmentContributor; -import java.io.IOException; -import java.util.logging.Logger; - @Extension public class TraceStepEnvironmentContributor extends StepEnvironmentContributor { @@ -41,12 +40,6 @@ public void buildEnvironmentFor(StepContext stepContext, EnvVars envs, TaskListe return; } - final StepTraceDataAction stepTraceDataAction = run.getAction(StepTraceDataAction.class); - if(stepTraceDataAction == null) { - logger.fine("Unable to set trace ids as environment variables. in Run '"+run.getFullDisplayName()+"'. 
StepTraceDataAction is null"); - return; - } - final FlowNode flowNode = stepContext.get(FlowNode.class); if(flowNode == null) { logger.fine("Unable to set trace ids as environment variables. in Run '"+run.getFullDisplayName()+"'. FlowNode is null"); @@ -57,22 +50,20 @@ public void buildEnvironmentFor(StepContext stepContext, EnvVars envs, TaskListe return; } - StepTraceData stepTraceData = stepTraceDataAction.get(run, flowNode); - if(stepTraceData == null){ - stepTraceData = new StepTraceData(IdGenerator.generate()); - stepTraceDataAction.put(run, flowNode, stepTraceData); - } - if(envs.get(TRACE_ID_ENVVAR_KEY) == null) { final String traceIdStr = Long.toUnsignedString(buildSpanAction.getBuildSpanContext().getTraceId()); envs.put(TRACE_ID_ENVVAR_KEY, traceIdStr); logger.fine("Set DD_CUSTOM_TRACE_ID="+traceIdStr+" for FlowNode: "+flowNode); } - if(envs.get(SPAN_ID_ENVVAR_KEY) == null) { - final String spanIdStr = Long.toUnsignedString(stepTraceData.getSpanId()); - envs.put(SPAN_ID_ENVVAR_KEY, spanIdStr); - logger.fine("Set DD_CUSTOM_PARENT_ID="+spanIdStr+" for FlowNode: "+flowNode); + TraceInfoAction traceInfoAction = run.getAction(TraceInfoAction.class); + if (traceInfoAction != null) { + Long spanId = traceInfoAction.getOrCreate(flowNode.getId()); + if(envs.get(SPAN_ID_ENVVAR_KEY) == null) { + final String spanIdStr = Long.toUnsignedString(spanId); + envs.put(SPAN_ID_ENVVAR_KEY, spanIdStr); + logger.fine("Set DD_CUSTOM_PARENT_ID="+spanIdStr+" for FlowNode: "+flowNode); + } } } catch (Exception ex) { diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/message/TraceSpan.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/message/TraceSpan.java index 062b6a188..b35bc04d0 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/message/TraceSpan.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/message/TraceSpan.java @@ -1,9 +1,16 @@ package org.datadog.jenkins.plugins.datadog.traces.message; +import 
com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.MarshallingContext; +import com.thoughtworks.xstream.converters.UnmarshallingContext; +import com.thoughtworks.xstream.io.HierarchicalStreamReader; +import com.thoughtworks.xstream.io.HierarchicalStreamWriter; import java.io.Serializable; import java.util.HashMap; import java.util.Map; +import java.util.Objects; import org.datadog.jenkins.plugins.datadog.traces.IdGenerator; +import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter; public class TraceSpan { @@ -111,6 +118,15 @@ public boolean isError() { return error; } + @Override + public String toString() { + return "TraceSpan{" + + "operationName='" + operationName + '\'' + + ", serviceName='" + serviceName + '\'' + + ", resourceName='" + resourceName + '\'' + + '}'; + } + public static class TraceSpanContext implements Serializable { private static final long serialVersionUID = 1L; @@ -142,5 +158,53 @@ public long getParentId() { public long getSpanId() { return spanId; } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TraceSpanContext that = (TraceSpanContext) o; + return traceId == that.traceId && parentId == that.parentId && spanId == that.spanId; + } + + @Override + public int hashCode() { + return Objects.hash(traceId, parentId, spanId); + } + + @Override + public String toString() { + return "TraceSpanContext{" + + "traceId=" + traceId + + ", parentId=" + parentId + + ", spanId=" + spanId + + '}'; + } + + public static final class ConverterImpl extends DatadogActionConverter { + public ConverterImpl(XStream xs) { + } + + @Override + public boolean canConvert(Class type) { + return TraceSpanContext.class == type; + } + + @Override + public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) { + TraceSpanContext traceSpanContext = (TraceSpanContext) source; + writeField("traceId", 
traceSpanContext.traceId, writer, context); + writeField("parentId", traceSpanContext.parentId, writer, context); + writeField("spanId", traceSpanContext.spanId, writer, context); + } + + @Override + public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) { + long traceId = readField(reader, context, long.class); + long parentId = readField(reader, context, long.class); + long spanId = readField(reader, context, long.class); + return new TraceSpanContext(traceId, parentId, spanId); + } + } } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java index 790268f9d..e4cfebc39 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java @@ -1,15 +1,17 @@ package org.datadog.jenkins.plugins.datadog.traces.write; import hudson.model.Run; -import java.util.Collection; +import java.io.IOException; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; import java.util.logging.Logger; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import net.sf.json.JSONObject; import org.datadog.jenkins.plugins.datadog.clients.DatadogAgentClient; import org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.jenkinsci.plugins.workflow.graph.FlowNode; +import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; /** * Trace write strategy that can dynamically switch from using APM track to using EVP Proxy. 
@@ -36,14 +38,16 @@ public AgentTraceWriteStrategy(TraceWriteStrategy evpProxyStrategy, TraceWriteSt this.checkEvpProxySupport = checkEvpProxySupport; } + @Nullable @Override public JSONObject serialize(BuildData buildData, Run run) { return getCurrentStrategy().serialize(buildData, run); } + @Nonnull @Override - public Collection serialize(FlowNode flowNode, Run run) { - return getCurrentStrategy().serialize(flowNode, run); + public JSONObject serialize(BuildPipelineNode node, Run run) throws IOException, InterruptedException { + return getCurrentStrategy().serialize(node, run); } @Override diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java index c73a7f7a4..ca6440153 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java @@ -1,16 +1,20 @@ package org.datadog.jenkins.plugins.datadog.traces.write; import hudson.model.Run; -import java.util.Collection; +import java.io.IOException; import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import net.sf.json.JSONObject; import org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.jenkinsci.plugins.workflow.graph.FlowNode; +import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; public interface TraceWriteStrategy { + @Nullable JSONObject serialize(BuildData buildData, Run run); - Collection serialize(FlowNode flowNode, Run run); + @Nonnull + JSONObject serialize(BuildPipelineNode node, Run run) throws IOException, InterruptedException; void send(List spans); } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java index ce6e822ae..6577a7f3a 100644 --- 
a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java @@ -1,17 +1,19 @@ package org.datadog.jenkins.plugins.datadog.traces.write; import hudson.model.Run; -import java.util.Collection; +import java.io.IOException; import java.util.List; import java.util.function.Consumer; import java.util.logging.Logger; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import net.sf.json.JSONObject; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; +import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; import org.datadog.jenkins.plugins.datadog.traces.DatadogBaseBuildLogic; import org.datadog.jenkins.plugins.datadog.traces.DatadogBasePipelineLogic; import org.datadog.jenkins.plugins.datadog.util.CircuitBreaker; -import org.jenkinsci.plugins.workflow.graph.FlowNode; public class TraceWriteStrategyImpl implements TraceWriteStrategy { @@ -31,14 +33,16 @@ public TraceWriteStrategyImpl(DatadogBaseBuildLogic buildLogic, DatadogBasePipel ); } + @Nullable @Override public JSONObject serialize(final BuildData buildData, final Run run) { - return buildLogic.finishBuildTrace(buildData, run); + return buildLogic.toJson(buildData, run); } + @Nonnull @Override - public Collection serialize(FlowNode flowNode, Run run) { - return pipelineLogic.execute(flowNode, run); + public JSONObject serialize(BuildPipelineNode node, Run run) throws IOException, InterruptedException { + return pipelineLogic.toJson(node, run); } @Override diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java index fc532fe05..6694d4c1e 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java +++ 
b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java @@ -1,19 +1,20 @@ package org.datadog.jenkins.plugins.datadog.traces.write; import hudson.model.Run; +import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; import java.util.List; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.logging.Logger; +import javax.annotation.Nullable; import net.sf.json.JSONObject; import org.datadog.jenkins.plugins.datadog.DatadogClient; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.jenkinsci.plugins.workflow.graph.FlowNode; +import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; public class TraceWriter { @@ -49,15 +50,13 @@ public void submitBuild(final BuildData buildData, final Run run) throws In submit(buildJson); } - public void submitPipeline(FlowNode flowNode, Run run) throws InterruptedException, TimeoutException { - Collection nodeJsons = traceWriteStrategy.serialize(flowNode, run); - for (JSONObject nodeJson : nodeJsons) { - submit(nodeJson); - } + public void submitPipeline(BuildPipelineNode node, Run run) throws InterruptedException, TimeoutException, IOException { + JSONObject nodeJson = traceWriteStrategy.serialize(node, run); + submit(nodeJson); } - private void submit(JSONObject json) throws InterruptedException, TimeoutException { - if (!queue.offer(json, getEnv(SUBMIT_TIMEOUT_ENV_VAR, DEFAULT_SUBMIT_TIMEOUT_SECONDS), TimeUnit.SECONDS)) { + private void submit(@Nullable JSONObject json) throws InterruptedException, TimeoutException { + if (json != null && !queue.offer(json, getEnv(SUBMIT_TIMEOUT_ENV_VAR, DEFAULT_SUBMIT_TIMEOUT_SECONDS), TimeUnit.SECONDS)) { throw new TimeoutException("Timed out while submitting span"); } } diff --git 
a/src/main/java/org/datadog/jenkins/plugins/datadog/util/DatadogActionConverter.java b/src/main/java/org/datadog/jenkins/plugins/datadog/util/DatadogActionConverter.java new file mode 100644 index 000000000..fb5951794 --- /dev/null +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/util/DatadogActionConverter.java @@ -0,0 +1,22 @@ +package org.datadog.jenkins.plugins.datadog.util; + +import com.thoughtworks.xstream.converters.Converter; +import com.thoughtworks.xstream.converters.MarshallingContext; +import com.thoughtworks.xstream.converters.UnmarshallingContext; +import com.thoughtworks.xstream.io.HierarchicalStreamReader; +import com.thoughtworks.xstream.io.HierarchicalStreamWriter; + +public abstract class DatadogActionConverter implements Converter { + protected void writeField(String name, Object value, HierarchicalStreamWriter writer, MarshallingContext context) { + writer.startNode(name); + context.convertAnother(value); + writer.endNode(); + } + + protected T readField(HierarchicalStreamReader reader, UnmarshallingContext context, Class type) { + reader.moveDown(); + T value = (T) context.convertAnother(null, type); + reader.moveUp(); + return value; + } +} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/util/TagsUtil.java b/src/main/java/org/datadog/jenkins/plugins/datadog/util/TagsUtil.java index 539e40d0c..6a2036f8f 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/util/TagsUtil.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/util/TagsUtil.java @@ -25,10 +25,16 @@ of this software and associated documentation files (the "Software"), to deal package org.datadog.jenkins.plugins.datadog.util; -import net.sf.json.JSONArray; - -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.logging.Logger; +import 
net.sf.json.JSONArray; public class TagsUtil { @@ -38,21 +44,15 @@ public static Map> merge(Map> dest, Map< if (dest == null) { dest = new HashMap<>(); } - if (orig == null) { - orig = new HashMap<>(); - } - for (final Iterator>> iter = orig.entrySet().iterator(); iter.hasNext();){ - Map.Entry> entry = iter.next(); - final String oName = entry.getKey(); - Set dValues = dest.containsKey(oName) ? dest.get(oName) : new HashSet(); - if (dValues == null) { - dValues = new HashSet<>(); - } - Set oValues = entry.getValue(); - if (oValues != null) { - dValues.addAll(oValues); + if (orig != null) { + for (Map.Entry> entry : orig.entrySet()) { + final String oName = entry.getKey(); + Set oValues = entry.getValue(); + Set dValues = dest.computeIfAbsent(oName, k -> new HashSet<>()); + if (oValues != null) { + dValues.addAll(oValues); + } } - dest.put(oName, dValues); } return dest; } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/GitUtils.java b/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/GitUtils.java index fb6d50f94..7ce19c601 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/GitUtils.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/GitUtils.java @@ -2,7 +2,6 @@ import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_BRANCH; import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_SHA; -import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_DEFAULT_BRANCH; import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_REPOSITORY_URL; import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_TAG; import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.GIT_BRANCH; @@ -13,30 +12,20 @@ import hudson.EnvVars; import hudson.FilePath; -import hudson.model.Executor; import hudson.model.Run; import hudson.model.TaskListener; +import java.net.URI; +import 
java.net.URISyntaxException; +import java.util.Map; +import java.util.logging.Logger; +import java.util.regex.Pattern; import org.apache.commons.lang.StringUtils; -import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.audit.DatadogAudit; -import org.datadog.jenkins.plugins.datadog.model.BuildData; import org.datadog.jenkins.plugins.datadog.model.GitCommitAction; import org.datadog.jenkins.plugins.datadog.model.GitRepositoryAction; -import org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils; -import org.eclipse.jgit.lib.PersonIdent; import org.eclipse.jgit.revwalk.RevCommit; import org.jenkinsci.plugins.gitclient.Git; import org.jenkinsci.plugins.gitclient.GitClient; -import org.jenkinsci.plugins.workflow.FilePathUtils; - -import java.net.URI; -import java.net.URISyntaxException; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Map; -import java.util.logging.Logger; -import java.util.regex.Pattern; public final class GitUtils { @@ -44,63 +33,20 @@ public final class GitUtils { private static transient final Pattern SHA1_PATTERN = Pattern.compile("\\b[a-f0-9]{40}\\b"); private static transient final Pattern SCP_REPO_URI_REGEX = Pattern.compile("^([\\w.~-]+@)?(?[\\w.-]+):(?[\\w./-]+)(?:\\?|$)(.*)$"); - private GitUtils(){} - - /** - * Return the FilePath based on the Node name and the Workspace. - * @param nodeName the node name to check - * @param workspace the workspace to build the path - * @return filePath for (nodeName, workspace) - */ - public static FilePath buildFilePath(final String nodeName, final String workspace) { - if(nodeName == null || workspace == null){ - LOGGER.fine("Unable to build FilePath. Either NodeName or Workspace is null"); - return null; - } - - try { - return DatadogUtilities.isMainNode(nodeName) ? 
new FilePath(FilePath.localChannel, workspace): FilePathUtils.find(nodeName, workspace); - } catch (Exception e) { - LOGGER.fine("Unable to build FilePath. Error: " + e); - return null; - } - } - - /** - * Return the FilePath associated with the run instance - * @param run a particular execution of a Jenkins build - * @return filePath for the run. - */ - public static FilePath buildFilePath(final Run run){ - try { - if(run == null) { - LOGGER.fine("Unable to build FilePath. Run is null"); - return null; - } - - final Executor executor = run.getExecutor(); - if(executor == null) { - LOGGER.fine("Unable to build FilePath. Run executor is null"); - return null; - } - - return executor.getCurrentWorkspace(); - } catch (Exception e) { - LOGGER.fine("Unable to build FilePath. Error: " + e); - return null; - } + private GitUtils() { } /** * Return the RevCommit for a certain commit based on the information * stored in a certain workspace of a certain node. + * * @param gitCommit the Git commit SHA to search info. * @param gitClient the Git client used. * @return revCommit */ public static RevCommit searchRevCommit(final GitClient gitClient, final String gitCommit) { try { - if(gitClient == null) { + if (gitClient == null) { LOGGER.fine("Unable to search RevCommit. GitClient is null"); return null; } @@ -114,23 +60,13 @@ public static RevCommit searchRevCommit(final GitClient gitClient, final String /** * Return the {@code RepositoryInfo} for a certain Git repository. + * * @param gitClient The Git client to use to obtain the repository information - * @param envVars the env vars available. * @return repositoryInfo */ - public static RepositoryInfo searchRepositoryInfo(final GitClient gitClient, EnvVars envVars) { + public static RepositoryInfo searchRepositoryInfo(final GitClient gitClient) { try { - // Check if the default branch has been configured using an environment variable by the user. 
- // This is needed because the automatic detection of the default branch using - // the Git client is not always possible cause it depends on how Jenkins checkouts - // the repository. Not always there is a symbolic reference to the default branch. - final String defaultBranch = GitInfoUtils.normalizeBranch(envVars.get(DD_GIT_DEFAULT_BRANCH, null)); - LOGGER.fine("Detected default branch from environment variables: " + defaultBranch); - if(defaultBranch != null && !defaultBranch.isEmpty()) { - return new RepositoryInfo(defaultBranch); - } - - if(gitClient == null){ + if (gitClient == null) { LOGGER.fine("Unable to search RevCommit. GitClient is null"); return null; } @@ -142,149 +78,23 @@ public static RepositoryInfo searchRepositoryInfo(final GitClient gitClient, Env } } - /** - * Returns the GitCommitAction of the Run instance. - * If the Run instance does not have GitCommitAction or - * the current commit hash is different from the commit hash - * stored in the GitCommitAction, then a new GitCommitAction - * is built and stored in the Run instance. - * - * The GitCommit information is stored in an action because - * it's fairly expensive to calculate. To avoid calculating - * every time, it's store in the Run instance as an action. - * @param run a particular execution of a Jenkins build - * @param gitClient the Git client - * @param gitCommit the git commit SHA to use - * @return the GitCommitAction with the information about Git Commit. - */ - public static GitCommitAction buildGitCommitAction(final Run run, final GitClient gitClient, String gitCommit) { - long start = System.currentTimeMillis(); - try { - GitCommitAction commitAction = run.getAction(GitCommitAction.class); - if(commitAction == null || !gitCommit.equals(commitAction.getCommit())) { - try { - if(gitClient == null){ - LOGGER.fine("Unable to build GitCommitAction. 
GitClient is null"); - return null; - } - - final RevCommit revCommit = GitUtils.searchRevCommit(gitClient, gitCommit); - if(revCommit == null) { - LOGGER.fine("Unable to build GitCommitAction. RevCommit is null. [gitCommit: "+gitCommit+"]"); - return null; - } - - final GitCommitAction.Builder builder = GitCommitAction.newBuilder(); - builder.withCommit(gitCommit); - String message; - try { - message = StringUtils.abbreviate(revCommit.getFullMessage(), 1500); - } catch (Exception e) { - LOGGER.fine("Unable to obtain git commit full message. Selecting short message. Error: " + e); - message = revCommit.getShortMessage(); - } - builder.withMessage(message); - - final PersonIdent authorIdent = revCommit.getAuthorIdent(); - if(authorIdent != null){ - builder.withAuthorName(authorIdent.getName()) - .withAuthorEmail(authorIdent.getEmailAddress()) - .withAuthorDate(DatadogUtilities.toISO8601(authorIdent.getWhen())); - } - - final PersonIdent committerIdent = revCommit.getCommitterIdent(); - if(committerIdent != null) { - builder.withCommitterName(committerIdent.getName()) - .withCommitterEmail(committerIdent.getEmailAddress()) - .withCommitterDate(DatadogUtilities.toISO8601(committerIdent.getWhen())); - } - - commitAction = builder.build(); - run.addOrReplaceAction(commitAction); - } catch (Exception e) { - LOGGER.fine("Unable to build GitCommitAction. Error: " + e); - } - } - return commitAction; - } finally { - long end = System.currentTimeMillis(); - DatadogAudit.log("GitUtils.buildGitCommitAction", start, end); - } - } - - /** - * Returns the GitRepositoryAction of the Run instance. - * If the Run instance does not have GitRepositoryAction or - * some infor is not populated in the GitRepositoryAction, - * then a new GitCommitAction is built and stored in the Run instance. - * - * The GitRepository information is stored in an action because - * it's fairly expensive to calculate. To avoid calculating - * every time, it's store in the Run instance as an action. 
- * @param run a particular execution of a Jenkins build - * @param gitClient the Git client - * @param envVars the env vars available - * @param gitRepositoryURL the git repository URL to use - * @return the GitRepositoryAction with the information about Git repository. - */ - public static GitRepositoryAction buildGitRepositoryAction(Run run, GitClient gitClient, final EnvVars envVars, final String gitRepositoryURL) { - long start = System.currentTimeMillis(); - try { - GitRepositoryAction repoAction = run.getAction(GitRepositoryAction.class); - if(repoAction == null || !gitRepositoryURL.equals(repoAction.getRepositoryURL())) { - try { - if(gitClient == null){ - LOGGER.fine("Unable to build GitRepositoryAction. GitClient is null"); - return null; - } - - final RepositoryInfo repositoryInfo = GitUtils.searchRepositoryInfo(gitClient, envVars); - if(repositoryInfo == null) { - LOGGER.fine("Unable to build GitRepositoryAction. RepositoryInfo is null"); - return null; - } - - final GitRepositoryAction.Builder builder = GitRepositoryAction.newBuilder(); - builder.withRepositoryURL(gitRepositoryURL); - builder.withDefaultBranch(repositoryInfo.getDefaultBranch()); - - repoAction = builder.build(); - run.addOrReplaceAction(repoAction); - } catch (Exception e) { - LOGGER.fine("Unable to build GitRepositoryAction. Error: " + e); - } - } - return repoAction; - } finally { - long end = System.currentTimeMillis(); - DatadogAudit.log("GitUtils.buildGitRepositoryAction", start, end); - } - } - /** * Creates a new instance of a {@code GitClient}. 
- * @param run a particular execution of a Jenkins build - * @param listener the task listener - * @param envVars the env vars available - * @param nodeName the node name to use to build the Git client + * + * @param listener the task listener + * @param envVars the env vars available * @param workspace the workspace to use to build the Git client * @return gitClient */ - public static GitClient newGitClient(final Run run, final TaskListener listener, final EnvVars envVars, final String nodeName, final String workspace) { + public static GitClient newGitClient(final TaskListener listener, final EnvVars envVars, final FilePath workspace) { long start = System.currentTimeMillis(); - try { try { - FilePath ws = GitUtils.buildFilePath(run); - if(ws == null){ - ws = GitUtils.buildFilePath(nodeName, workspace); - } - - if(ws == null) { + if (workspace == null) { return null; } - final Git git = Git.with(listener, envVars).in(ws); + final Git git = Git.with(listener, envVars).in(workspace); return git.getClient(); } catch (Exception e) { LOGGER.fine("Unable to create GitClient. Error: " + e); @@ -298,15 +108,16 @@ public static GitClient newGitClient(final Run run, final TaskListener list /** * Check if the git commit is a valid commit. + * * @param gitCommit the git commit to evaluate * @return true if the git commit is a valid SHA 40 (HEX) */ public static boolean isValidCommit(String gitCommit) { - if(gitCommit == null || gitCommit.isEmpty()) { + if (gitCommit == null || gitCommit.isEmpty()) { return false; } - if(gitCommit.length() != 40) { + if (gitCommit.length() != 40) { return false; } @@ -315,11 +126,12 @@ public static boolean isValidCommit(String gitCommit) { /** * Check if the git repository URL is a valid repository + * * @param gitRepositoryURL the current git repository * @return true if the git repository url is a valid repository in either http or scp form. 
*/ public static boolean isValidRepositoryURL(String gitRepositoryURL) { - if(gitRepositoryURL == null || gitRepositoryURL.isEmpty()) { + if (gitRepositoryURL == null || gitRepositoryURL.isEmpty()) { return false; } @@ -334,7 +146,8 @@ public static boolean isValidRepositoryURL(String gitRepositoryURL) { /** * Check if the GitRepositoryAction has been already created and populated. * Typically this method is used to avoid calculating the action multiple times. - * @param run the current run + * + * @param run the current run * @param gitRepositoryUrl the current git respository * @return true if the action has been created and populated. */ @@ -346,7 +159,8 @@ public static boolean isRepositoryInfoAlreadyCreated(Run run, final String /** * Check if the GitCommitAction has been already created and populated. * Typically this method is used to avoid calculating the action multiple times. - * @param run the current run + * + * @param run the current run * @param gitCommit the git commit to check for * @return true if the action has been created and populated. */ @@ -360,17 +174,15 @@ public static boolean isCommitInfoAlreadyCreated(Run run, final String git * 1: Check user supplied env var * 2: Check Jenkins env var * 3: Check BuildData already calculated + * * @param envVars the user supplied env vars - * @param buildData the build data * @return the branch value. 
*/ - public static String resolveGitBranch(Map envVars, BuildData buildData) { - if(StringUtils.isNotEmpty(envVars.get(DD_GIT_BRANCH))){ + public static String resolveGitBranch(Map envVars) { + if (StringUtils.isNotEmpty(envVars.get(DD_GIT_BRANCH))) { return envVars.get(DD_GIT_BRANCH); - } else if (StringUtils.isNotEmpty(envVars.get(GIT_BRANCH))){ + } else if (StringUtils.isNotEmpty(envVars.get(GIT_BRANCH))) { return envVars.get(GIT_BRANCH); - } else if(buildData != null){ - return buildData.getBranch(""); } else { return null; } @@ -381,17 +193,15 @@ public static String resolveGitBranch(Map envVars, BuildData bui * 1: Check user supplied env var * 2: Check Jenkins env var * 3: Check BuildData already calculated + * * @param envVars the user supplied env vars - * @param buildData the build data * @return the commit sha value. */ - public static String resolveGitCommit(Map envVars, BuildData buildData) { - if(isValidCommit(envVars.get(DD_GIT_COMMIT_SHA))){ + public static String resolveGitCommit(Map envVars) { + if (isValidCommit(envVars.get(DD_GIT_COMMIT_SHA))) { return envVars.get(DD_GIT_COMMIT_SHA); - } else if(isValidCommit(envVars.get(GIT_COMMIT))){ + } else if (isValidCommit(envVars.get(GIT_COMMIT))) { return envVars.get(GIT_COMMIT); - } else if(buildData != null){ - return buildData.getGitCommit(""); } else { return null; } @@ -402,19 +212,17 @@ public static String resolveGitCommit(Map envVars, BuildData bui * 1: Check user supplied env var * 2: Check Jenkins env var * 3: Check BuildData already calculated + * * @param envVars the user supplied env vars - * @param buildData the build data * @return the git repository url value. 
*/ - public static String resolveGitRepositoryUrl(Map envVars, BuildData buildData) { - if(StringUtils.isNotEmpty(envVars.get(DD_GIT_REPOSITORY_URL))){ + public static String resolveGitRepositoryUrl(Map envVars) { + if (StringUtils.isNotEmpty(envVars.get(DD_GIT_REPOSITORY_URL))) { return envVars.get(DD_GIT_REPOSITORY_URL); - } else if(StringUtils.isNotEmpty(envVars.get(GIT_REPOSITORY_URL))) { + } else if (StringUtils.isNotEmpty(envVars.get(GIT_REPOSITORY_URL))) { return envVars.get(GIT_REPOSITORY_URL); - } else if(StringUtils.isNotEmpty(envVars.get(GIT_REPOSITORY_URL_ALT))){ + } else if (StringUtils.isNotEmpty(envVars.get(GIT_REPOSITORY_URL_ALT))) { return envVars.get(GIT_REPOSITORY_URL_ALT); - } else if(buildData != null){ - return buildData.getGitUrl(""); } else { return null; } @@ -424,15 +232,13 @@ public static String resolveGitRepositoryUrl(Map envVars, BuildD * Resolve the value for the git tag based * 1: Check user supplied env var * 3: Check BuildData already calculated + * * @param envVars the user supplied environment variables - * @param buildData the build data * @return the git tag value. */ - public static String resolveGitTag(Map envVars, BuildData buildData) { - if(StringUtils.isNotEmpty(envVars.get(DD_GIT_TAG))){ + public static String resolveGitTag(Map envVars) { + if (StringUtils.isNotEmpty(envVars.get(DD_GIT_TAG))) { return envVars.get(DD_GIT_TAG); - } else if(buildData != null){ - return buildData.getGitTag(""); } else { return null; } @@ -440,11 +246,12 @@ public static String resolveGitTag(Map envVars, BuildData buildD /** * Check if the env vars map contains any environment variable with Git information supplied by the user manually. + * * @param envVars the environment variables * @return true if any of the env vars is not empty. 
*/ public static boolean isUserSuppliedGit(Map envVars) { - if(envVars == null) { + if (envVars == null) { return false; } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfo.java b/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfo.java index 91f74b215..39d7fc88c 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfo.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfo.java @@ -4,17 +4,27 @@ public class RepositoryInfo implements Serializable { - public static final RepositoryInfo EMPTY_REPOSITORY_INFO = new RepositoryInfo(""); - private static final long serialVersionUID = 1L; + private final String repoUrl; private final String defaultBranch; + private final String branch; - public RepositoryInfo(String defaultBranch) { + public RepositoryInfo(String repoUrl, String defaultBranch, String branch) { + this.repoUrl = repoUrl; this.defaultBranch = defaultBranch; + this.branch = branch; + } + + public String getRepoUrl() { + return repoUrl; } public String getDefaultBranch() { return defaultBranch; } + + public String getBranch() { + return branch; + } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfoCallback.java b/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfoCallback.java index d9f37b06c..079b1840d 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfoCallback.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfoCallback.java @@ -1,18 +1,20 @@ package org.datadog.jenkins.plugins.datadog.util.git; import hudson.remoting.VirtualChannel; +import java.io.IOException; +import java.util.Set; +import java.util.logging.Logger; import org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils; +import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.Ref; import org.eclipse.jgit.lib.Repository; +import 
org.eclipse.jgit.lib.StoredConfig; import org.jenkinsci.plugins.gitclient.RepositoryCallback; -import java.io.IOException; -import java.util.logging.Logger; - /** * Returns the RepositoryInfo instance for a certain repository * using the JGit. - * + *

* This must be called using gitClient.withRepository(...) method. * See GitUtils. */ @@ -24,25 +26,52 @@ public final class RepositoryInfoCallback implements RepositoryCallback remoteNames = repository.getRemoteNames(); + if (!remoteNames.isEmpty()) { + return remoteNames.iterator().next(); + } + return Constants.DEFAULT_REMOTE_NAME; + } + + private String getDefaultBranch(Repository repository, String remoteName) throws Exception { + Ref remoteHead = repository.findRef("refs/remotes/" + remoteName + "/HEAD"); + if (remoteHead != null && remoteHead.isSymbolic()) { + return GitInfoUtils.normalizeBranch(remoteHead.getTarget().getName()); + } + if (repository.findRef("master") != null) { + return "master"; + } + if (repository.findRef("main") != null) { + return "main"; + } + return null; + } } diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/DatadogGlobalConfigurationTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/DatadogGlobalConfigurationTest.java index f44233418..4709bf93c 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/DatadogGlobalConfigurationTest.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/DatadogGlobalConfigurationTest.java @@ -25,7 +25,12 @@ public class DatadogGlobalConfigurationTest { @ClassRule - public static JenkinsRule jenkinsRule = new JenkinsRule(); + public static JenkinsRule jenkinsRule; + + static { + jenkinsRule = new JenkinsRule(); + jenkinsRule.timeout = 300; // default value of 180 is too small for all the test cases in this class + } @Rule public JenkinsConfiguredWithCodeRule r = new JenkinsConfiguredWithCodeRule(); diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java b/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java index 098351110..ae0c77004 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java +++ 
b/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java @@ -25,7 +25,9 @@ of this software and associated documentation files (the "Software"), to deal package org.datadog.jenkins.plugins.datadog.clients; +import com.google.common.base.Objects; import hudson.model.Run; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -35,13 +37,17 @@ of this software and associated documentation files (the "Software"), to deal import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import net.sf.json.JSONObject; import org.datadog.jenkins.plugins.datadog.DatadogClient; import org.datadog.jenkins.plugins.datadog.DatadogEvent; import org.datadog.jenkins.plugins.datadog.model.BuildData; +import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; import org.datadog.jenkins.plugins.datadog.traces.DatadogTraceBuildLogic; import org.datadog.jenkins.plugins.datadog.traces.DatadogTracePipelineLogic; import org.datadog.jenkins.plugins.datadog.traces.DatadogWebhookBuildLogic; @@ -49,7 +55,6 @@ of this software and associated documentation files (the "Software"), to deal import org.datadog.jenkins.plugins.datadog.traces.mapper.JsonTraceSpanMapper; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriteStrategy; -import org.jenkinsci.plugins.workflow.graph.FlowNode; import org.junit.Assert; public class DatadogClientStub implements DatadogClient { @@ -60,10 +65,10 @@ public class DatadogClientStub implements DatadogClient { public List logLines; public DatadogClientStub() { - this.metrics = new ArrayList<>(); - this.serviceChecks = new ArrayList<>(); - this.events 
= new ArrayList<>(); - this.logLines = new ArrayList<>(); + this.metrics = new CopyOnWriteArrayList<>(); + this.serviceChecks = new CopyOnWriteArrayList<>(); + this.events = new CopyOnWriteArrayList<>(); + this.logLines = new CopyOnWriteArrayList<>(); } @Override @@ -75,7 +80,7 @@ public boolean event(DatadogEvent event) { @Override public boolean incrementCounter(String name, String hostname, Map> tags) { for (DatadogMetric m : this.metrics) { - if(m.same(new DatadogMetric(name, 0, hostname, convertTagMapToList(tags)))) { + if (m.same(new DatadogMetric(name, 0, hostname, convertTagMapToList(tags)))) { double value = m.getValue() + 1; this.metrics.remove(m); this.metrics.add(new DatadogMetric(name, value, hostname, convertTagMapToList(tags))); @@ -131,7 +136,7 @@ public boolean assertMetric(String name, double value, String hostname, String[] "metrics: {" + this.metrics.toString() + " }"); return false; } - + /* * Returns the value of the asserted metric if it exists. */ @@ -154,10 +159,10 @@ public double assertMetricGetValue(String name, String hostname, String[] tags) */ public boolean assertMetricValues(String name, double value, String hostname, int count) { DatadogMetric m = new DatadogMetric(name, value, hostname, new ArrayList<>()); - + // compare without tags so metrics of the same value are considered the same. long timesSeen = this.metrics.stream().filter(x -> x.sameNoTags(m)).count(); - if (timesSeen == count){ + if (timesSeen == count) { return true; } Assert.fail("metric { " + m.toString() + " found " + timesSeen + " times, not " + count); @@ -172,7 +177,7 @@ public boolean assertMetricValuesMin(String name, double value, String hostname, // compare without tags so metrics of the same value are considered the same. 
long timesSeen = this.metrics.stream().filter(x -> x.sameNoTags(m)).count(); - if (timesSeen >= min){ + if (timesSeen >= min) { return true; } Assert.fail("metric { " + m.toString() + " found " + timesSeen + " times, not more than" + min); @@ -183,12 +188,25 @@ public boolean assertMetric(String name, String hostname, String[] tags) { // Assert that a metric with the same name and tags has already been submitted without checking the value. DatadogMetric m = new DatadogMetric(name, 0, hostname, Arrays.asList(tags)); Optional match = this.metrics.stream().filter(t -> t.same(m)).findFirst(); - if(match.isPresent()){ + if (match.isPresent()) { this.metrics.remove(match.get()); return true; } - Assert.fail("metric { " + m.toString() + " does not exist (ignoring value). " + - "metrics: {" + this.metrics.toString() + " }"); + + List sameMetricsNoTags = metrics.stream().filter(t -> t.sameNoTags(m)).collect(Collectors.toList()); + if (!sameMetricsNoTags.isEmpty()) { + Assert.fail("metric { " + m + " does not exist (ignoring value).\n" + + "Same metrics ignoring tags: {" + sameMetricsNoTags + " }"); + } + + List metricsWithSameName = metrics.stream().filter(t -> Objects.equal(t.getName(), m.getName())).collect(Collectors.toList()); + if (!metricsWithSameName.isEmpty()) { + Assert.fail("metric { " + m + " does not exist (ignoring value).\n" + + "Metrics with same name: {" + metricsWithSameName + " }"); + } + + Assert.fail("metric { " + m + " does not exist (ignoring value).\n" + + "Metrics: {" + this.metrics.toString() + " }"); return false; } @@ -238,11 +256,11 @@ public boolean assertedAllEvents() { return false; } - public static List convertTagMapToList(Map> tags){ + public static List convertTagMapToList(Map> tags) { List result = new ArrayList<>(); for (String name : tags.keySet()) { Set values = tags.get(name); - for (String value : values){ + for (String value : values) { result.add(String.format("%s:%s", name, value)); } } @@ -250,7 +268,7 @@ public static List 
convertTagMapToList(Map> tags){ } - public static Map> addTagToMap(Map> tags, String name, String value){ + public static Map> addTagToMap(Map> tags, String name, String value) { Set v = tags.containsKey(name) ? tags.get(name) : new HashSet(); v.add(value); tags.put(name, v); @@ -264,30 +282,37 @@ private static final class StubTraceWriteStrategy implements TraceWriteStrategy private final Collection traces = new LinkedBlockingQueue<>(); private final Collection webhooks = new LinkedBlockingQueue<>(); + @Nullable @Override public JSONObject serialize(BuildData buildData, Run run) { if (isWebhook) { - JSONObject json = new DatadogWebhookBuildLogic().finishBuildTrace(buildData, run); - webhooks.add(json); + JSONObject json = new DatadogWebhookBuildLogic().toJson(buildData, run); + if (json != null) { + webhooks.add(json); + } return json; } else { - TraceSpan span = new DatadogTraceBuildLogic().createSpan(buildData, run); - traces.add(span); - return new JsonTraceSpanMapper().map(span); + TraceSpan span = new DatadogTraceBuildLogic().toSpan(buildData, run); + if (span != null) { + traces.add(span); + return new JsonTraceSpanMapper().map(span); + } else { + return null; + } } } + @Nonnull @Override - public Collection serialize(FlowNode flowNode, Run run) { + public JSONObject serialize(BuildPipelineNode node, Run run) throws IOException, InterruptedException { if (isWebhook) { - Collection spans = new DatadogWebhookPipelineLogic().execute(flowNode, run); - webhooks.addAll(spans); - return spans; + JSONObject webhook = new DatadogWebhookPipelineLogic().toJson(node, run); + webhooks.add(webhook); + return webhook; } else { - Collection traceSpans = new DatadogTracePipelineLogic().collectTraces(flowNode, run); - traces.addAll(traceSpans); - JsonTraceSpanMapper mapper = new JsonTraceSpanMapper(); - return traceSpans.stream().map(mapper::map).collect(Collectors.toList()); + TraceSpan span = new DatadogTracePipelineLogic().toSpan(node, run); + traces.add(span); + return 
new JsonTraceSpanMapper().map(span); } } @@ -347,7 +372,7 @@ public List getWebhooks() { public List getSpans() { ArrayList spans = new ArrayList<>(traceWriteStrategy.traces); Collections.sort(spans, (span1, span2) -> { - if(span1.getStartNano() < span2.getStartNano()){ + if (span1.getStartNano() < span2.getStartNano()) { return -1; } else if (span1.getStartNano() > span2.getStartNano()) { return 1; diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientTest.java index f121a2aaa..79e194405 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientTest.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientTest.java @@ -110,8 +110,8 @@ public void testDogstatsDClientGetInstanceEnableValidations() { public void testEvpProxyEnabled() { DatadogGlobalConfiguration cfg = DatadogUtilities.getDatadogGlobalDescriptor(); cfg.setEnableCiVisibility(true); - DatadogAgentClient client = Mockito.spy(new DatadogAgentClient("test",1234, 1235, 1236)); - Mockito.doReturn(new HashSet(Arrays.asList("/evp_proxy/v3/"))).when(client).fetchAgentSupportedEndpoints(); + DatadogAgentClient client = Mockito.spy(new DatadogAgentClient("test",1234, 1235, 1236, 1_000)); + Mockito.doReturn(new HashSet<>(Arrays.asList("/evp_proxy/v3/"))).when(client).fetchAgentSupportedEndpoints(); Assert.assertTrue(client.isEvpProxySupported()); } @@ -119,7 +119,7 @@ public void testEvpProxyEnabled() { public void testEvpProxyDisabled() { DatadogGlobalConfiguration cfg = DatadogUtilities.getDatadogGlobalDescriptor(); cfg.setEnableCiVisibility(true); - DatadogAgentClient client = Mockito.spy(new DatadogAgentClient("test",1234, 1235, 1236)); + DatadogAgentClient client = Mockito.spy(new DatadogAgentClient("test",1234, 1235, 1236, 1_000)); Mockito.doReturn(new HashSet()).when(client).fetchAgentSupportedEndpoints(); 
Assert.assertFalse(client.isEvpProxySupported()); } @@ -128,7 +128,7 @@ public void testEvpProxyDisabled() { public void testEmptyAgentSupportedEndpointsWithNoAgent() { DatadogGlobalConfiguration cfg = DatadogUtilities.getDatadogGlobalDescriptor(); cfg.setEnableCiVisibility(true); - DatadogAgentClient client = new DatadogAgentClient("test", 1234, 1235, 1236); + DatadogAgentClient client = new DatadogAgentClient("test", 1234, 1235, 1236, 1_000); Assert.assertTrue(client.fetchAgentSupportedEndpoints().isEmpty()); } diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java index 3c159bb75..33b476f3b 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java @@ -25,11 +25,16 @@ import hudson.model.FreeStyleBuild; import hudson.model.FreeStyleProject; import hudson.model.Label; +import hudson.plugins.git.BranchSpec; +import hudson.plugins.git.GitSCM; +import hudson.plugins.git.browser.GitRepositoryBrowser; +import hudson.plugins.git.extensions.impl.LocalBranch; import hudson.slaves.DumbSlave; import hudson.slaves.EnvironmentVariablesNodeProperty; import java.io.IOException; import java.io.InputStream; import java.net.URL; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -57,6 +62,11 @@ public class DatadogBuildListenerIT extends DatadogTraceAbstractTest { public static JenkinsRule jenkinsRule = new JenkinsRule(); private DatadogClientStub clientStub; + static { + // to allow checkout from local git repositories - needed for some tests + GitSCM.ALLOW_LOCAL_CHECKOUT = true; + } + @Before public void beforeEach() throws IOException { DatadogGlobalConfiguration cfg = DatadogUtilities.getDatadogGlobalDescriptor(); @@ -112,17 +122,20 @@ public 
void testTraces() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL", "https://github.com/johndoe/foobar.git"); + env.put("GIT_URL", "file:///tmp/git-repo/"); jenkins.getGlobalNodeProperties().add(prop); + createLocallyAvailableGitRepo(jenkins); + final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccess"); + + GitSCM git = new GitSCM(GitSCM.createRepoList("file:///tmp/git-repo/", null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); + project.setScm(git); + final FilePath ws = jenkins.getWorkspaceFor(project); env.put("NODE_NAME", "master"); env.put("WORKSPACE", ws.getRemote()); - InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip"); - if(gitZip != null) { - ws.unzipFrom(gitZip); - } + FreeStyleBuild run = project.scheduleBuild2(0).get(); final String buildPrefix = BuildPipelineNode.NodeType.PIPELINE.getTagName(); @@ -156,12 +169,20 @@ public void testTraces() throws Exception { checkHostNameTag(meta); assertEquals("success", meta.get(CITags.JENKINS_RESULT)); assertEquals("jenkins-buildIntegrationSuccess-1", meta.get(CITags.JENKINS_TAG)); - assertNotNull(meta.get(CITags._DD_CI_STAGES)); - assertEquals("[]", meta.get(CITags._DD_CI_STAGES)); + assertNull(meta.get(CITags._DD_CI_STAGES)); // this is a freestyle project which has no stages assertCleanupActions(run); } + private void createLocallyAvailableGitRepo(Jenkins jenkins) throws IOException, InterruptedException { + try (InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip")) { + FilePath gitRepoPath = jenkins.createPath("/tmp/git-repo"); + gitRepoPath.deleteRecursive(); + gitRepoPath.mkdirs(); + 
gitRepoPath.unzipFrom(gitZip); + } + } + @Test public void testGitDefaultBranch() throws Exception { Jenkins jenkins = jenkinsRule.jenkins; @@ -169,16 +190,18 @@ public void testGitDefaultBranch() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL", "https://github.com/johndoe/foobar.git"); + env.put("GIT_URL", "file:///tmp/git-repo/"); final String defaultBranch = "refs/heads/hardcoded-master"; env.put("DD_GIT_DEFAULT_BRANCH", defaultBranch); jenkins.getGlobalNodeProperties().add(prop); + createLocallyAvailableGitRepo(jenkins); + final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessDefaultBranch"); - final URL gitZip = getClass().getClassLoader().getResource("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip"); - if(gitZip != null) { - project.setScm(new ExtractResourceSCM(gitZip)); - } + + GitSCM git = new GitSCM(GitSCM.createRepoList("file:///tmp/git-repo/", null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); + project.setScm(git); + project.scheduleBuild2(0).get(); clientStub.waitForTraces(1); @@ -198,18 +221,20 @@ public void testUserSuppliedGitWithoutCommitInfo() throws Exception { env.put(GIT_BRANCH, "not-valid-branch"); env.put(GIT_COMMIT, "not-valid-commit"); - env.put(DD_GIT_REPOSITORY_URL, "https://github.com/johndoe/foobar.git"); + env.put(DD_GIT_REPOSITORY_URL, "file:///tmp/git-repo/"); env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); final String defaultBranch = "refs/heads/hardcoded-master"; env.put(DD_GIT_DEFAULT_BRANCH, defaultBranch); jenkins.getGlobalNodeProperties().add(prop); + createLocallyAvailableGitRepo(jenkins); + final FreeStyleProject project = 
jenkinsRule.createFreeStyleProject("buildIntegrationSuccessUserSuppliedGitWithoutCommitInfo"); - final URL gitZip = getClass().getClassLoader().getResource("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip"); - if(gitZip != null) { - project.setScm(new ExtractResourceSCM(gitZip)); - } + + GitSCM git = new GitSCM(GitSCM.createRepoList("file:///tmp/git-repo/", null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); + project.setScm(git); + project.scheduleBuild2(0).get(); clientStub.waitForTraces(1); @@ -228,7 +253,7 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { env.put(GIT_REPOSITORY_URL, "not-valid-repo"); env.put(GIT_BRANCH, "not-valid-branch"); env.put(GIT_COMMIT, "not-valid-commit"); - env.put(DD_GIT_REPOSITORY_URL, "https://github.com/johndoe/foobar.git"); + env.put(DD_GIT_REPOSITORY_URL, "file:///tmp/git-repo/"); env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); env.put(DD_GIT_COMMIT_MESSAGE, "hardcoded-message"); @@ -242,11 +267,13 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { env.put(DD_GIT_DEFAULT_BRANCH, defaultBranch); jenkins.getGlobalNodeProperties().add(prop); + createLocallyAvailableGitRepo(jenkins); + final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessUserSuppliedGitWithCommitInfo"); - final URL gitZip = getClass().getClassLoader().getResource("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip"); - if(gitZip != null) { - project.setScm(new ExtractResourceSCM(gitZip)); - } + + GitSCM git = new GitSCM(GitSCM.createRepoList("file:///tmp/git-repo/", null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); + project.setScm(git); + project.scheduleBuild2(0).get(); clientStub.waitForTraces(1); @@ -265,19 +292,21 @@ public void 
testUserSuppliedGitWithCommitInfo() throws Exception { assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT__SHA)); assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT_SHA)); assertEquals("master", meta.get(CITags.GIT_BRANCH)); - assertEquals("https://github.com/johndoe/foobar.git", meta.get(CITags.GIT_REPOSITORY_URL)); + assertEquals("file:///tmp/git-repo/", meta.get(CITags.GIT_REPOSITORY_URL)); assertEquals("hardcoded-master", meta.get(CITags.GIT_DEFAULT_BRANCH)); } @Test public void testUserSuppliedGitWithCommitInfoWebhook() throws Exception { + clientStub.configureForWebhooks(); + Jenkins jenkins = jenkinsRule.jenkins; final EnvironmentVariablesNodeProperty prop = new EnvironmentVariablesNodeProperty(); EnvVars env = prop.getEnvVars(); env.put(GIT_REPOSITORY_URL, "not-valid-repo"); env.put(GIT_BRANCH, "not-valid-branch"); env.put(GIT_COMMIT, "not-valid-commit"); - env.put(DD_GIT_REPOSITORY_URL, "https://github.com/johndoe/foobar.git"); + env.put(DD_GIT_REPOSITORY_URL, "file:///tmp/git-repo/"); env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); env.put(DD_GIT_COMMIT_MESSAGE, "hardcoded-message"); @@ -291,11 +320,13 @@ public void testUserSuppliedGitWithCommitInfoWebhook() throws Exception { env.put(DD_GIT_DEFAULT_BRANCH, defaultBranch); jenkins.getGlobalNodeProperties().add(prop); + createLocallyAvailableGitRepo(jenkins); + final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessUserSuppliedGitWithCommitInfoWebhook"); - final URL gitZip = getClass().getClassLoader().getResource("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip"); - if(gitZip != null) { - project.setScm(new ExtractResourceSCM(gitZip)); - } + + GitSCM git = new GitSCM(GitSCM.createRepoList("file:///tmp/git-repo/", null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new 
LocalBranch("master"))); + project.setScm(git); + project.scheduleBuild2(0).get(); clientStub.waitForWebhooks(1); @@ -313,7 +344,7 @@ public void testUserSuppliedGitWithCommitInfoWebhook() throws Exception { assertEquals("hardcoded-committer-date", meta.getString("commit_time")); assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.getString("sha")); assertEquals("master", meta.getString("branch")); - assertEquals("https://github.com/johndoe/foobar.git", meta.getString("repository_url")); + assertEquals("file:///tmp/git-repo/", meta.getString("repository_url")); assertEquals("hardcoded-master", meta.getString("default_branch")); } @@ -373,22 +404,26 @@ public void testFilterSensitiveInfoRepoUrl() throws Exception { @Test public void testGitAlternativeRepoUrlWebhook() throws Exception { + clientStub.configureForWebhooks(); + Jenkins jenkins = jenkinsRule.jenkins; final EnvironmentVariablesNodeProperty prop = new EnvironmentVariablesNodeProperty(); EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL_1", "https://github.com/johndoe/foobar.git"); + env.put("GIT_URL_1", "file:///tmp/git-repo/"); jenkins.getGlobalNodeProperties().add(prop); + createLocallyAvailableGitRepo(jenkins); + final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessAltRepoUrlWebhook"); + + GitSCM git = new GitSCM(GitSCM.createRepoList("file:///tmp/git-repo/", null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); + project.setScm(git); + final FilePath ws = jenkins.getWorkspaceFor(project); env.put("NODE_NAME", "master"); env.put("WORKSPACE", ws.getRemote()); - InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip"); - if(gitZip != null) { - ws.unzipFrom(gitZip); - } project.scheduleBuild2(0).get(); @@ 
-415,6 +450,8 @@ public void testTracesDisabled() throws Exception { @Test public void testTracesDisabledWebhooks() throws Exception { + clientStub.configureForWebhooks(); + DatadogGlobalConfiguration cfg = DatadogUtilities.getDatadogGlobalDescriptor(); cfg.setEnableCiVisibility(false); @@ -447,6 +484,8 @@ public void testCITagsOnTraces() throws Exception { @Test public void testCITagsOnWebhooks() throws Exception { + clientStub.configureForWebhooks(); + DatadogGlobalConfiguration cfg = DatadogUtilities.getDatadogGlobalDescriptor(); cfg.setGlobalJobTags("(.*?)_job, global_job_tag:$ENV_VAR"); cfg.setGlobalTags("global_tag:$ENV_VAR"); diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerTest.java index b9622dbfe..4d7058ada 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerTest.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerTest.java @@ -28,6 +28,7 @@ of this software and associated documentation files (the "Software"), to deal import com.cloudbees.workflow.rest.external.StageNodeExt; import hudson.EnvVars; import hudson.model.*; +import java.nio.charset.Charset; import jenkins.model.Jenkins; import org.datadog.jenkins.plugins.datadog.DatadogEvent.AlertType; import org.datadog.jenkins.plugins.datadog.DatadogEvent.Priority; @@ -82,7 +83,7 @@ public void setUpMocks() { envVars.put("GIT_BRANCH", "test-branch"); workflowRun = mock(WorkflowRun.class); - + when(workflowRun.getCharset()).thenReturn(Charset.defaultCharset()); } @Test diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java index 12c3f19af..893b18f97 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java +++ 
b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java @@ -75,8 +75,10 @@ public class DatadogGraphListenerTest extends DatadogTraceAbstractTest { public static JenkinsRule jenkinsRule; static { + // to allow checkout from local git repositories - needed for some tests + System.setProperty("hudson.plugins.git.GitSCM.ALLOW_LOCAL_CHECKOUT", "true"); jenkinsRule = new JenkinsRule(); - jenkinsRule.timeout = 300; // default value of 180 is too small for all the test cases in this class + jenkinsRule.timeout = 600; // default value of 180 is too small for all the test cases in this class } private DatadogGraphListener listener; @@ -147,7 +149,10 @@ public void testNewNode() throws IOException { @Test public void testIntegration() throws Exception { - jenkinsRule.createOnlineSlave(new LabelAtom("windows")); + EnvVars windowsEnvVars = new EnvVars(); + String windowsHostname = "windows-hostname"; + windowsEnvVars.put("HOSTNAME", windowsHostname); + jenkinsRule.createOnlineSlave(new LabelAtom("windows"), windowsEnvVars); WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegration"); String definition = IOUtils.toString( this.getClass().getResourceAsStream("testPipelineDefinition.txt"), @@ -161,7 +166,6 @@ public void testIntegration() throws Exception { System.out.println(s); } br.close(); - String hostname = DatadogUtilities.getHostname(null); String[] baseTags = new String[]{ "jenkins_url:" + DatadogUtilities.getJenkinsUrl(), "user_id:anonymous", @@ -174,13 +178,23 @@ public void testIntegration() throws Exception { String[] parentNames = new String[]{ "Test On Windows", "Test On Windows", "Test On Windows", "Parallel tests", "Parallel tests", "root", "root" }; for (int i = 0; i < depths.length; i++) { + String hostname; + String stageName = stageNames[i]; + if (stageName.contains("Windows")) { + // agent { label "windows" } + hostname = windowsHostname; + } else { + // agent { label "built-in" } + 
hostname = DatadogUtilities.getHostname(null); + } + String[] expectedTags = Arrays.copyOf(baseTags, baseTags.length + 3); expectedTags[expectedTags.length - 3] = "stage_depth:" + depths[i]; expectedTags[expectedTags.length - 2] = "stage_name:" + stageNames[i]; expectedTags[expectedTags.length - 1] = "parent_stage_name:" + parentNames[i]; clientStub.assertMetric("jenkins.job.stage_duration", hostname, expectedTags); - if (stageNames[i] == "Test On Linux" || stageNames[i] == "Parallel tests") { + if (stageName.equals("Test On Linux") || stageName.equals("Parallel tests")) { // Timeout is set to 11s, but since there are other instructions, // we test it's at least 10s. double pauseValue = clientStub.assertMetricGetValue("jenkins.job.stage_pause_duration", hostname, expectedTags); @@ -203,11 +217,11 @@ public void testIntegrationGitInfo() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL", "https://github.com/johndoe/foobar.git"); + env.put("GIT_URL", "file:///tmp/git-repo/"); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationSingleCommit"); String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccess.txt"), + this.getClass().getResourceAsStream("testPipelineSuccessLocalCheckout.txt"), "UTF-8" ); @@ -215,10 +229,9 @@ public void testIntegrationGitInfo() throws Exception { final FilePath ws = jenkins.getWorkspaceFor(job); env.put("NODE_NAME", "master"); env.put("WORKSPACE", ws.getRemote()); - InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip"); - if(gitZip != null) { - ws.unzipFrom(gitZip); - } + + createLocallyAvailableGitRepo(jenkins); + jenkins.getGlobalNodeProperties().add(prop); job.scheduleBuild2(0).get(); @@ -261,11 +274,11 @@ public void testIntegrationGitInfoWebhooks() throws Exception { EnvVars 
env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL", "https://github.com/johndoe/foobar.git"); + env.put("GIT_URL", "file:///tmp/git-repo/"); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationSingleCommitWebhooks"); String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccess.txt"), + this.getClass().getResourceAsStream("testPipelineSuccessLocalCheckout.txt"), "UTF-8" ); @@ -273,10 +286,9 @@ public void testIntegrationGitInfoWebhooks() throws Exception { final FilePath ws = jenkins.getWorkspaceFor(job); env.put("NODE_NAME", "master"); env.put("WORKSPACE", ws.getRemote()); - InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip"); - if(gitZip != null) { - ws.unzipFrom(gitZip); - } + + createLocallyAvailableGitRepo(jenkins); + jenkins.getGlobalNodeProperties().add(prop); job.scheduleBuild2(0).get(); @@ -293,13 +305,13 @@ public void testIntegrationGitInfoDefaultBranchEnvVar() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL", "https://github.com/johndoe/foobar.git"); + env.put("GIT_URL", "file:///tmp/git-repo/"); final String defaultBranch = "refs/heads/hardcoded-master"; env.put("DD_GIT_DEFAULT_BRANCH", defaultBranch); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationSingleCommitDefaultBranchEnvVar"); String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccess.txt"), + this.getClass().getResourceAsStream("testPipelineSuccessLocalCheckout.txt"), "UTF-8" ); @@ -307,10 +319,9 @@ public void testIntegrationGitInfoDefaultBranchEnvVar() throws Exception { final FilePath ws = jenkins.getWorkspaceFor(job); env.put("NODE_NAME", "master"); 
env.put("WORKSPACE", ws.getRemote()); - InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip"); - if(gitZip != null) { - ws.unzipFrom(gitZip); - } + + createLocallyAvailableGitRepo(jenkins); + jenkins.getGlobalNodeProperties().add(prop); job.scheduleBuild2(0).get(); @@ -328,7 +339,7 @@ public void testIntegrationGitInfoOverrideCommit() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL", "https://github.com/johndoe/foobar.git"); + env.put("GIT_URL", "file:///tmp/git-repo/"); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationOverrideCommit"); String definition = IOUtils.toString( @@ -340,10 +351,9 @@ public void testIntegrationGitInfoOverrideCommit() throws Exception { final FilePath ws = jenkins.getWorkspaceFor(job); env.put("NODE_NAME", "master"); env.put("WORKSPACE", ws.getRemote()); - InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip"); - if(gitZip != null) { - ws.unzipFrom(gitZip); - } + + createLocallyAvailableGitRepo(jenkins); + jenkins.getGlobalNodeProperties().add(prop); job.scheduleBuild2(0).get(); @@ -362,7 +372,7 @@ public void testIntegrationGitAlternativeRepoUrl() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL_1", "https://github.com/johndoe/foobar.git"); + env.put("GIT_URL_1", "file:///tmp/git-repo/"); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationAltRepoUrl"); String definition = IOUtils.toString( @@ -374,10 +384,9 @@ public void testIntegrationGitAlternativeRepoUrl() throws Exception { final FilePath ws = jenkins.getWorkspaceFor(job); env.put("NODE_NAME", "master"); env.put("WORKSPACE", 
ws.getRemote()); - InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip"); - if(gitZip != null) { - ws.unzipFrom(gitZip); - } + + createLocallyAvailableGitRepo(jenkins); + jenkins.getGlobalNodeProperties().add(prop); job.scheduleBuild2(0).get(); @@ -385,7 +394,7 @@ public void testIntegrationGitAlternativeRepoUrl() throws Exception { final List spans = clientStub.getSpans(); assertEquals(5, spans.size()); for(TraceSpan span : spans) { - assertEquals("https://github.com/johndoe/foobar.git", span.getMeta().get(CITags.GIT_REPOSITORY_URL)); + assertEquals("file:///tmp/git-repo/", span.getMeta().get(CITags.GIT_REPOSITORY_URL)); } } @@ -398,7 +407,7 @@ public void testIntegrationGitAlternativeRepoUrlWebhooks() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL_1", "https://github.com/johndoe/foobar.git"); + env.put("GIT_URL_1", "file:///tmp/git-repo/"); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationAltRepoUrlWebhooks"); String definition = IOUtils.toString( @@ -410,10 +419,9 @@ public void testIntegrationGitAlternativeRepoUrlWebhooks() throws Exception { final FilePath ws = jenkins.getWorkspaceFor(job); env.put("NODE_NAME", "master"); env.put("WORKSPACE", ws.getRemote()); - InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip"); - if(gitZip != null) { - ws.unzipFrom(gitZip); - } + + createLocallyAvailableGitRepo(jenkins); + jenkins.getGlobalNodeProperties().add(prop); job.scheduleBuild2(0).get(); @@ -421,7 +429,7 @@ public void testIntegrationGitAlternativeRepoUrlWebhooks() throws Exception { final List webhooks = clientStub.getWebhooks(); assertEquals(5, webhooks.size()); for(JSONObject webhook : webhooks) { - 
assertEquals("https://github.com/johndoe/foobar.git", webhook.getJSONObject("git").get("repository_url")); + assertEquals("file:///tmp/git-repo/", webhook.getJSONObject("git").get("repository_url")); } } @@ -430,14 +438,14 @@ public void testUserSuppliedGitWithoutCommitInfo() throws Exception { Jenkins jenkins = jenkinsRule.jenkins; final EnvironmentVariablesNodeProperty prop = new EnvironmentVariablesNodeProperty(); EnvVars env = prop.getEnvVars(); - env.put(DD_GIT_REPOSITORY_URL, "https://github.com/johndoe/foobar.git"); + env.put(DD_GIT_REPOSITORY_URL, "file:///tmp/git-repo/"); env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); env.put(DD_GIT_TAG, "0.1.0"); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationUserSuppliedGitWithoutCommitInfo"); String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccess.txt"), + this.getClass().getResourceAsStream("testPipelineSuccessLocalCheckout.txt"), "UTF-8" ); @@ -445,10 +453,9 @@ public void testUserSuppliedGitWithoutCommitInfo() throws Exception { final FilePath ws = jenkins.getWorkspaceFor(job); env.put("NODE_NAME", "master"); env.put("WORKSPACE", ws.getRemote()); - InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip"); - if(gitZip != null) { - ws.unzipFrom(gitZip); - } + + createLocallyAvailableGitRepo(jenkins); + jenkins.getGlobalNodeProperties().add(prop); job.scheduleBuild2(0).get(); @@ -461,6 +468,15 @@ public void testUserSuppliedGitWithoutCommitInfo() throws Exception { assertEquals("0.1.0", meta.get(CITags.GIT_TAG)); } + private void createLocallyAvailableGitRepo(Jenkins jenkins) throws IOException, InterruptedException { + try (InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip")) { + FilePath gitRepoPath = 
jenkins.createPath("/tmp/git-repo"); + gitRepoPath.deleteRecursive(); + gitRepoPath.mkdirs(); + gitRepoPath.unzipFrom(gitZip); + } + } + @Test public void testUserSuppliedGitWithoutCommitInfoWebhooks() throws Exception { clientStub.configureForWebhooks(); @@ -468,14 +484,14 @@ public void testUserSuppliedGitWithoutCommitInfoWebhooks() throws Exception { Jenkins jenkins = jenkinsRule.jenkins; final EnvironmentVariablesNodeProperty prop = new EnvironmentVariablesNodeProperty(); EnvVars env = prop.getEnvVars(); - env.put(DD_GIT_REPOSITORY_URL, "https://github.com/johndoe/foobar.git"); + env.put(DD_GIT_REPOSITORY_URL, "file:///tmp/git-repo/"); env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); env.put(DD_GIT_TAG, "0.1.0"); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationUserSuppliedGitWithoutCommitInfoWebhooks"); String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccess.txt"), + this.getClass().getResourceAsStream("testPipelineSuccessLocalCheckout.txt"), "UTF-8" ); @@ -483,10 +499,9 @@ public void testUserSuppliedGitWithoutCommitInfoWebhooks() throws Exception { final FilePath ws = jenkins.getWorkspaceFor(job); env.put("NODE_NAME", "master"); env.put("WORKSPACE", ws.getRemote()); - InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip"); - if(gitZip != null) { - ws.unzipFrom(gitZip); - } + + createLocallyAvailableGitRepo(jenkins); + jenkins.getGlobalNodeProperties().add(prop); job.scheduleBuild2(0).get(); @@ -503,7 +518,7 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { Jenkins jenkins = jenkinsRule.jenkins; final EnvironmentVariablesNodeProperty prop = new EnvironmentVariablesNodeProperty(); EnvVars env = prop.getEnvVars(); - env.put(DD_GIT_REPOSITORY_URL, "https://github.com/johndoe/foobar.git"); + env.put(DD_GIT_REPOSITORY_URL, 
"file:///tmp/git-repo/"); env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); env.put(DD_GIT_COMMIT_MESSAGE, "hardcoded-message"); @@ -517,7 +532,7 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { env.put(DD_GIT_DEFAULT_BRANCH, defaultBranch); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationUserSuppliedGitWithCommitInfo"); String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccess.txt"), + this.getClass().getResourceAsStream("testPipelineSuccessLocalCheckout.txt"), "UTF-8" ); @@ -525,10 +540,9 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { final FilePath ws = jenkins.getWorkspaceFor(job); env.put("NODE_NAME", "master"); env.put("WORKSPACE", ws.getRemote()); - InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip"); - if(gitZip != null) { - ws.unzipFrom(gitZip); - } + + createLocallyAvailableGitRepo(jenkins); + jenkins.getGlobalNodeProperties().add(prop); job.scheduleBuild2(0).get(); @@ -547,7 +561,7 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT__SHA)); assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT_SHA)); assertEquals("master", meta.get(CITags.GIT_BRANCH)); - assertEquals("https://github.com/johndoe/foobar.git", meta.get(CITags.GIT_REPOSITORY_URL)); + assertEquals("file:///tmp/git-repo/", meta.get(CITags.GIT_REPOSITORY_URL)); assertEquals("hardcoded-master", meta.get(CITags.GIT_DEFAULT_BRANCH)); } @@ -713,15 +727,11 @@ public void testIntegrationPipelineQueueTimeOnStages() throws Exception { final List spans = clientStub.getSpans(); assertEquals(6, spans.size()); - final TraceSpan buildSpan = spans.get(0); + final TraceSpan buildSpan = searchSpan(spans, 
"pipelineIntegrationQueueTimeOnStages"); assertEquals(Double.valueOf(0), buildSpan.getMetrics().get(CITags.QUEUE_TIME)); - assertEquals("built-in", buildSpan.getMeta().get(CITags.NODE_NAME)); - assertEquals("[\"built-in\"]", buildSpan.getMeta().get(CITags.NODE_LABELS)); - final TraceSpan runStages = spans.get(1); + final TraceSpan runStages = searchSpan(spans, "Run stages"); assertEquals(Double.valueOf(0), runStages.getMetrics().get(CITags.QUEUE_TIME)); - assertEquals("built-in", runStages.getMeta().get(CITags.NODE_NAME)); - assertEquals("[\"built-in\"]", runStages.getMeta().get(CITags.NODE_LABELS)); final TraceSpan stage1 = searchSpan(spans, "Stage 1"); final Double stage1QueueTime = stage1.getMetrics().get(CITags.QUEUE_TIME); @@ -846,7 +856,7 @@ public void testIntegrationPipelineQueueTimeOnPipeline() throws Exception { @Test public void testIntegrationNoFailureTag() throws Exception { - jenkinsRule.createOnlineSlave(new LabelAtom("windows")); + jenkinsRule.createOnlineSlave(); WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegrationSuccess"); String definition = IOUtils.toString( this.getClass().getResourceAsStream("testPipelineSuccess.txt"), @@ -914,7 +924,7 @@ public void testIntegrationNoFailureTag() throws Exception { assertNotNull(stageSpanMeta.get(stagePrefix + CITags._URL)); assertNotNull(stageSpanMeta.get(CITags.NODE_NAME)); assertNotNull(stageSpanMeta.get(CITags.NODE_LABELS)); - checkHostNameTag(buildSpanMeta); + checkHostNameTag(stageSpanMeta); assertEquals("false", stageSpanMeta.get(CITags._DD_CI_INTERNAL)); assertEquals("4", stageSpanMeta.get(stagePrefix + CITags._NUMBER)); assertEquals(BuildPipelineNode.NodeType.STAGE.getBuildLevel(), stageSpanMeta.get(CITags._DD_CI_BUILD_LEVEL)); @@ -986,9 +996,10 @@ public void testIntegrationPipelineSkippedLogicWebhook() throws Exception { final List webhooks = clientStub.getWebhooks(); assertEquals(2, webhooks.size()); - final JSONObject webhook = webhooks.get(1); - 
assertEquals("Stage", webhook.getString("name")); - assertEquals("skipped", webhook.getString("status")); + final JSONObject stage = searchWebhookByLevel(webhooks, "stage"); + assertNotNull("Could not find stage webhook", stage); + assertEquals("Stage", stage.getString("name")); + assertEquals("skipped", stage.getString("status")); } @Test @@ -996,7 +1007,7 @@ public void testIntegrationTracesDisabled() throws Exception{ DatadogGlobalConfiguration cfg = DatadogUtilities.getDatadogGlobalDescriptor(); cfg.setEnableCiVisibility(false); - jenkinsRule.createOnlineSlave(new LabelAtom("windows")); + jenkinsRule.createOnlineSlave(); WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegrationSuccess-notraces"); String definition = IOUtils.toString( this.getClass().getResourceAsStream("testPipelineSuccess.txt"), @@ -1088,8 +1099,6 @@ public void testStagesNodeNames_complexPipelineStages01() throws Exception { final TraceSpan prepareBlock = spans.get(1); assertEquals("Prepare", prepareBlock.getResourceName()); - assertEquals(worker03.getNodeName(), prepareBlock.getMeta().get(CITags.NODE_NAME)); - assertTrue(prepareBlock.getMeta().get(CITags.NODE_LABELS).contains(worker03.getNodeName())); final TraceSpan prepareStage01 = spans.get(2); assertNodeNameParallelBlock(prepareStage01, worker01, worker02); @@ -1558,8 +1567,8 @@ public void testIsManualTrueWebhooks() throws Exception { clientStub.waitForWebhooks(3); final List webhooks = clientStub.getWebhooks(); assertEquals(3, webhooks.size()); - final JSONObject webhook = webhooks.get(0); - assertTrue(webhook.getBoolean("is_manual")); + JSONObject pipeline = searchWebhookByLevel(webhooks, "pipeline"); + assertTrue(pipeline.getBoolean("is_manual")); } @Test @@ -1577,11 +1586,19 @@ public void testIsManualFalse() throws Exception { clientStub.waitForTraces(3); final List spans = clientStub.getSpans(); assertEquals(3, spans.size()); - final TraceSpan buildSpan = spans.get(0); + final TraceSpan buildSpan 
= getBuild(spans); final String isManual = buildSpan.getMeta().get(CITags.IS_MANUAL); assertEquals("false", isManual); } + private TraceSpan getBuild(List spans) { + for (TraceSpan span : spans) { + if ("jenkins.build".equals(span.getOperationName())) { + return span; + } + } + return null; + } @Test public void testIsManualFalseWebhooks() throws Exception { @@ -1599,11 +1616,10 @@ public void testIsManualFalseWebhooks() throws Exception { clientStub.waitForWebhooks(3); final List webhooks = clientStub.getWebhooks(); assertEquals(3, webhooks.size()); - final JSONObject webhook = webhooks.get(0); - assertFalse(webhook.getBoolean("is_manual")); + final JSONObject pipeline = searchWebhookByLevel(webhooks, "pipeline"); + assertFalse(pipeline.getBoolean("is_manual")); } - @Test public void testErrorPropagationOnNestedStages() throws Exception { givenPipeline("testErrorPropagationOnNestedStages", "testErrorPropagationOnNestedStages.txt"); diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogTraceAbstractTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogTraceAbstractTest.java index 8f7dcc93f..3ea69acbb 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogTraceAbstractTest.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogTraceAbstractTest.java @@ -1,28 +1,21 @@ package org.datadog.jenkins.plugins.datadog.listeners; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import hudson.model.Run; +import java.util.Map; import net.sf.json.JSONObject; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; -import org.datadog.jenkins.plugins.datadog.model.CIGlobalTagsAction; import org.datadog.jenkins.plugins.datadog.model.GitCommitAction; import org.datadog.jenkins.plugins.datadog.model.GitRepositoryAction; import org.datadog.jenkins.plugins.datadog.model.PipelineNodeInfoAction; 
import org.datadog.jenkins.plugins.datadog.model.PipelineQueueInfoAction; -import org.datadog.jenkins.plugins.datadog.model.StageBreakdownAction; import org.datadog.jenkins.plugins.datadog.traces.BuildSpanAction; import org.datadog.jenkins.plugins.datadog.traces.CITags; -import org.datadog.jenkins.plugins.datadog.traces.IsPipelineAction; -import org.datadog.jenkins.plugins.datadog.traces.StepDataAction; -import org.datadog.jenkins.plugins.datadog.traces.StepTraceDataAction; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; -import java.util.Map; - -import static org.junit.Assert.*; - public abstract class DatadogTraceAbstractTest { protected void assertGitVariablesOnSpan(TraceSpan span, String defaultBranch) { @@ -37,7 +30,7 @@ protected void assertGitVariablesOnSpan(TraceSpan span, String defaultBranch) { assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT__SHA)); assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT_SHA)); assertEquals("master", meta.get(CITags.GIT_BRANCH)); - assertEquals("https://github.com/johndoe/foobar.git", meta.get(CITags.GIT_REPOSITORY_URL)); + assertEquals("file:///tmp/git-repo/", meta.get(CITags.GIT_REPOSITORY_URL)); assertEquals(defaultBranch, meta.get(CITags.GIT_DEFAULT_BRANCH)); } @@ -52,21 +45,16 @@ protected void assertGitVariablesOnWebhook(JSONObject webhook, String defaultBra assertEquals("2020-10-08T07:49:32.000Z", meta.get("commit_time")); assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get("sha")); assertEquals("master", meta.get("branch")); - assertEquals("https://github.com/johndoe/foobar.git", meta.get("repository_url")); + assertEquals("file:///tmp/git-repo/", meta.get("repository_url")); assertEquals(defaultBranch, meta.get("default_branch")); } protected void assertCleanupActions(Run run) { assertNull(run.getAction(BuildSpanAction.class)); - assertNull(run.getAction(StepDataAction.class)); - 
assertNull(run.getAction(CIGlobalTagsAction.class)); assertNull(run.getAction(GitCommitAction.class)); assertNull(run.getAction(GitRepositoryAction.class)); assertNull(run.getAction(PipelineNodeInfoAction.class)); assertNull(run.getAction(PipelineQueueInfoAction.class)); - assertNull(run.getAction(StageBreakdownAction.class)); - assertNull(run.getAction(IsPipelineAction.class)); - assertNull(run.getAction(StepTraceDataAction.class)); } protected void checkHostNameTag(Map meta) { diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/logs/DatadogTaskListenerDecoratorTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/logs/DatadogTaskListenerDecoratorTest.java index dacf12945..13c39cd48 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/logs/DatadogTaskListenerDecoratorTest.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/logs/DatadogTaskListenerDecoratorTest.java @@ -24,29 +24,34 @@ of this software and associated documentation files (the "Software"), to deal */ package org.datadog.jenkins.plugins.datadog.logs; -import org.jenkinsci.plugins.workflow.job.WorkflowJob; -import org.jenkinsci.plugins.workflow.job.WorkflowRun; -import org.junit.Before; -import org.junit.Test; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import hudson.EnvVars; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; - -import static org.mockito.Mockito.*; +import java.nio.charset.Charset; +import org.jenkinsci.plugins.workflow.job.WorkflowJob; +import org.jenkinsci.plugins.workflow.job.WorkflowRun; +import org.junit.Before; +import org.junit.Test; public class DatadogTaskListenerDecoratorTest { private WorkflowRun workflowRun; private WorkflowJob job; @Before - public void setupMock() { + public void setupMock() throws Exception { workflowRun = mock(WorkflowRun.class); job = 
mock(WorkflowJob.class); when(job.getFullName()).thenReturn("Pipeline job"); when(workflowRun.getParent()).thenReturn(job); + when(workflowRun.getEnvironment(any())).thenReturn(mock(EnvVars.class)); + when(workflowRun.getCharset()).thenReturn(mock(Charset.class)); } @Test diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/model/ActionConverterTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/model/ActionConverterTest.java new file mode 100644 index 000000000..ecbba952c --- /dev/null +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/model/ActionConverterTest.java @@ -0,0 +1,37 @@ +package org.datadog.jenkins.plugins.datadog.model; + +import static org.junit.Assert.assertEquals; + +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.Converter; +import hudson.util.XStream2; +import org.junit.Before; +import org.junit.Test; + +public abstract class ActionConverterTest { + + private final XStream XSTREAM = new XStream2(XStream2.getDefaultDriver()); + + private final T action; + + public ActionConverterTest(final T action) { + this.action = action; + } + + protected abstract Converter getConverter(XStream xStream); + + @Before + public void setUp() { + Converter converter = getConverter(XSTREAM); + XSTREAM.registerConverter(converter); + assertEquals(converter, XSTREAM.getConverterLookup().lookupConverterForType(action.getClass())); + } + + @Test + public void testStatusActionConverter() { + String actionXml = XSTREAM.toXML(action); + T deserializedAction = (T) XSTREAM.fromXML(actionXml); + assertEquals(action, deserializedAction); + } + +} diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/model/BuildPipelineTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/model/BuildPipelineTest.java deleted file mode 100644 index 26b3134a1..000000000 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/model/BuildPipelineTest.java +++ /dev/null @@ -1,67 +0,0 @@ -package 
org.datadog.jenkins.plugins.datadog.model; - -import static org.datadog.jenkins.plugins.datadog.traces.DatadogTracesUtilities.SAMPLE_BLOCK_END_NODE_ID; -import static org.datadog.jenkins.plugins.datadog.traces.DatadogTracesUtilities.SAMPLE_BLOCK_START_NODE_ID; -import static org.datadog.jenkins.plugins.datadog.traces.DatadogTracesUtilities.SAMPLE_BLOCK_START_NODE_NAME; -import static org.datadog.jenkins.plugins.datadog.traces.DatadogTracesUtilities.SAMPLE_FLOW_END_NODE_ID; -import static org.datadog.jenkins.plugins.datadog.traces.DatadogTracesUtilities.SAMPLE_FLOW_START_NODE_ID; -import static org.datadog.jenkins.plugins.datadog.traces.DatadogTracesUtilities.SAMPLE_FLOW_START_NODE_NAME; -import static org.datadog.jenkins.plugins.datadog.traces.DatadogTracesUtilities.SAMPLE_STEP_ATOM_NODE_ONE_ID; -import static org.datadog.jenkins.plugins.datadog.traces.DatadogTracesUtilities.SAMPLE_STEP_ATOM_NODE_ONE_NAME; -import static org.datadog.jenkins.plugins.datadog.traces.DatadogTracesUtilities.SAMPLE_STEP_ATOM_NODE_TWO_ID; -import static org.datadog.jenkins.plugins.datadog.traces.DatadogTracesUtilities.SAMPLE_STEP_ATOM_NODE_TWO_NAME; -import static org.datadog.jenkins.plugins.datadog.traces.DatadogTracesUtilities.SAMPLE_STEP_END_NODE_ID; -import static org.datadog.jenkins.plugins.datadog.traces.DatadogTracesUtilities.SAMPLE_STEP_START_NODE_ID; -import static org.datadog.jenkins.plugins.datadog.traces.DatadogTracesUtilities.SAMPLE_STEP_START_NODE_NAME; -import static org.datadog.jenkins.plugins.datadog.traces.DatadogTracesUtilities.SAMPLE_TIME; -import static org.datadog.jenkins.plugins.datadog.traces.DatadogTracesUtilities.getDummyPipeline; -import static org.junit.Assert.assertEquals; - -import org.jenkinsci.plugins.workflow.graph.FlowNode; -import org.junit.Test; - -import java.util.Map; - -public class BuildPipelineTest { - - @Test - public void testShouldBuildPipelineWithNestedStages() { - //Given - final Map flowNodeById = getDummyPipeline(); - final BuildPipeline 
pipeline = new BuildPipeline(); - pipeline.add(flowNodeById.get(SAMPLE_FLOW_END_NODE_ID)); - pipeline.add(flowNodeById.get(SAMPLE_BLOCK_END_NODE_ID)); - pipeline.add(flowNodeById.get(SAMPLE_STEP_END_NODE_ID)); - pipeline.add(flowNodeById.get(SAMPLE_STEP_ATOM_NODE_TWO_ID)); - pipeline.add(flowNodeById.get(SAMPLE_STEP_ATOM_NODE_ONE_ID)); - pipeline.add(flowNodeById.get(SAMPLE_STEP_START_NODE_ID)); - pipeline.add(flowNodeById.get(SAMPLE_BLOCK_START_NODE_ID)); - pipeline.add(flowNodeById.get(SAMPLE_FLOW_START_NODE_ID)); - - //When - final BuildPipelineNode pipelineRoot = pipeline.buildTree(); - - //Then - assertNode(pipelineRoot, SAMPLE_FLOW_START_NODE_ID, SAMPLE_FLOW_START_NODE_NAME, SAMPLE_TIME * 1000, SAMPLE_TIME * 1000, 1); - - final BuildPipelineNode rootChild = pipelineRoot.getChildren().get(0); - assertNode(rootChild, SAMPLE_BLOCK_START_NODE_ID, SAMPLE_BLOCK_START_NODE_NAME, SAMPLE_TIME * 1000, SAMPLE_TIME * 1000, 1); - - final BuildPipelineNode rootChildChild = rootChild.getChildren().get(0); - assertNode(rootChildChild, SAMPLE_STEP_START_NODE_ID, SAMPLE_STEP_START_NODE_NAME, SAMPLE_TIME * 1000, SAMPLE_TIME * 1000, 2); - - final BuildPipelineNode atomOne = rootChildChild.getChildren().get(0); - assertNode(atomOne, SAMPLE_STEP_ATOM_NODE_ONE_ID, SAMPLE_STEP_ATOM_NODE_ONE_NAME, (SAMPLE_TIME + 100) * 1000, (SAMPLE_TIME + 200) * 1000, 0); - - final BuildPipelineNode atomTwo = rootChildChild.getChildren().get(1); - assertNode(atomTwo, SAMPLE_STEP_ATOM_NODE_TWO_ID, SAMPLE_STEP_ATOM_NODE_TWO_NAME, (SAMPLE_TIME + 200) * 1000, SAMPLE_TIME * 1000, 0); - } - - private void assertNode(BuildPipelineNode node, String expectedId, String expectedName, long expectedStartTimeMicros, long expectedEndTimeMicros, int expectedChildrenSize) { - assertEquals(expectedId, node.getId()); - assertEquals(expectedName, node.getName()); - assertEquals(expectedStartTimeMicros , node.getStartTimeMicros()); - assertEquals(expectedEndTimeMicros , node.getEndTimeMicros()); - 
assertEquals(expectedChildrenSize, node.getChildren().size()); - } -} diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/model/BuildSpanActionConverterTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/model/BuildSpanActionConverterTest.java new file mode 100644 index 000000000..b4af244bf --- /dev/null +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/model/BuildSpanActionConverterTest.java @@ -0,0 +1,35 @@ +package org.datadog.jenkins.plugins.datadog.model; + +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.Converter; +import java.util.Arrays; +import java.util.Collection; +import org.datadog.jenkins.plugins.datadog.traces.BuildSpanAction; +import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +public class BuildSpanActionConverterTest extends ActionConverterTest { + + @Parameterized.Parameters(name = "{0}") + public static Collection data() { + return Arrays.asList(new Object[][]{ + {new BuildSpanAction(new TraceSpan.TraceSpanContext(123, 456, 789), "buildUrl")}, + {new BuildSpanAction(new TraceSpan.TraceSpanContext(0, 456, 789), "buildUrl")}, + {new BuildSpanAction(new TraceSpan.TraceSpanContext(123, 0, 789), null)}, + {new BuildSpanAction(new TraceSpan.TraceSpanContext(123, 456, 0), "buildUrl")}, + {new BuildSpanAction(new TraceSpan.TraceSpanContext(0, 0, 0), null)}, + }); + } + + public BuildSpanActionConverterTest(final BuildSpanAction action) { + super(action); + } + + @Override + protected Converter getConverter(XStream xStream) { + xStream.registerConverter(new TraceSpan.TraceSpanContext.ConverterImpl(xStream)); + return new BuildSpanAction.ConverterImpl(xStream); + } +} diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/model/GitCommitActionConverterTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/model/GitCommitActionConverterTest.java new file 
mode 100644 index 000000000..3aaf75dcd --- /dev/null +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/model/GitCommitActionConverterTest.java @@ -0,0 +1,41 @@ +package org.datadog.jenkins.plugins.datadog.model; + +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.Converter; +import java.util.Arrays; +import java.util.Collection; +import org.datadog.jenkins.plugins.datadog.model.ActionConverterTest; +import org.datadog.jenkins.plugins.datadog.model.GitCommitAction; +import org.datadog.jenkins.plugins.datadog.model.GitRepositoryAction; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +public class GitCommitActionConverterTest extends ActionConverterTest { + + @Parameterized.Parameters(name = "{0}") + public static Collection data() { + return Arrays.asList(new Object[][]{ + {new GitCommitAction("tag", "commit", "message", "authorName", "authorEmail", "authorDate", "committerName", "committerEmail", "committerDate")}, + {new GitCommitAction(null, "commit", "message", "authorName", "authorEmail", "authorDate", "committerName", "committerEmail", "committerDate")}, + {new GitCommitAction("tag", null, "message", "authorName", "authorEmail", "authorDate", "committerName", "committerEmail", "committerDate")}, + {new GitCommitAction("tag", "commit", null, "authorName", "authorEmail", "authorDate", "committerName", "committerEmail", "committerDate")}, + {new GitCommitAction("tag", "commit", "message", null, "authorEmail", "authorDate", "committerName", "committerEmail", "committerDate")}, + {new GitCommitAction("tag", "commit", "message", "authorName", null, "authorDate", "committerName", "committerEmail", "committerDate")}, + {new GitCommitAction("tag", "commit", "message", "authorName", "authorEmail", null, "committerName", "committerEmail", "committerDate")}, + {new GitCommitAction("tag", "commit", "message", "authorName", "authorEmail", "authorDate", null, 
"committerEmail", "committerDate")}, + {new GitCommitAction("tag", "commit", "message", "authorName", "authorEmail", "authorDate", "committerName", null, "committerDate")}, + {new GitCommitAction("tag", "commit", "message", "authorName", "authorEmail", "authorDate", "committerName", "committerEmail", null)}, + {new GitCommitAction(null, null, null, null, null, null, null, null, null)}, + }); + } + + public GitCommitActionConverterTest(final GitCommitAction action) { + super(action); + } + + @Override + protected Converter getConverter(XStream xStream) { + return new GitCommitAction.ConverterImpl(xStream); + } +} diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryActionConverterTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryActionConverterTest.java new file mode 100644 index 000000000..f1e36c049 --- /dev/null +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryActionConverterTest.java @@ -0,0 +1,39 @@ +package org.datadog.jenkins.plugins.datadog.model; + +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.Converter; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import org.datadog.jenkins.plugins.datadog.model.ActionConverterTest; +import org.datadog.jenkins.plugins.datadog.model.GitRepositoryAction; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +public class GitRepositoryActionConverterTest extends ActionConverterTest { + + @Parameterized.Parameters(name = "{0}") + public static Collection data() { + return Arrays.asList(new Object[][]{ + {new GitRepositoryAction("repoUrl", "defaultBranch", "branch")}, + {new GitRepositoryAction(null, "defaultBranch", "branch")}, + {new GitRepositoryAction("repoUrl", null, "branch")}, + {new GitRepositoryAction("repoUrl", "defaultBranch", null)}, + {new GitRepositoryAction(null, null, 
"branch")}, + {new GitRepositoryAction(null, "defaultBranch", null)}, + {new GitRepositoryAction("repoUrl", null, null)}, + {new GitRepositoryAction(null, null, null)}, + }); + } + + public GitRepositoryActionConverterTest(final GitRepositoryAction action) { + super(action); + } + + @Override + protected Converter getConverter(XStream xStream) { + return new GitRepositoryAction.ConverterImpl(xStream); + } +} diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/model/PipelineNodeInfoActionConverterTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/model/PipelineNodeInfoActionConverterTest.java new file mode 100644 index 000000000..8430cfc63 --- /dev/null +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/model/PipelineNodeInfoActionConverterTest.java @@ -0,0 +1,39 @@ +package org.datadog.jenkins.plugins.datadog.model; + +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.Converter; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import org.datadog.jenkins.plugins.datadog.model.ActionConverterTest; +import org.datadog.jenkins.plugins.datadog.model.PipelineNodeInfoAction; +import org.datadog.jenkins.plugins.datadog.model.PipelineQueueInfoAction; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +public class PipelineNodeInfoActionConverterTest extends ActionConverterTest { + + @Parameterized.Parameters(name = "{0}") + public static Collection data() { + return Arrays.asList(new Object[][]{ + {new PipelineNodeInfoAction("nodeName", Collections.singleton("nodeLabels"), "nodeHostname", "workspace")}, + {new PipelineNodeInfoAction("nodeName", new HashSet<>(Arrays.asList("label1", "label2")), "nodeHostname", "workspace")}, + {new PipelineNodeInfoAction(null, Collections.singleton("nodeLabels"), "nodeHostname", "workspace")}, + {new PipelineNodeInfoAction("nodeName", Collections.emptySet(), 
"nodeHostname", "workspace")}, + {new PipelineNodeInfoAction("nodeName", Collections.singleton("nodeLabels"), null, "workspace")}, + {new PipelineNodeInfoAction("nodeName", Collections.singleton("nodeLabels"), "nodeHostname", null)}, + {new PipelineNodeInfoAction(null, Collections.emptySet(), null, null)}, + }); + } + + public PipelineNodeInfoActionConverterTest(final PipelineNodeInfoAction action) { + super(action); + } + + @Override + protected Converter getConverter(XStream xStream) { + return new PipelineNodeInfoAction.ConverterImpl(xStream); + } +} diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/model/PipelineQueueInfoActionConverterTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/model/PipelineQueueInfoActionConverterTest.java new file mode 100644 index 000000000..0be6d7944 --- /dev/null +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/model/PipelineQueueInfoActionConverterTest.java @@ -0,0 +1,34 @@ +package org.datadog.jenkins.plugins.datadog.model; + +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.Converter; +import java.util.Arrays; +import java.util.Collection; +import org.datadog.jenkins.plugins.datadog.model.ActionConverterTest; +import org.datadog.jenkins.plugins.datadog.model.GitCommitAction; +import org.datadog.jenkins.plugins.datadog.model.PipelineQueueInfoAction; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +public class PipelineQueueInfoActionConverterTest extends ActionConverterTest { + + @Parameterized.Parameters(name = "{0}") + public static Collection data() { + return Arrays.asList(new Object[][]{ + {new PipelineQueueInfoAction(123, 456)}, + {new PipelineQueueInfoAction(-1, 456)}, + {new PipelineQueueInfoAction(123, -1)}, + {new PipelineQueueInfoAction(-1, -1)}, + }); + } + + public PipelineQueueInfoActionConverterTest(final PipelineQueueInfoAction action) { + super(action); + } + + @Override + protected 
Converter getConverter(XStream xStream) { + return new PipelineQueueInfoAction.ConverterImpl(xStream); + } +} diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/model/TraceInfoActionConverterTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/model/TraceInfoActionConverterTest.java new file mode 100644 index 000000000..9bc365e4a --- /dev/null +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/model/TraceInfoActionConverterTest.java @@ -0,0 +1,32 @@ +package org.datadog.jenkins.plugins.datadog.model; + +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.Converter; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +public class TraceInfoActionConverterTest extends ActionConverterTest { + + @Parameterized.Parameters(name = "{0}") + public static Collection data() { + return Arrays.asList(new Object[][]{ + {new TraceInfoAction(Collections.emptyMap())}, + {new TraceInfoAction(Collections.singletonMap("123", 123L))}, + {new TraceInfoAction(new HashMap() {{ put("123", 123L); put("456", 456L); }})}, + }); + } + + public TraceInfoActionConverterTest(final TraceInfoAction action) { + super(action); + } + + @Override + protected Converter getConverter(XStream xStream) { + return new TraceInfoAction.ConverterImpl(xStream); + } +} diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/model/node/DequeueActionConverterTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/model/node/DequeueActionConverterTest.java new file mode 100644 index 000000000..15c405ae2 --- /dev/null +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/model/node/DequeueActionConverterTest.java @@ -0,0 +1,31 @@ +package org.datadog.jenkins.plugins.datadog.model.node; + +import com.thoughtworks.xstream.XStream; +import 
com.thoughtworks.xstream.converters.Converter; +import java.util.Arrays; +import java.util.Collection; +import org.datadog.jenkins.plugins.datadog.model.ActionConverterTest; +import org.datadog.jenkins.plugins.datadog.model.node.DequeueAction; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +public class DequeueActionConverterTest extends ActionConverterTest { + + @Parameterized.Parameters(name = "{0}") + public static Collection data() { + return Arrays.asList(new Object[][]{ + {new DequeueAction(0)}, + {new DequeueAction(12345)}, + }); + } + + public DequeueActionConverterTest(final DequeueAction action) { + super(action); + } + + @Override + protected Converter getConverter(XStream xStream) { + return new DequeueAction.ConverterImpl(xStream); + } +} diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/model/node/EnqueueActionConverterTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/model/node/EnqueueActionConverterTest.java new file mode 100644 index 000000000..4e8d4be4d --- /dev/null +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/model/node/EnqueueActionConverterTest.java @@ -0,0 +1,32 @@ +package org.datadog.jenkins.plugins.datadog.model.node; + +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.Converter; +import java.util.Arrays; +import java.util.Collection; +import org.datadog.jenkins.plugins.datadog.model.ActionConverterTest; +import org.datadog.jenkins.plugins.datadog.model.node.EnqueueAction; +import org.datadog.jenkins.plugins.datadog.model.node.StatusAction; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +public class EnqueueActionConverterTest extends ActionConverterTest { + + @Parameterized.Parameters(name = "{0}") + public static Collection data() { + return Arrays.asList(new Object[][]{ + {new EnqueueAction(0)}, + {new EnqueueAction(12345)}, + }); + } + + public 
EnqueueActionConverterTest(final EnqueueAction action) { + super(action); + } + + @Override + protected Converter getConverter(XStream xStream) { + return new EnqueueAction.ConverterImpl(xStream); + } +} diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/model/node/NodeInfoActionConverterTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/model/node/NodeInfoActionConverterTest.java new file mode 100644 index 000000000..a5ee35080 --- /dev/null +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/model/node/NodeInfoActionConverterTest.java @@ -0,0 +1,39 @@ +package org.datadog.jenkins.plugins.datadog.model.node; + +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.Converter; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import org.datadog.jenkins.plugins.datadog.model.ActionConverterTest; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +public class NodeInfoActionConverterTest extends ActionConverterTest { + + @Parameterized.Parameters(name = "{0}") + public static Collection data() { + return Arrays.asList(new Object[][]{ + {new NodeInfoAction("nodeName", "nodeHostname", Collections.singleton("nodeLabel"), "nodeWorkspace")}, + {new NodeInfoAction("nodeName", "nodeHostname", new HashSet<>(Arrays.asList("nodeLabel1", "nodeLabel2")), null)}, + {new NodeInfoAction(null, "nodeHostname", new HashSet<>(Arrays.asList("nodeLabel1", "nodeLabel2")), "nodeWorkspace")}, + {new NodeInfoAction("nodeName", null, new HashSet<>(Arrays.asList("nodeLabel1", "nodeLabel2")), null)}, + {new NodeInfoAction("nodeName", "nodeHostname", Collections.emptySet(), "nodeWorkspace")}, + {new NodeInfoAction(null, null, new HashSet<>(Arrays.asList("nodeLabel1", "nodeLabel2")), null)}, + {new NodeInfoAction(null, "nodeHostname", Collections.emptySet(), "nodeWorkspace")}, + {new NodeInfoAction("nodeName", null, 
Collections.emptySet(), null)}, + {new NodeInfoAction(null, null, Collections.emptySet(), null)}, + }); + } + + public NodeInfoActionConverterTest(final NodeInfoAction action) { + super(action); + } + + @Override + protected Converter getConverter(XStream xStream) { + return new NodeInfoAction.ConverterImpl(xStream); + } +} diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/model/node/StatusActionConverterTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/model/node/StatusActionConverterTest.java new file mode 100644 index 000000000..e5f788af3 --- /dev/null +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/model/node/StatusActionConverterTest.java @@ -0,0 +1,33 @@ +package org.datadog.jenkins.plugins.datadog.model.node; + +import com.thoughtworks.xstream.XStream; +import com.thoughtworks.xstream.converters.Converter; +import java.util.Arrays; +import java.util.Collection; +import org.datadog.jenkins.plugins.datadog.model.ActionConverterTest; +import org.datadog.jenkins.plugins.datadog.model.Status; +import org.datadog.jenkins.plugins.datadog.model.node.StatusAction; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +public class StatusActionConverterTest extends ActionConverterTest { + + @Parameterized.Parameters(name = "{0}") + public static Collection data() { + return Arrays.asList(new Object[][]{ + {new StatusAction(Status.SUCCESS, true)}, + {new StatusAction(Status.ERROR, false)}, + {new StatusAction(Status.UNKNOWN, true)} + }); + } + + public StatusActionConverterTest(final StatusAction action) { + super(action); + } + + @Override + protected Converter getConverter(XStream xStream) { + return new StatusAction.ConverterImpl(xStream); + } +} diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/steps/DatadogOptionsTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/steps/DatadogOptionsTest.java index 51b7c6d2a..86d33ffb7 100644 --- 
a/src/test/java/org/datadog/jenkins/plugins/datadog/steps/DatadogOptionsTest.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/steps/DatadogOptionsTest.java @@ -1,7 +1,9 @@ package org.datadog.jenkins.plugins.datadog.steps; +import hudson.EnvVars; import hudson.ExtensionList; import hudson.model.labels.LabelAtom; +import java.util.List; import net.sf.json.JSONObject; import org.apache.commons.io.IOUtils; import org.datadog.jenkins.plugins.datadog.DatadogGlobalConfiguration; @@ -16,9 +18,10 @@ import org.junit.Test; import org.jvnet.hudson.test.JenkinsRule; -import java.util.List; - public class DatadogOptionsTest { + + private static final String TEST_NODE_HOSTNAME = "test-node-hostname"; + @ClassRule public static JenkinsRule j = new JenkinsRule(); private static DatadogClientStub stubClient = new DatadogClientStub(); @@ -29,7 +32,10 @@ public static void setup() throws Exception { DatadogGlobalConfiguration cfg = DatadogUtilities.getDatadogGlobalDescriptor(); ExtensionList.clearLegacyInstances(); cfg.setCollectBuildLogs(false); - j.createOnlineSlave(new LabelAtom("test")); + + EnvVars testNodeEnvVars = new EnvVars(); + testNodeEnvVars.put("HOSTNAME", TEST_NODE_HOSTNAME); + j.createOnlineSlave(new LabelAtom("test"), testNodeEnvVars); } @Test @@ -46,7 +52,6 @@ public void testLogCollection() throws Exception { @Test public void testMetricTags() throws Exception { - WorkflowJob p = j.jenkins.createProject(WorkflowJob.class, "testMetricTags"); String definition = IOUtils.toString( this.getClass().getResourceAsStream("pipelineMetricTags.txt"), @@ -54,8 +59,8 @@ public void testMetricTags() throws Exception { ); p.setDefinition(new CpsFlowDefinition(definition, true)); p.scheduleBuild2(0).get(); - String hostname = DatadogUtilities.getHostname(null); String[] expectedTags = new String[]{ + "node:slave0", "jenkins_url:" + DatadogUtilities.getJenkinsUrl(), "user_id:anonymous", "job:testMetricTags", @@ -63,7 +68,7 @@ public void testMetricTags() throws 
Exception { "foo:bar", "bar:foo" }; - stubClient.assertMetric("jenkins.job.duration", hostname, expectedTags); + stubClient.assertMetric("jenkins.job.duration", TEST_NODE_HOSTNAME, expectedTags); } private void assertLogs(final String expectedMessage, final boolean checkTraces) { diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/stubs/BuildStub.java b/src/test/java/org/datadog/jenkins/plugins/datadog/stubs/BuildStub.java index 4bc1b4b94..e1d0b99d2 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/stubs/BuildStub.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/stubs/BuildStub.java @@ -1,10 +1,11 @@ package org.datadog.jenkins.plugins.datadog.stubs; import hudson.EnvVars; -import hudson.model.*; - -import javax.annotation.Nonnull; +import hudson.model.Build; +import hudson.model.Result; +import hudson.model.TaskListener; import java.io.IOException; +import javax.annotation.Nonnull; public class BuildStub extends Build { @@ -16,12 +17,12 @@ public class BuildStub extends Build { private BuildStub previousBuiltBuild; private BuildStub previousNotFailedBuild; - public BuildStub(ProjectStub project, Result result, EnvVars envVars, BuildStub previousSuccessfulBuild, + public BuildStub(@Nonnull ProjectStub project, Result result, EnvVars envVars, BuildStub previousSuccessfulBuild, long duration, int number, BuildStub previousBuiltBuild, long timestamp, BuildStub previousNotFailedBuild) throws IOException { this(project); this.result = result; - this.envVars = envVars; + this.envVars = envVars != null ? 
envVars : new EnvVars(); this.previousSuccessfulBuild = previousSuccessfulBuild; this.duration = duration; this.number = number; @@ -30,7 +31,7 @@ public BuildStub(ProjectStub project, Result result, EnvVars envVars, BuildStub this.previousNotFailedBuild = previousNotFailedBuild; } - protected BuildStub(ProjectStub project) throws IOException { + protected BuildStub(@Nonnull ProjectStub project) throws IOException { super(project); } @@ -42,6 +43,7 @@ public Result getResult() { return this.result; } + @Nonnull public EnvVars getEnvironment(@Nonnull TaskListener listener) throws IOException, InterruptedException { return this.envVars; } @@ -58,6 +60,7 @@ public int getNumber() { return this.number; } + @Nonnull public ProjectStub getParent() { return project; } diff --git a/src/test/resources/org/datadog/jenkins/plugins/datadog/listeners/testPipelineSuccessLocalCheckout.txt b/src/test/resources/org/datadog/jenkins/plugins/datadog/listeners/testPipelineSuccessLocalCheckout.txt new file mode 100644 index 000000000..b5797b7c1 --- /dev/null +++ b/src/test/resources/org/datadog/jenkins/plugins/datadog/listeners/testPipelineSuccessLocalCheckout.txt @@ -0,0 +1,13 @@ +pipeline { + agent any + stages { + stage('test'){ + steps { + script { + git branch: 'master', + url: 'file:///tmp/git-repo/' + } + } + } + } +} \ No newline at end of file diff --git a/src/test/resources/org/datadog/jenkins/plugins/datadog/listeners/testPipelinesOverrideGitCommit.txt b/src/test/resources/org/datadog/jenkins/plugins/datadog/listeners/testPipelinesOverrideGitCommit.txt index 7c8d474be..bbe7b5ac5 100644 --- a/src/test/resources/org/datadog/jenkins/plugins/datadog/listeners/testPipelinesOverrideGitCommit.txt +++ b/src/test/resources/org/datadog/jenkins/plugins/datadog/listeners/testPipelinesOverrideGitCommit.txt @@ -4,7 +4,8 @@ pipeline { stage('test'){ steps { withEnv(["GIT_COMMIT=ccccbbbb"]) { - echo "hello" + git branch: 'master', + url: 'file:///tmp/git-repo/' } } } diff --git 
a/src/test/resources/org/datadog/jenkins/plugins/datadog/test-config.yml b/src/test/resources/org/datadog/jenkins/plugins/datadog/test-config.yml index d7f6d45e7..c27db3948 100644 --- a/src/test/resources/org/datadog/jenkins/plugins/datadog/test-config.yml +++ b/src/test/resources/org/datadog/jenkins/plugins/datadog/test-config.yml @@ -3,6 +3,6 @@ jenkins: unclassified: datadogGlobalConfiguration: - reportWith: 'http' + reportWith: 'HTTP' targetApiKey: 'test' emitConfigChangeEvents: true From 36d59f558bcfba62fcc53845a6610694adc7949a Mon Sep 17 00:00:00 2001 From: Nikita Tkachenko Date: Wed, 24 Jan 2024 10:05:50 +0100 Subject: [PATCH 03/17] Preserve node-name population logic --- .../datadog/traces/DatadogBasePipelineLogic.java | 11 ++++++++++- .../datadog/traces/DatadogTraceBuildLogic.java | 14 ++++++++++++-- .../datadog/traces/DatadogWebhookBuildLogic.java | 11 ++++++++++- .../listeners/DatadogGraphListenerTest.java | 4 ++++ 4 files changed, 36 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java index 88730b628..34c3136aa 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java @@ -55,7 +55,16 @@ protected String getNodeName(BuildPipelineNode current, BuildData buildData) { if (current.getNodeName() != null) { return current.getNodeName(); } - return buildData.getNodeName(""); + // It seems like "built-in" node as the default value does not have much practical sense. + // It is done to preserve existing behavior (note that this logic is not applied to metrics - also to preserve the plugin's existing behavior). 
+ // The mechanism before the changes was the following: + // - DatadogBuildListener#onInitialize created a BuildData instance + // - that BuildData had its nodeName populated from environment variables obtained from Run + // - the instance was persisted in an Action attached to Run, and was used to populate the node name of the pipeline span (always as the last fallback) + // For pipelines, the environment variables that Run#getEnvironment returns _at the beginning of the run_ always (!) contain NODE_NAME = "built-in" + // This is true regardless of whether the pipeline definition has a top-level agent block or not + // For freestyle projects the correct NODE_NAME seems to be available in the run's environment variables at every stage of the build + return buildData.getNodeName("built-in"); } protected String getNodeHostname(BuildPipelineNode current, BuildData buildData) { diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java index 4cf0a3346..b0a305cd8 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java @@ -84,8 +84,18 @@ public TraceSpan toSpan(final BuildData buildData, final Run run) { buildSpan.putMeta(CITags.CI_PARAMETERS, DatadogUtilities.toJson(buildData.getBuildParameters())); } + // It seems like "built-in" node as the default value does not have much practical sense. + // It is done to preserve existing behavior (note that this logic is not applied to metrics - also to preserve the plugin's existing behavior). 
+ // The mechanism before the changes was the following: + // - DatadogBuildListener#onInitialize created a BuildData instance + // - that BuildData had its nodeName populated from environment variables obtained from Run + // - the instance was persisted in an Action attached to Run, and was used to populate the node name of the pipeline span (always as the last fallback) + // For pipelines, the environment variables that Run#getEnvironment returns _at the beginning of the run_ always (!) contain NODE_NAME = "built-in" + // This is true regardless of whether the pipeline definition has a top-level agent block or not + // For freestyle projects the correct NODE_NAME seems to be available in the run's environment variables at every stage of the build + String nodeName = buildData.getNodeName("built-in"); buildSpan.putMeta(CITags.WORKSPACE_PATH, buildData.getWorkspace("")); - buildSpan.putMeta(CITags.NODE_NAME, buildData.getNodeName("")); + buildSpan.putMeta(CITags.NODE_NAME, nodeName); final String nodeLabelsJson = DatadogUtilities.toJson(getNodeLabels(run, buildData.getNodeName(""))); if(nodeLabelsJson != null && !nodeLabelsJson.isEmpty()){ @@ -95,7 +105,7 @@ public TraceSpan toSpan(final BuildData buildData, final Run run) { } // If the NodeName == "master", we don't set _dd.hostname. It will be overridden by the Datadog Agent. (Traces are only available using Datadog Agent) - if(!DatadogUtilities.isMainNode(buildData.getNodeName(""))) { + if(!DatadogUtilities.isMainNode(nodeName)) { final String workerHostname = buildData.getHostname(""); buildSpan.putMeta(CITags._DD_HOSTNAME, !workerHostname.isEmpty() ? 
workerHostname : HOSTNAME_NONE); } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java index 1dfc0f545..0a0d2a7a2 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java @@ -154,7 +154,16 @@ public JSONObject toJson(final BuildData buildData, final Run run) { { JSONObject nodePayload = new JSONObject(); - final String nodeName = buildData.getNodeName(""); + // It seems like "built-in" node as the default value does not have much practical sense. + // It is done to preserve existing behavior (note that this logic is not applied to metrics - also to preserve the plugin's existing behavior). + // The mechanism before the changes was the following: + // - DatadogBuildListener#onInitialize created a BuildData instance + // - that BuildData had its nodeName populated from environment variables obtained from Run + // - the instance was persisted in an Action attached to Run, and was used to populate the node name of the pipeline span (always as the last fallback) + // For pipelines, the environment variables that Run#getEnvironment returns _at the beginning of the run_ always (!) 
contain NODE_NAME = "built-in" + // This is true regardless of whether the pipeline definition has a top-level agent block or not + // For freestyle projects the correct NODE_NAME seems to be available in the run's environment variables at every stage of the build + final String nodeName = buildData.getNodeName("built-in"); nodePayload.put("name", nodeName); if(!DatadogUtilities.isMainNode(nodeName)) { final String workerHostname = buildData.getHostname(""); diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java index 893b18f97..268f9801d 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java @@ -729,9 +729,13 @@ public void testIntegrationPipelineQueueTimeOnStages() throws Exception { final TraceSpan buildSpan = searchSpan(spans, "pipelineIntegrationQueueTimeOnStages"); assertEquals(Double.valueOf(0), buildSpan.getMetrics().get(CITags.QUEUE_TIME)); + assertEquals("built-in", buildSpan.getMeta().get(CITags.NODE_NAME)); + assertEquals("[\"built-in\"]", buildSpan.getMeta().get(CITags.NODE_LABELS)); final TraceSpan runStages = searchSpan(spans, "Run stages"); assertEquals(Double.valueOf(0), runStages.getMetrics().get(CITags.QUEUE_TIME)); + assertEquals("built-in", runStages.getMeta().get(CITags.NODE_NAME)); + assertEquals("[\"built-in\"]", runStages.getMeta().get(CITags.NODE_LABELS)); final TraceSpan stage1 = searchSpan(spans, "Stage 1"); final Double stage1QueueTime = stage1.getMetrics().get(CITags.QUEUE_TIME); From 1419368708b2a2534eb11e160e58f699a7cba898 Mon Sep 17 00:00:00 2001 From: Nikita Tkachenko Date: Wed, 24 Jan 2024 12:14:55 +0100 Subject: [PATCH 04/17] Fix webhook tests --- .../plugins/datadog/listeners/DatadogBuildListenerIT.java | 6 ++++++ 1 file changed, 6 insertions(+) diff 
--git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java index 3c159bb75..f552a7b71 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java @@ -271,6 +271,8 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { @Test public void testUserSuppliedGitWithCommitInfoWebhook() throws Exception { + clientStub.configureForWebhooks(); + Jenkins jenkins = jenkinsRule.jenkins; final EnvironmentVariablesNodeProperty prop = new EnvironmentVariablesNodeProperty(); EnvVars env = prop.getEnvVars(); @@ -373,6 +375,8 @@ public void testFilterSensitiveInfoRepoUrl() throws Exception { @Test public void testGitAlternativeRepoUrlWebhook() throws Exception { + clientStub.configureForWebhooks(); + Jenkins jenkins = jenkinsRule.jenkins; final EnvironmentVariablesNodeProperty prop = new EnvironmentVariablesNodeProperty(); EnvVars env = prop.getEnvVars(); @@ -447,6 +451,8 @@ public void testCITagsOnTraces() throws Exception { @Test public void testCITagsOnWebhooks() throws Exception { + clientStub.configureForWebhooks(); + DatadogGlobalConfiguration cfg = DatadogUtilities.getDatadogGlobalDescriptor(); cfg.setGlobalJobTags("(.*?)_job, global_job_tag:$ENV_VAR"); cfg.setGlobalTags("global_tag:$ENV_VAR"); From ddfef46c8db91cde417747f75722efa3d2de2913 Mon Sep 17 00:00:00 2001 From: Nikita Tkachenko Date: Wed, 24 Jan 2024 20:28:28 +0100 Subject: [PATCH 05/17] Address review comments --- .../datadog/traces/write/TraceWriter.java | 83 ++++++++++--------- 1 file changed, 46 insertions(+), 37 deletions(-) diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java index fc532fe05..3611eec8b 100644 --- 
a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java @@ -22,26 +22,29 @@ public class TraceWriter { private static final String QUEUE_CAPACITY_ENV_VAR = "DD_JENKINS_TRACES_QUEUE_CAPACITY"; private static final String SUBMIT_TIMEOUT_ENV_VAR = "DD_JENKINS_TRACES_SUBMIT_TIMEOUT_SECONDS"; private static final String STOP_TIMEOUT_ENV_VAR = "DD_JENKINS_TRACES_STOP_TIMEOUT_SECONDS"; - private static final String POLLING_INTERVAL_ENV_VAR = "DD_JENKINS_TRACES_POLLING_INTERVAL_SECONDS"; + private static final String POLLING_TIMEOUT_ENV_VAR = "DD_JENKINS_TRACES_POLLING_TIMEOUT_SECONDS"; private static final String BATCH_SIZE_LIMIT_ENV_VAR = "DD_JENKINS_TRACES_BATCH_SIZE_LIMIT"; private static final int DEFAULT_QUEUE_CAPACITY = 10_000; private static final int DEFAULT_SUBMIT_TIMEOUT_SECONDS = 30; - private static final int DEFAULT_STOP_TIMEOUT_SECONDS = 15; - private static final int DEFAULT_POLLING_INTERVAL_SECONDS = 10; + private static final int DEFAULT_STOP_TIMEOUT_SECONDS = 10; + private static final int DEFAULT_POLLING_TIMEOUT_SECONDS = 5; private static final int DEFAULT_BATCH_SIZE_LIMIT = 100; private final TraceWriteStrategy traceWriteStrategy; private final BlockingQueue queue; private final Thread poller; + private final Thread pollerShutdownHook; public TraceWriter(DatadogClient datadogClient) { this.traceWriteStrategy = datadogClient.createTraceWriteStrategy(); this.queue = new ArrayBlockingQueue<>(getEnv(QUEUE_CAPACITY_ENV_VAR, DEFAULT_QUEUE_CAPACITY)); - this.poller = new Thread(new QueuePoller(traceWriteStrategy, queue), "DD-Trace-Writer"); - this.poller.setDaemon(true); + this.poller = new Thread(this::runPollingLoop, "DD-Trace-Writer"); this.poller.start(); + + this.pollerShutdownHook = new Thread(this::runShutdownHook, "DD-Trace-Writer-Shutdown-Hook"); + Runtime.getRuntime().addShutdownHook(pollerShutdownHook); } public void submitBuild(final 
BuildData buildData, final Run run) throws InterruptedException, TimeoutException { @@ -66,41 +69,47 @@ public void stop() { poller.interrupt(); } - private static final class QueuePoller implements Runnable { - private final TraceWriteStrategy traceWriteStrategy; - private final BlockingQueue queue; + private void runPollingLoop() { + long stopPollingAt = Long.MAX_VALUE; + while (System.currentTimeMillis() < stopPollingAt) { + try { + JSONObject span = queue.poll(getEnv(POLLING_TIMEOUT_ENV_VAR, DEFAULT_POLLING_TIMEOUT_SECONDS), TimeUnit.SECONDS); + if (span == null) { + // nothing to send + continue; + } - public QueuePoller(TraceWriteStrategy traceWriteStrategy, BlockingQueue queue) { - this.traceWriteStrategy = traceWriteStrategy; - this.queue = queue; - } + int batchSize = getEnv(BATCH_SIZE_LIMIT_ENV_VAR, DEFAULT_BATCH_SIZE_LIMIT); + List spans = new ArrayList<>(batchSize); + spans.add(span); + queue.drainTo(spans, batchSize - 1); - @Override - public void run() { - long shutdownAt = Long.MAX_VALUE; - while (System.currentTimeMillis() < shutdownAt) { - try { - JSONObject span = queue.poll(getEnv(POLLING_INTERVAL_ENV_VAR, DEFAULT_POLLING_INTERVAL_SECONDS), TimeUnit.SECONDS); - if (span == null) { - continue; // nothing to send - } - - int batchSize = getEnv(BATCH_SIZE_LIMIT_ENV_VAR, DEFAULT_BATCH_SIZE_LIMIT); - List spans = new ArrayList<>(batchSize); - spans.add(span); - queue.drainTo(spans, batchSize - 1); - - traceWriteStrategy.send(spans); - - } catch (InterruptedException e) { - logger.info("Queue poller thread interrupted"); - shutdownAt = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(getEnv(STOP_TIMEOUT_ENV_VAR, DEFAULT_STOP_TIMEOUT_SECONDS)); - - } catch (Exception e) { - DatadogUtilities.severe(logger, e, "Error while sending trace"); - } + traceWriteStrategy.send(spans); + + } catch (InterruptedException e) { + logger.info("Queue poller thread interrupted"); + stopPollingAt = Math.min(stopPollingAt, System.currentTimeMillis() + 
TimeUnit.SECONDS.toMillis(getEnv(STOP_TIMEOUT_ENV_VAR, DEFAULT_STOP_TIMEOUT_SECONDS))); + + } catch (Exception e) { + DatadogUtilities.severe(logger, e, "Error while sending trace"); } - logger.info("Queue poller thread shut down"); + } + logger.info("Queue polling stopped, spans not flushed: " + queue.size()); + + try { + Runtime.getRuntime().removeShutdownHook(pollerShutdownHook); + } catch (IllegalStateException e) { + // JVM is being shutdown, the hook has already been called + } + } + + private void runShutdownHook() { + stop(); + try { + // delay JVM shutdown until remaining spans are sent (or until timeout) + poller.join(TimeUnit.SECONDS.toMillis(getEnv(STOP_TIMEOUT_ENV_VAR, DEFAULT_STOP_TIMEOUT_SECONDS))); + } catch (InterruptedException e) { + // ignore, should be impossible to end up here } } From 7bf5c20aa0b7cc297451cf80625c940af8269412 Mon Sep 17 00:00:00 2001 From: Nikita Tkachenko Date: Wed, 24 Jan 2024 20:52:04 +0100 Subject: [PATCH 06/17] Fix SpotBugs warning --- .../plugins/datadog/traces/write/TraceWriter.java | 15 +++++++++------ .../datadog/traces/write/TraceWriterFactory.java | 1 + 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java index 3611eec8b..ad1f9782a 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java @@ -15,7 +15,7 @@ import org.datadog.jenkins.plugins.datadog.model.BuildData; import org.jenkinsci.plugins.workflow.graph.FlowNode; -public class TraceWriter { +public final class TraceWriter { private static final Logger logger = Logger.getLogger(TraceWriter.class.getName()); @@ -41,12 +41,19 @@ public TraceWriter(DatadogClient datadogClient) { this.queue = new ArrayBlockingQueue<>(getEnv(QUEUE_CAPACITY_ENV_VAR, DEFAULT_QUEUE_CAPACITY)); 
this.poller = new Thread(this::runPollingLoop, "DD-Trace-Writer"); - this.poller.start(); this.pollerShutdownHook = new Thread(this::runShutdownHook, "DD-Trace-Writer-Shutdown-Hook"); Runtime.getRuntime().addShutdownHook(pollerShutdownHook); } + public void start() { + poller.start(); + } + + public void stop() { + poller.interrupt(); + } + public void submitBuild(final BuildData buildData, final Run run) throws InterruptedException, TimeoutException { JSONObject buildJson = traceWriteStrategy.serialize(buildData, run); submit(buildJson); @@ -65,10 +72,6 @@ private void submit(JSONObject json) throws InterruptedException, TimeoutExcepti } } - public void stop() { - poller.interrupt(); - } - private void runPollingLoop() { long stopPollingAt = Long.MAX_VALUE; while (System.currentTimeMillis() < stopPollingAt) { diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriterFactory.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriterFactory.java index d8691f3dd..5de0bb808 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriterFactory.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriterFactory.java @@ -18,6 +18,7 @@ public static synchronized void onDatadogClientUpdate(@Nullable DatadogClient cl } TRACE_WRITER = new TraceWriter(client); + TRACE_WRITER.start(); } @Nullable From 7eefea63a2ea8ccec47fbd9aa5c4f297ab415535 Mon Sep 17 00:00:00 2001 From: Nikita Tkachenko Date: Thu, 25 Jan 2024 13:27:41 +0100 Subject: [PATCH 07/17] Fix integration tests --- .../plugins/datadog/DatadogUtilities.java | 59 ++-- .../datadog/listeners/DatadogSCMListener.java | 18 +- .../listeners/DatadogStepListener.java | 13 +- .../plugins/datadog/model/BuildData.java | 36 --- .../plugins/datadog/util/git/GitUtils.java | 6 +- .../listeners/DatadogBuildListenerIT.java | 85 +++--- .../listeners/DatadogGraphListenerTest.java | 252 +++++++----------- 
.../listeners/DatadogTraceAbstractTest.java | 8 +- .../testPipelineSuccessLocalCheckout.txt | 2 +- .../testPipelinesOverrideGitCommit.txt | 2 +- 10 files changed, 216 insertions(+), 265 deletions(-) diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java b/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java index cc22d68a2..c6d79226a 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java @@ -39,13 +39,12 @@ of this software and associated documentation files (the "Software"), to deal import java.io.InputStream; import java.io.InputStreamReader; import java.net.Inet4Address; -import java.net.MalformedURLException; -import java.net.URL; import java.net.UnknownHostException; import java.nio.charset.Charset; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; @@ -98,6 +97,7 @@ public class DatadogUtilities { /** * @return - The descriptor for the Datadog plugin. In this case the global configuration. */ + @Nullable public static DatadogGlobalConfiguration getDatadogGlobalDescriptor() { try { return ExtensionList.lookupSingleton(DatadogGlobalConfiguration.class); @@ -586,6 +586,14 @@ public static String getHostname(EnvVars envVars) { return null; } + private static final Pattern VALID_HOSTNAME_RFC_1123_PATTERN = Pattern.compile("^(([a-zA-Z0-9]|" + + "[a-zA-Z0-9][a-zA-Z0-9\\-]*[a-zA-Z0-9])\\.)*" + + "([A-Za-z0-9]|" + + "[A-Za-z0-9][A-Za-z0-9\\-]*[A-Za-z0-9])$"); + + private static final Collection LOCAL_HOSTS = Arrays.asList("localhost", "localhost.localdomain", + "localhost6.localdomain6", "ip6-localhost"); + /** * Validator function to ensure that the hostname is valid. Also, fails on * empty String. 
@@ -598,20 +606,17 @@ public static Boolean isValidHostname(String hostname) { return false; } - String[] localHosts = {"localhost", "localhost.localdomain", - "localhost6.localdomain6", "ip6-localhost"}; - String VALID_HOSTNAME_RFC_1123_PATTERN = "^(([a-zA-Z0-9]|" - + "[a-zA-Z0-9][a-zA-Z0-9\\-]*[a-zA-Z0-9])\\.)*" - + "([A-Za-z0-9]|" - + "[A-Za-z0-9][A-Za-z0-9\\-]*[A-Za-z0-9])$"; - String host = hostname.toLowerCase(); - // Check if hostname is local - if (Arrays.asList(localHosts).contains(host)) { + if (LOCAL_HOSTS.contains(hostname.toLowerCase())) { logger.fine(String.format("Hostname: %s is local", hostname)); return false; } + if (isPrivateIPv4Address(hostname)) { + logger.fine(String.format("Hostname: %s is a private IPv4 address", hostname)); + return false; + } + // Ensure proper length if (hostname.length() > MAX_HOSTNAME_LEN) { logger.fine(String.format("Hostname: %s is too long (max length is %s characters)", @@ -619,14 +624,38 @@ public static Boolean isValidHostname(String hostname) { return false; } - // Check compliance with RFC 1123 - Pattern r = Pattern.compile(VALID_HOSTNAME_RFC_1123_PATTERN); - Matcher m = r.matcher(hostname); - // Final check: Hostname matches RFC1123? 
+ Matcher m = VALID_HOSTNAME_RFC_1123_PATTERN.matcher(hostname); return m.find(); } + private static boolean isPrivateIPv4Address(String ipAddress) { + if (ipAddress == null || ipAddress.isEmpty()) { + return false; + } + + String[] parts = ipAddress.split("\\."); + if (parts.length != 4) { + return false; + } + + try { + int firstOctet = Integer.parseInt(parts[0]); + int secondOctet = Integer.parseInt(parts[1]); + + if (firstOctet == 10) { + return true; + } else if (firstOctet == 172 && (secondOctet >= 16 && secondOctet <= 31)) { + return true; + } else if (firstOctet == 192 && secondOctet == 168) { + return true; + } + return false; + } catch (NumberFormatException e) { + return false; + } + } + public static Map> getComputerTags(Computer computer) { Set labels = null; try { diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogSCMListener.java b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogSCMListener.java index 40b1dee16..04d00ae9d 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogSCMListener.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogSCMListener.java @@ -41,6 +41,7 @@ of this software and associated documentation files (the "Software"), to deal import java.util.Map; import java.util.Set; import java.util.logging.Logger; +import javax.annotation.Nullable; import org.apache.commons.lang.StringUtils; import org.datadog.jenkins.plugins.datadog.DatadogClient; import org.datadog.jenkins.plugins.datadog.DatadogEvent; @@ -89,13 +90,16 @@ public void onCheckout(Run build, SCM scm, FilePath workspace, TaskListene return; } + logger.fine("Start DatadogSCMListener#onCheckout"); + if (isGit(scm)) { EnvVars environment = build.getEnvironment(listener); GitClient gitClient = GitUtils.newGitClient(listener, environment, workspace); - if (gitClient != null) { - populateCommitInfo(build, gitClient); - populateRepositoryInfo(build, gitClient, environment); - } + 
populateCommitInfo(build, gitClient); + populateRepositoryInfo(build, gitClient, environment); + } else { + logger.fine("Will not populate git commit and repository info for non-git SCM: " + + (scm != null ? scm.getType() : null)); } DatadogJobProperty prop = DatadogUtilities.getDatadogJobProperties(build); @@ -103,8 +107,6 @@ public void onCheckout(Run build, SCM scm, FilePath workspace, TaskListene return; } - logger.fine("Start DatadogSCMListener#onCheckout"); - // Get Datadog Client Instance DatadogClient client = ClientFactory.getClient(); if (client == null) { @@ -146,7 +148,7 @@ private boolean isGit(SCM scm) { return scmType != null && scmType.toLowerCase().contains("git"); } - private void populateCommitInfo(final Run run, final GitClient gitClient) { + private void populateCommitInfo(final Run run, @Nullable final GitClient gitClient) { long start = System.currentTimeMillis(); try { GitCommitAction commitAction = run.getAction(GitCommitAction.class); @@ -200,7 +202,7 @@ private void populateCommitInfo(final Run run, final GitClient gitClient) } } - private void populateRepositoryInfo(final Run run, final GitClient gitClient, final EnvVars environment) { + private void populateRepositoryInfo(final Run run, @Nullable final GitClient gitClient, final EnvVars environment) { long start = System.currentTimeMillis(); try { GitRepositoryAction repoAction = run.getAction(GitRepositoryAction.class); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java index be217cc22..73fa89a22 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java @@ -138,11 +138,11 @@ private static String getNodeHostname(final StepContext stepContext) { EnvVars envVars = stepContext.get(EnvVars.class); if (envVars != null) { String 
ddHostname = envVars.get(DatadogGlobalConfiguration.DD_CI_HOSTNAME); - if (ddHostname != null) { + if (DatadogUtilities.isValidHostname(ddHostname)) { return ddHostname; } String hostname = envVars.get("HOSTNAME"); - if (hostname != null) { + if (DatadogUtilities.isValidHostname(hostname)) { return hostname; } } @@ -153,7 +153,14 @@ private static String getNodeHostname(final StepContext stepContext) { try { Computer computer = stepContext.get(Computer.class); if(computer != null) { - return computer.getHostName(); + String computerHostName = computer.getHostName(); + if (DatadogUtilities.isValidHostname(computerHostName)) { + return computerHostName; + } + String computerNodeName = DatadogUtilities.getNodeName(computer); + if (DatadogUtilities.isMainNode(computerNodeName)) { + return DatadogUtilities.getHostname(null); + } } } catch (Exception e){ logger.fine("Unable to extract hostname from StepContext."); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java index be705e267..37e8b903d 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java @@ -641,10 +641,6 @@ public String getExecutorNumber(String value) { return defaultIfNull(executorNumber, value); } - public String getJavaHome(String value) { - return defaultIfNull(javaHome, value); - } - public String getWorkspace(String value) { return defaultIfNull(workspace, value); } @@ -661,66 +657,34 @@ public String getGitMessage(String value) { return defaultIfNull(gitMessage, value); } - public void setGitMessage(String gitMessage) { - this.gitMessage = gitMessage; - } - public String getGitAuthorName(final String value) { return defaultIfNull(gitAuthorName, value); } - public void setGitAuthorName(String gitAuthorName) { - this.gitAuthorName = gitAuthorName; - } - public String getGitAuthorEmail(final String 
value) { return defaultIfNull(gitAuthorEmail, value); } - public void setGitAuthorEmail(String gitAuthorEmail) { - this.gitAuthorEmail = gitAuthorEmail; - } - public String getGitCommitterName(final String value) { return defaultIfNull(gitCommitterName, value); } - public void setGitCommitterName(String gitCommitterName) { - this.gitCommitterName = gitCommitterName; - } - public String getGitCommitterEmail(final String value) { return defaultIfNull(gitCommitterEmail, value); } - public void setGitCommitterEmail(String gitCommitterEmail) { - this.gitCommitterEmail = gitCommitterEmail; - } - public String getGitAuthorDate(final String value) { return defaultIfNull(gitAuthorDate, value); } - public void setGitAuthorDate(String gitAuthorDate) { - this.gitAuthorDate = gitAuthorDate; - } - public String getGitCommitterDate(final String value) { return defaultIfNull(gitCommitterDate, value); } - public void setGitCommitterDate(String gitCommitterDate) { - this.gitCommitterDate = gitCommitterDate; - } - public String getGitDefaultBranch(String value) { return defaultIfNull(gitDefaultBranch, value); } - public void setGitDefaultBranch(String gitDefaultBranch) { - this.gitDefaultBranch = gitDefaultBranch; - } - public String getGitTag(String value) { return defaultIfNull(gitTag, value); } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/GitUtils.java b/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/GitUtils.java index 7ce19c601..5a1c8cab1 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/GitUtils.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/GitUtils.java @@ -19,6 +19,7 @@ import java.util.Map; import java.util.logging.Logger; import java.util.regex.Pattern; +import javax.annotation.Nullable; import org.apache.commons.lang.StringUtils; import org.datadog.jenkins.plugins.datadog.audit.DatadogAudit; import org.datadog.jenkins.plugins.datadog.model.GitCommitAction; @@ -44,7 +45,7 @@ private 
GitUtils() { * @param gitClient the Git client used. * @return revCommit */ - public static RevCommit searchRevCommit(final GitClient gitClient, final String gitCommit) { + public static RevCommit searchRevCommit(@Nullable final GitClient gitClient, final String gitCommit) { try { if (gitClient == null) { LOGGER.fine("Unable to search RevCommit. GitClient is null"); @@ -64,7 +65,7 @@ public static RevCommit searchRevCommit(final GitClient gitClient, final String * @param gitClient The Git client to use to obtain the repository information * @return repositoryInfo */ - public static RepositoryInfo searchRepositoryInfo(final GitClient gitClient) { + public static RepositoryInfo searchRepositoryInfo(@Nullable final GitClient gitClient) { try { if (gitClient == null) { LOGGER.fine("Unable to search RevCommit. GitClient is null"); @@ -86,6 +87,7 @@ public static RepositoryInfo searchRepositoryInfo(final GitClient gitClient) { * @param workspace the workspace to use to build the Git client * @return gitClient */ + @Nullable public static GitClient newGitClient(final TaskListener listener, final EnvVars envVars, final FilePath workspace) { long start = System.currentTimeMillis(); try { diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java index 33b476f3b..d98433c04 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java @@ -27,13 +27,11 @@ import hudson.model.Label; import hudson.plugins.git.BranchSpec; import hudson.plugins.git.GitSCM; -import hudson.plugins.git.browser.GitRepositoryBrowser; import hudson.plugins.git.extensions.impl.LocalBranch; import hudson.slaves.DumbSlave; import hudson.slaves.EnvironmentVariablesNodeProperty; import java.io.IOException; import java.io.InputStream; -import 
java.net.URL; import java.util.Collections; import java.util.List; import java.util.Map; @@ -48,10 +46,11 @@ import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; import org.datadog.jenkins.plugins.datadog.traces.CITags; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; +import org.junit.AfterClass; import org.junit.Before; +import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; -import org.jvnet.hudson.test.ExtractResourceSCM; import org.jvnet.hudson.test.JenkinsRule; public class DatadogBuildListenerIT extends DatadogTraceAbstractTest { @@ -59,12 +58,31 @@ public class DatadogBuildListenerIT extends DatadogTraceAbstractTest { private static final String SAMPLE_SERVICE_NAME = "sampleServiceName"; @ClassRule - public static JenkinsRule jenkinsRule = new JenkinsRule(); + public static final JenkinsRule jenkinsRule = new JenkinsRule(); + + private static FilePath localGitRepoPath; + private DatadogClientStub clientStub; - static { + @BeforeClass + public static void setUp() { // to allow checkout from local git repositories - needed for some tests GitSCM.ALLOW_LOCAL_CHECKOUT = true; + + localGitRepoPath = jenkinsRule.jenkins.getRootPath().child("tmp").child("git-repo"); + try (InputStream gitZip = DatadogBuildListenerIT.class.getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip")) { + localGitRepoPath.deleteRecursive(); + localGitRepoPath.mkdirs(); + localGitRepoPath.unzipFrom(gitZip); + + } catch (Exception e) { + throw new RuntimeException("Could not create local git repo at " + localGitRepoPath.getRemote(), e); + } + } + + @AfterClass + public static void tearDown() throws IOException, InterruptedException { + localGitRepoPath.deleteRecursive(); } @Before @@ -122,14 +140,12 @@ public void testTraces() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - 
env.put("GIT_URL", "file:///tmp/git-repo/"); + env.put("GIT_URL", "file://" + localGitRepoPath.getRemote()); jenkins.getGlobalNodeProperties().add(prop); - createLocallyAvailableGitRepo(jenkins); - final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccess"); - GitSCM git = new GitSCM(GitSCM.createRepoList("file:///tmp/git-repo/", null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); + GitSCM git = new GitSCM(GitSCM.createRepoList("file://" + localGitRepoPath.getRemote(), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); project.setScm(git); final FilePath ws = jenkins.getWorkspaceFor(project); @@ -144,7 +160,7 @@ public void testTraces() throws Exception { assertEquals(1, spans.size()); final TraceSpan buildSpan = spans.get(0); - assertGitVariablesOnSpan(buildSpan, "master"); + assertGitVariablesOnSpan(buildSpan, "master", localGitRepoPath.getRemote()); final Map meta = buildSpan.getMeta(); final Map metrics = buildSpan.getMetrics(); assertEquals(BuildPipelineNode.NodeType.PIPELINE.getBuildLevel(), meta.get(CITags._DD_CI_BUILD_LEVEL)); @@ -174,15 +190,6 @@ public void testTraces() throws Exception { assertCleanupActions(run); } - private void createLocallyAvailableGitRepo(Jenkins jenkins) throws IOException, InterruptedException { - try (InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip")) { - FilePath gitRepoPath = jenkins.createPath("/tmp/git-repo"); - gitRepoPath.deleteRecursive(); - gitRepoPath.mkdirs(); - gitRepoPath.unzipFrom(gitZip); - } - } - @Test public void testGitDefaultBranch() throws Exception { Jenkins jenkins = jenkinsRule.jenkins; @@ -190,16 +197,14 @@ public void testGitDefaultBranch() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); 
env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL", "file:///tmp/git-repo/"); + env.put("GIT_URL", "file://" + localGitRepoPath.getRemote()); final String defaultBranch = "refs/heads/hardcoded-master"; env.put("DD_GIT_DEFAULT_BRANCH", defaultBranch); jenkins.getGlobalNodeProperties().add(prop); - createLocallyAvailableGitRepo(jenkins); - final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessDefaultBranch"); - GitSCM git = new GitSCM(GitSCM.createRepoList("file:///tmp/git-repo/", null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); + GitSCM git = new GitSCM(GitSCM.createRepoList("file://" + localGitRepoPath.getRemote(), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); project.setScm(git); project.scheduleBuild2(0).get(); @@ -209,7 +214,7 @@ public void testGitDefaultBranch() throws Exception { assertEquals(1, spans.size()); final TraceSpan buildSpan = spans.get(0); - assertGitVariablesOnSpan(buildSpan, "hardcoded-master"); + assertGitVariablesOnSpan(buildSpan, "hardcoded-master", localGitRepoPath.getRemote()); } @Test @@ -221,18 +226,16 @@ public void testUserSuppliedGitWithoutCommitInfo() throws Exception { env.put(GIT_BRANCH, "not-valid-branch"); env.put(GIT_COMMIT, "not-valid-commit"); - env.put(DD_GIT_REPOSITORY_URL, "file:///tmp/git-repo/"); + env.put(DD_GIT_REPOSITORY_URL, "file://" + localGitRepoPath.getRemote()); env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); final String defaultBranch = "refs/heads/hardcoded-master"; env.put(DD_GIT_DEFAULT_BRANCH, defaultBranch); jenkins.getGlobalNodeProperties().add(prop); - createLocallyAvailableGitRepo(jenkins); - final FreeStyleProject project = 
jenkinsRule.createFreeStyleProject("buildIntegrationSuccessUserSuppliedGitWithoutCommitInfo"); - GitSCM git = new GitSCM(GitSCM.createRepoList("file:///tmp/git-repo/", null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); + GitSCM git = new GitSCM(GitSCM.createRepoList("file://" + localGitRepoPath.getRemote(), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); project.setScm(git); project.scheduleBuild2(0).get(); @@ -242,7 +245,7 @@ public void testUserSuppliedGitWithoutCommitInfo() throws Exception { assertEquals(1, spans.size()); final TraceSpan buildSpan = spans.get(0); - assertGitVariablesOnSpan(buildSpan, "hardcoded-master"); + assertGitVariablesOnSpan(buildSpan, "hardcoded-master", localGitRepoPath.getRemote()); } @Test @@ -253,7 +256,7 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { env.put(GIT_REPOSITORY_URL, "not-valid-repo"); env.put(GIT_BRANCH, "not-valid-branch"); env.put(GIT_COMMIT, "not-valid-commit"); - env.put(DD_GIT_REPOSITORY_URL, "file:///tmp/git-repo/"); + env.put(DD_GIT_REPOSITORY_URL, "file://" + localGitRepoPath.getRemote()); env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); env.put(DD_GIT_COMMIT_MESSAGE, "hardcoded-message"); @@ -267,11 +270,9 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { env.put(DD_GIT_DEFAULT_BRANCH, defaultBranch); jenkins.getGlobalNodeProperties().add(prop); - createLocallyAvailableGitRepo(jenkins); - final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessUserSuppliedGitWithCommitInfo"); - GitSCM git = new GitSCM(GitSCM.createRepoList("file:///tmp/git-repo/", null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); + GitSCM git = new 
GitSCM(GitSCM.createRepoList("file://" + localGitRepoPath.getRemote(), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); project.setScm(git); project.scheduleBuild2(0).get(); @@ -292,7 +293,7 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT__SHA)); assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT_SHA)); assertEquals("master", meta.get(CITags.GIT_BRANCH)); - assertEquals("file:///tmp/git-repo/", meta.get(CITags.GIT_REPOSITORY_URL)); + assertEquals("file://" + localGitRepoPath.getRemote(), meta.get(CITags.GIT_REPOSITORY_URL)); assertEquals("hardcoded-master", meta.get(CITags.GIT_DEFAULT_BRANCH)); } @@ -306,7 +307,7 @@ public void testUserSuppliedGitWithCommitInfoWebhook() throws Exception { env.put(GIT_REPOSITORY_URL, "not-valid-repo"); env.put(GIT_BRANCH, "not-valid-branch"); env.put(GIT_COMMIT, "not-valid-commit"); - env.put(DD_GIT_REPOSITORY_URL, "file:///tmp/git-repo/"); + env.put(DD_GIT_REPOSITORY_URL, "file://" + localGitRepoPath.getRemote()); env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); env.put(DD_GIT_COMMIT_MESSAGE, "hardcoded-message"); @@ -320,11 +321,9 @@ public void testUserSuppliedGitWithCommitInfoWebhook() throws Exception { env.put(DD_GIT_DEFAULT_BRANCH, defaultBranch); jenkins.getGlobalNodeProperties().add(prop); - createLocallyAvailableGitRepo(jenkins); - final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessUserSuppliedGitWithCommitInfoWebhook"); - GitSCM git = new GitSCM(GitSCM.createRepoList("file:///tmp/git-repo/", null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); + GitSCM git = new GitSCM(GitSCM.createRepoList("file://" + localGitRepoPath.getRemote(), 
null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); project.setScm(git); project.scheduleBuild2(0).get(); @@ -344,7 +343,7 @@ public void testUserSuppliedGitWithCommitInfoWebhook() throws Exception { assertEquals("hardcoded-committer-date", meta.getString("commit_time")); assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.getString("sha")); assertEquals("master", meta.getString("branch")); - assertEquals("file:///tmp/git-repo/", meta.getString("repository_url")); + assertEquals("file://" + localGitRepoPath.getRemote(), meta.getString("repository_url")); assertEquals("hardcoded-master", meta.getString("default_branch")); } @@ -411,14 +410,12 @@ public void testGitAlternativeRepoUrlWebhook() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL_1", "file:///tmp/git-repo/"); + env.put("GIT_URL_1", "file://" + localGitRepoPath.getRemote()); jenkins.getGlobalNodeProperties().add(prop); - createLocallyAvailableGitRepo(jenkins); - final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessAltRepoUrlWebhook"); - GitSCM git = new GitSCM(GitSCM.createRepoList("file:///tmp/git-repo/", null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); + GitSCM git = new GitSCM(GitSCM.createRepoList("file://" + localGitRepoPath.getRemote(), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); project.setScm(git); final FilePath ws = jenkins.getWorkspaceFor(project); @@ -432,7 +429,7 @@ public void testGitAlternativeRepoUrlWebhook() throws Exception { assertEquals(1, webhooks.size()); final JSONObject webhook = webhooks.get(0); - assertGitVariablesOnWebhook(webhook, "master"); + 
assertGitVariablesOnWebhook(webhook, "master", localGitRepoPath.getRemote()); } @Test diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java index 268f9801d..63a1bea69 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java @@ -33,14 +33,19 @@ import hudson.triggers.SCMTrigger.SCMTriggerCause; import hudson.triggers.TimerTrigger.TimerTriggerCause; import java.io.BufferedReader; +import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import jenkins.model.Jenkins; import net.sf.json.JSONObject; import org.apache.commons.io.IOUtils; @@ -62,7 +67,9 @@ import org.jenkinsci.plugins.workflow.graph.BlockStartNode; import org.jenkinsci.plugins.workflow.job.WorkflowJob; import org.jenkinsci.plugins.workflow.job.WorkflowRun; +import org.junit.AfterClass; import org.junit.Before; +import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; import org.jvnet.hudson.test.JenkinsRule; @@ -74,13 +81,34 @@ public class DatadogGraphListenerTest extends DatadogTraceAbstractTest { @ClassRule public static JenkinsRule jenkinsRule; + private static FilePath localGitRepoPath; + static { // to allow checkout from local git repositories - needed for some tests System.setProperty("hudson.plugins.git.GitSCM.ALLOW_LOCAL_CHECKOUT", "true"); + jenkinsRule = new JenkinsRule(); jenkinsRule.timeout = 600; // default value of 180 is too small for all the test 
cases in this class } + @BeforeClass + public static void setUp() { + localGitRepoPath = jenkinsRule.jenkins.getRootPath().child("tmp").child("git-repo"); + try (InputStream gitZip = DatadogBuildListenerIT.class.getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip")) { + localGitRepoPath.deleteRecursive(); + localGitRepoPath.mkdirs(); + localGitRepoPath.unzipFrom(gitZip); + + } catch (Exception e) { + throw new RuntimeException("Could not create local git repo at " + localGitRepoPath.getRemote(), e); + } + } + + @AfterClass + public static void tearDown() throws IOException, InterruptedException { + localGitRepoPath.deleteRecursive(); + } + private DatadogGraphListener listener; private DatadogClientStub clientStub; @@ -154,10 +182,7 @@ public void testIntegration() throws Exception { windowsEnvVars.put("HOSTNAME", windowsHostname); jenkinsRule.createOnlineSlave(new LabelAtom("windows"), windowsEnvVars); WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegration"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineDefinition.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineDefinition.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); WorkflowRun run = job.scheduleBuild2(0).get(); BufferedReader br = new BufferedReader(run.getLogReader()); @@ -217,13 +242,10 @@ public void testIntegrationGitInfo() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL", "file:///tmp/git-repo/"); + env.put("GIT_URL", "file://" + localGitRepoPath.getRemote()); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationSingleCommit"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccessLocalCheckout.txt"), - "UTF-8" - ); + String definition 
= getPipelineDefinition("testPipelineSuccessLocalCheckout.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); final FilePath ws = jenkins.getWorkspaceFor(job); @@ -239,19 +261,16 @@ public void testIntegrationGitInfo() throws Exception { final List spans = clientStub.getSpans(); assertEquals(3, spans.size()); final TraceSpan buildSpan = spans.get(0); - assertGitVariablesOnSpan(buildSpan, "master"); + assertGitVariablesOnSpan(buildSpan, "master", localGitRepoPath.getRemote()); } @Test public void testIntegrationNonCIVisibilityEnvVars() throws Exception { Jenkins jenkins = jenkinsRule.jenkins; WorkflowJob job = jenkins.createProject(WorkflowJob.class, "testPipelineGitBranchEnv"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineGitBranchEnv.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineGitBranchEnv.txt"); - String[] expectedTags = new String[]{ + String[] expectedTags = new String[]{ "jenkins_url:" + DatadogUtilities.getJenkinsUrl(), "user_id:anonymous", "job:testPipelineGitBranchEnv", @@ -274,13 +293,10 @@ public void testIntegrationGitInfoWebhooks() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL", "file:///tmp/git-repo/"); + env.put("GIT_URL", "file://" + localGitRepoPath.getRemote()); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationSingleCommitWebhooks"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccessLocalCheckout.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineSuccessLocalCheckout.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); final FilePath ws = jenkins.getWorkspaceFor(job); @@ -295,7 +311,7 @@ public void testIntegrationGitInfoWebhooks() throws Exception { clientStub.waitForWebhooks(3); final List webhook = 
clientStub.getWebhooks(); assertEquals(3, webhook.size()); - assertGitVariablesOnWebhook(webhook.get(0), "master"); + assertGitVariablesOnWebhook(webhook.get(0), "master", localGitRepoPath.getRemote()); } @Test @@ -305,15 +321,12 @@ public void testIntegrationGitInfoDefaultBranchEnvVar() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL", "file:///tmp/git-repo/"); + env.put("GIT_URL", "file://" + localGitRepoPath.getRemote()); final String defaultBranch = "refs/heads/hardcoded-master"; env.put("DD_GIT_DEFAULT_BRANCH", defaultBranch); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationSingleCommitDefaultBranchEnvVar"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccessLocalCheckout.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineSuccessLocalCheckout.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); final FilePath ws = jenkins.getWorkspaceFor(job); @@ -329,7 +342,7 @@ public void testIntegrationGitInfoDefaultBranchEnvVar() throws Exception { final List spans = clientStub.getSpans(); assertEquals(3, spans.size()); final TraceSpan buildSpan = spans.get(0); - assertGitVariablesOnSpan(buildSpan, "hardcoded-master"); + assertGitVariablesOnSpan(buildSpan, "hardcoded-master", localGitRepoPath.getRemote()); } @Test @@ -339,13 +352,10 @@ public void testIntegrationGitInfoOverrideCommit() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL", "file:///tmp/git-repo/"); + env.put("GIT_URL", "file://" + localGitRepoPath.getRemote()); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationOverrideCommit"); - String definition = IOUtils.toString( - 
this.getClass().getResourceAsStream("testPipelinesOverrideGitCommit.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelinesOverrideGitCommit.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); final FilePath ws = jenkins.getWorkspaceFor(job); @@ -365,6 +375,27 @@ public void testIntegrationGitInfoOverrideCommit() throws Exception { } } + private static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("\\$([A-Z_]+)"); + + private String getPipelineDefinition(String file) throws IOException { + Map replacements = Collections.singletonMap("LOCAL_REPO_URL", "file://" + localGitRepoPath.getRemote()); + + String pipelineDefinition; + try (InputStream is = DatadogGraphListenerTest.class.getResourceAsStream(file)) { + StringBuffer pipelineBuilder = new StringBuffer(); + String pipelineTemplate = IOUtils.toString(is, StandardCharsets.UTF_8); + Matcher m = PLACEHOLDER_PATTERN.matcher(pipelineTemplate); + while (m.find()) { + String placeholder = m.group(1); + m.appendReplacement(pipelineBuilder, replacements.get(placeholder)); + } + m.appendTail(pipelineBuilder); + pipelineDefinition = pipelineBuilder.toString(); + } + + return pipelineDefinition; + } + @Test public void testIntegrationGitAlternativeRepoUrl() throws Exception { Jenkins jenkins = jenkinsRule.jenkins; @@ -372,13 +403,10 @@ public void testIntegrationGitAlternativeRepoUrl() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL_1", "file:///tmp/git-repo/"); + env.put("GIT_URL_1", "file://" + localGitRepoPath.getRemote()); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationAltRepoUrl"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelinesOverrideGitCommit.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelinesOverrideGitCommit.txt"); job.setDefinition(new 
CpsFlowDefinition(definition, true)); final FilePath ws = jenkins.getWorkspaceFor(job); @@ -394,7 +422,7 @@ public void testIntegrationGitAlternativeRepoUrl() throws Exception { final List spans = clientStub.getSpans(); assertEquals(5, spans.size()); for(TraceSpan span : spans) { - assertEquals("file:///tmp/git-repo/", span.getMeta().get(CITags.GIT_REPOSITORY_URL)); + assertEquals("file://" + localGitRepoPath.getRemote(), span.getMeta().get(CITags.GIT_REPOSITORY_URL)); } } @@ -407,13 +435,10 @@ public void testIntegrationGitAlternativeRepoUrlWebhooks() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL_1", "file:///tmp/git-repo/"); + env.put("GIT_URL_1", "file://" + localGitRepoPath.getRemote()); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationAltRepoUrlWebhooks"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelinesOverrideGitCommit.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelinesOverrideGitCommit.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); final FilePath ws = jenkins.getWorkspaceFor(job); @@ -429,7 +454,7 @@ public void testIntegrationGitAlternativeRepoUrlWebhooks() throws Exception { final List webhooks = clientStub.getWebhooks(); assertEquals(5, webhooks.size()); for(JSONObject webhook : webhooks) { - assertEquals("file:///tmp/git-repo/", webhook.getJSONObject("git").get("repository_url")); + assertEquals("file://" + localGitRepoPath.getRemote(), webhook.getJSONObject("git").get("repository_url")); } } @@ -438,16 +463,13 @@ public void testUserSuppliedGitWithoutCommitInfo() throws Exception { Jenkins jenkins = jenkinsRule.jenkins; final EnvironmentVariablesNodeProperty prop = new EnvironmentVariablesNodeProperty(); EnvVars env = prop.getEnvVars(); - env.put(DD_GIT_REPOSITORY_URL, "file:///tmp/git-repo/"); + 
env.put(DD_GIT_REPOSITORY_URL, "file://" + localGitRepoPath.getRemote()); env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); env.put(DD_GIT_TAG, "0.1.0"); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationUserSuppliedGitWithoutCommitInfo"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccessLocalCheckout.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineSuccessLocalCheckout.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); final FilePath ws = jenkins.getWorkspaceFor(job); @@ -463,7 +485,7 @@ public void testUserSuppliedGitWithoutCommitInfo() throws Exception { final List spans = clientStub.getSpans(); assertEquals(3, spans.size()); final TraceSpan buildSpan = spans.get(0); - assertGitVariablesOnSpan(buildSpan, "master"); + assertGitVariablesOnSpan(buildSpan, "master", localGitRepoPath.getRemote()); final Map meta = buildSpan.getMeta(); assertEquals("0.1.0", meta.get(CITags.GIT_TAG)); } @@ -484,16 +506,13 @@ public void testUserSuppliedGitWithoutCommitInfoWebhooks() throws Exception { Jenkins jenkins = jenkinsRule.jenkins; final EnvironmentVariablesNodeProperty prop = new EnvironmentVariablesNodeProperty(); EnvVars env = prop.getEnvVars(); - env.put(DD_GIT_REPOSITORY_URL, "file:///tmp/git-repo/"); + env.put(DD_GIT_REPOSITORY_URL, "file://" + localGitRepoPath.getRemote()); env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); env.put(DD_GIT_TAG, "0.1.0"); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationUserSuppliedGitWithoutCommitInfoWebhooks"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccessLocalCheckout.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineSuccessLocalCheckout.txt"); job.setDefinition(new CpsFlowDefinition(definition, 
true)); final FilePath ws = jenkins.getWorkspaceFor(job); @@ -509,7 +528,7 @@ public void testUserSuppliedGitWithoutCommitInfoWebhooks() throws Exception { final List webhooks = clientStub.getWebhooks(); assertEquals(3, webhooks.size()); final JSONObject webhook = webhooks.get(0); - assertGitVariablesOnWebhook(webhook, "master"); + assertGitVariablesOnWebhook(webhook, "master", localGitRepoPath.getRemote()); assertEquals("0.1.0", webhook.getJSONObject("git").get("tag")); } @@ -518,7 +537,7 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { Jenkins jenkins = jenkinsRule.jenkins; final EnvironmentVariablesNodeProperty prop = new EnvironmentVariablesNodeProperty(); EnvVars env = prop.getEnvVars(); - env.put(DD_GIT_REPOSITORY_URL, "file:///tmp/git-repo/"); + env.put(DD_GIT_REPOSITORY_URL, "file://" + localGitRepoPath.getRemote()); env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); env.put(DD_GIT_COMMIT_MESSAGE, "hardcoded-message"); @@ -531,10 +550,7 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { final String defaultBranch = "refs/heads/hardcoded-master"; env.put(DD_GIT_DEFAULT_BRANCH, defaultBranch); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationUserSuppliedGitWithCommitInfo"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccessLocalCheckout.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineSuccessLocalCheckout.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); final FilePath ws = jenkins.getWorkspaceFor(job); @@ -561,7 +577,7 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT__SHA)); assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT_SHA)); assertEquals("master", meta.get(CITags.GIT_BRANCH)); - 
assertEquals("file:///tmp/git-repo/", meta.get(CITags.GIT_REPOSITORY_URL)); + assertEquals("file://" + localGitRepoPath.getRemote(), meta.get(CITags.GIT_REPOSITORY_URL)); assertEquals("hardcoded-master", meta.get(CITags.GIT_DEFAULT_BRANCH)); } @@ -574,10 +590,7 @@ public void testRawRepositoryUrl() throws Exception { env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationRawRepositoryUrl"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccess.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineSuccess.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); jenkins.getGlobalNodeProperties().add(prop); @@ -603,10 +616,7 @@ public void testFilterSensitiveInfoRepoUrl() throws Exception { env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationFilterSensitiveInfoRepoUrl"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccess.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineSuccess.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); jenkins.getGlobalNodeProperties().add(prop); @@ -626,10 +636,7 @@ public void testFilterSensitiveInfoRepoUrl() throws Exception { @Test public void testStageNamePropagation() throws Exception{ WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegrationStages"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineStages.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineStages.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); new Thread(() -> { try { @@ -664,10 +671,7 @@ public void 
testStageNamePropagationWebhook() throws Exception{ clientStub.configureForWebhooks(); WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegrationStagesWebhook"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineStagesWebhook.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineStagesWebhook.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); new Thread(() -> { try { @@ -707,10 +711,7 @@ public void testStageNamePropagationWebhook() throws Exception{ @Test public void testIntegrationPipelineQueueTimeOnStages() throws Exception { WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegrationQueueTimeOnStages"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineQueueOnStages.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineQueueOnStages.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); // schedule build and wait for it to get queued new Thread(() -> { @@ -815,10 +816,7 @@ public void testIntegrationPipelineQueueTimeOnPipeline() throws Exception { env.put("NODE_NAME", "testPipeline"); jenkinsRule.jenkins.getGlobalNodeProperties().add(envProps); WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegrationQueueTimeOnPipeline"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineQueueOnPipeline.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineQueueOnPipeline.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); // schedule build and wait for it to get queued @@ -862,10 +860,7 @@ public void testIntegrationPipelineQueueTimeOnPipeline() throws Exception { public void testIntegrationNoFailureTag() throws Exception { jenkinsRule.createOnlineSlave(); WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, 
"pipelineIntegrationSuccess"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccess.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineSuccess.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); final WorkflowRun run = job.scheduleBuild2(0).get(); String hostname = DatadogUtilities.getHostname(null); @@ -968,10 +963,7 @@ public void testIntegrationNoFailureTag() throws Exception { @Test public void testIntegrationPipelineSkippedLogic() throws Exception { WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegration-SkippedLogic"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSkippedLogic.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineSkippedLogic.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); job.scheduleBuild2(0).get(); @@ -989,10 +981,7 @@ public void testIntegrationPipelineSkippedLogicWebhook() throws Exception { clientStub.configureForWebhooks(); WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegration-SkippedLogicWebhook"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSkippedLogic.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineSkippedLogic.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); job.scheduleBuild2(0).get(); @@ -1013,10 +1002,7 @@ public void testIntegrationTracesDisabled() throws Exception{ jenkinsRule.createOnlineSlave(); WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegrationSuccess-notraces"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccess.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineSuccess.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); job.scheduleBuild2(0).get(); 
String hostname = DatadogUtilities.getHostname(null); @@ -1076,10 +1062,7 @@ public void getTimeTest() { @Test public void testStagesNodeNames_complexPipelineStages01() throws Exception { WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "complexPipelineStages01"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineComplexStages01.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineComplexStages01.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); // schedule build and wait for it to get queued new Thread(() -> { @@ -1179,10 +1162,7 @@ public void testGlobalTagsPropagationsTraces() throws Exception { jenkinsRule.createOnlineSlave(new LabelAtom("testGlobalTags")); WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegration-GlobalTagsPropagation_job"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineGlobalTags.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineGlobalTags.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); job.scheduleBuild2(0).get(); @@ -1215,10 +1195,7 @@ public void testGlobalTagsPropagationsTraces() throws Exception { @Test public void testErrorPropagationOnStages() throws Exception { WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegration-errorPropagationStages"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream(getFailingPipelineDefinitionName()), - "UTF-8" - ); + String definition = getPipelineDefinition(getFailingPipelineDefinitionName()); job.setDefinition(new CpsFlowDefinition(definition, true)); job.scheduleBuild2(0).get(); @@ -1245,10 +1222,7 @@ public void testErrorPropagationOnStagesWebhook() throws Exception { clientStub.configureForWebhooks(); WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, 
"pipelineIntegration-errorPropagationStagesWebhook"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream(getFailingPipelineDefinitionName()), - "UTF-8" - ); + String definition = getPipelineDefinition(getFailingPipelineDefinitionName()); job.setDefinition(new CpsFlowDefinition(definition, true)); job.scheduleBuild2(0).get(); @@ -1281,10 +1255,7 @@ private boolean isRunningOnWindows() { @Test public void testUnstablePropagationOnStages() throws Exception { WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegration-unstablePropagationStages"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineUnstableOnStages.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineUnstableOnStages.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); job.scheduleBuild2(0).get(); @@ -1311,10 +1282,7 @@ public void testUnstablePropagationOnStagesWebhook() throws Exception { clientStub.configureForWebhooks(); WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegration-unstablePropagationStagesWebhook"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineUnstableOnStages.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineUnstableOnStages.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); job.scheduleBuild2(0).get(); @@ -1342,10 +1310,7 @@ public void testCustomHostnameForWorkers() throws Exception { env.put("DD_CI_HOSTNAME", "testDDCiHostname"); jenkinsRule.jenkins.getGlobalNodeProperties().add(envProps); WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegrationCustomHostname"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineOnWorkers.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineOnWorkers.txt"); job.setDefinition(new 
CpsFlowDefinition(definition, true)); // schedule build and wait for it to get queued @@ -1391,10 +1356,7 @@ public void testCustomHostnameForWorkersWebhook() throws Exception { env.put("DD_CI_HOSTNAME", "testDDCiHostname"); jenkinsRule.jenkins.getGlobalNodeProperties().add(envProps); WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegrationCustomHostnameWebhook"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineOnWorkersWebhook.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineOnWorkersWebhook.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); // schedule build and wait for it to get queued @@ -1538,10 +1500,7 @@ private void assertNoneNameParallelStep(TraceSpan step, TraceSpan stage01, Trace public void testIsManualTrue() throws Exception { Jenkins jenkins = jenkinsRule.jenkins; WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationIsManualTrue"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccess.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineSuccess.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); CauseAction causeAction = new CauseAction(new TimerTriggerCause(), new UserIdCause("johanna")); job.scheduleBuild2(0, causeAction).get(); @@ -1560,10 +1519,7 @@ public void testIsManualTrueWebhooks() throws Exception { clientStub.configureForWebhooks(); Jenkins jenkins = jenkinsRule.jenkins; WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationIsManualTrueWebhook"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccess.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineSuccess.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); CauseAction causeAction = new CauseAction(new TimerTriggerCause(), new 
UserIdCause("johanna")); job.scheduleBuild2(0, causeAction).get(); @@ -1579,10 +1535,7 @@ public void testIsManualTrueWebhooks() throws Exception { public void testIsManualFalse() throws Exception { Jenkins jenkins = jenkinsRule.jenkins; WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationIsManualFalse"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccess.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineSuccess.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); CauseAction causeAction = new CauseAction(new TimerTriggerCause(), new SCMTriggerCause("scm")); job.scheduleBuild2(0, causeAction).get(); @@ -1609,10 +1562,7 @@ public void testIsManualFalseWebhooks() throws Exception { clientStub.configureForWebhooks(); Jenkins jenkins = jenkinsRule.jenkins; WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationIsManualFalseWebhook"); - String definition = IOUtils.toString( - this.getClass().getResourceAsStream("testPipelineSuccess.txt"), - "UTF-8" - ); + String definition = getPipelineDefinition("testPipelineSuccess.txt"); job.setDefinition(new CpsFlowDefinition(definition, true)); CauseAction causeAction = new CauseAction(new TimerTriggerCause(), new SCMTriggerCause("scm")); job.scheduleBuild2(0, causeAction).get(); @@ -1726,7 +1676,7 @@ public void testUnstablePropagationOnNestedStages() throws Exception { private void givenPipeline(String name, String definitionPath) throws Exception { WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, name); - String definition = IOUtils.toString(this.getClass().getResourceAsStream(definitionPath), "UTF-8"); + String definition = getPipelineDefinition(definitionPath); job.setDefinition(new CpsFlowDefinition(definition, true)); job.scheduleBuild2(0).get(); } diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogTraceAbstractTest.java 
b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogTraceAbstractTest.java index 3ea69acbb..9f1c47cf2 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogTraceAbstractTest.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogTraceAbstractTest.java @@ -18,7 +18,7 @@ public abstract class DatadogTraceAbstractTest { - protected void assertGitVariablesOnSpan(TraceSpan span, String defaultBranch) { + protected void assertGitVariablesOnSpan(TraceSpan span, String defaultBranch, String localGitRepoPath) { final Map meta = span.getMeta(); assertEquals("Initial commit\n", meta.get(CITags.GIT_COMMIT_MESSAGE)); assertEquals("John Doe", meta.get(CITags.GIT_COMMIT_AUTHOR_NAME)); @@ -30,11 +30,11 @@ protected void assertGitVariablesOnSpan(TraceSpan span, String defaultBranch) { assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT__SHA)); assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT_SHA)); assertEquals("master", meta.get(CITags.GIT_BRANCH)); - assertEquals("file:///tmp/git-repo/", meta.get(CITags.GIT_REPOSITORY_URL)); + assertEquals("file://" + localGitRepoPath, meta.get(CITags.GIT_REPOSITORY_URL)); assertEquals(defaultBranch, meta.get(CITags.GIT_DEFAULT_BRANCH)); } - protected void assertGitVariablesOnWebhook(JSONObject webhook, String defaultBranch) { + protected void assertGitVariablesOnWebhook(JSONObject webhook, String defaultBranch, String localGitRepoPath) { JSONObject meta = webhook.getJSONObject("git"); assertEquals("Initial commit\n", meta.get("message")); assertEquals("John Doe", meta.get("author_name")); @@ -45,7 +45,7 @@ protected void assertGitVariablesOnWebhook(JSONObject webhook, String defaultBra assertEquals("2020-10-08T07:49:32.000Z", meta.get("commit_time")); assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get("sha")); assertEquals("master", meta.get("branch")); - assertEquals("file:///tmp/git-repo/", 
meta.get("repository_url")); + assertEquals("file://" + localGitRepoPath, meta.get("repository_url")); assertEquals(defaultBranch, meta.get("default_branch")); } diff --git a/src/test/resources/org/datadog/jenkins/plugins/datadog/listeners/testPipelineSuccessLocalCheckout.txt b/src/test/resources/org/datadog/jenkins/plugins/datadog/listeners/testPipelineSuccessLocalCheckout.txt index b5797b7c1..3762efaf7 100644 --- a/src/test/resources/org/datadog/jenkins/plugins/datadog/listeners/testPipelineSuccessLocalCheckout.txt +++ b/src/test/resources/org/datadog/jenkins/plugins/datadog/listeners/testPipelineSuccessLocalCheckout.txt @@ -5,7 +5,7 @@ pipeline { steps { script { git branch: 'master', - url: 'file:///tmp/git-repo/' + url: '$LOCAL_REPO_URL' } } } diff --git a/src/test/resources/org/datadog/jenkins/plugins/datadog/listeners/testPipelinesOverrideGitCommit.txt b/src/test/resources/org/datadog/jenkins/plugins/datadog/listeners/testPipelinesOverrideGitCommit.txt index bbe7b5ac5..8db6fa586 100644 --- a/src/test/resources/org/datadog/jenkins/plugins/datadog/listeners/testPipelinesOverrideGitCommit.txt +++ b/src/test/resources/org/datadog/jenkins/plugins/datadog/listeners/testPipelinesOverrideGitCommit.txt @@ -5,7 +5,7 @@ pipeline { steps { withEnv(["GIT_COMMIT=ccccbbbb"]) { git branch: 'master', - url: 'file:///tmp/git-repo/' + url: '$LOCAL_REPO_URL' } } } From c751e8350ce81e4cc97d5ff8ec2b53fbef2906ef Mon Sep 17 00:00:00 2001 From: Nikita Tkachenko Date: Thu, 25 Jan 2024 16:27:15 +0100 Subject: [PATCH 08/17] Properly convert Windows path to URL --- .../listeners/DatadogBuildListenerIT.java | 42 ++++++++++++------- .../listeners/DatadogGraphListenerTest.java | 39 ++++++++++------- 2 files changed, 52 insertions(+), 29 deletions(-) diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java index d98433c04..d5c0e358b 100644 --- 
a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java @@ -46,6 +46,7 @@ import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; import org.datadog.jenkins.plugins.datadog.traces.CITags; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; +import org.jetbrains.annotations.NotNull; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -140,12 +141,12 @@ public void testTraces() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL", "file://" + localGitRepoPath.getRemote()); + env.put("GIT_URL", toUrl(localGitRepoPath.getRemote())); jenkins.getGlobalNodeProperties().add(prop); final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccess"); - GitSCM git = new GitSCM(GitSCM.createRepoList("file://" + localGitRepoPath.getRemote(), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); + GitSCM git = new GitSCM(GitSCM.createRepoList(toUrl(localGitRepoPath.getRemote()), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); project.setScm(git); final FilePath ws = jenkins.getWorkspaceFor(project); @@ -190,6 +191,19 @@ public void testTraces() throws Exception { assertCleanupActions(run); } + @NotNull + private static String toUrl(String path) { + if (isRunningOnWindows()) { + return "file:///" + path.replace('\\', '/'); + } else { + return "file://" + path; + } + } + + private static boolean isRunningOnWindows() { + return System.getProperty("os.name").toLowerCase().contains("win"); + } + @Test public void testGitDefaultBranch() throws Exception { Jenkins jenkins = jenkinsRule.jenkins; @@ 
-197,14 +211,14 @@ public void testGitDefaultBranch() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL", "file://" + localGitRepoPath.getRemote()); + env.put("GIT_URL", toUrl(localGitRepoPath.getRemote())); final String defaultBranch = "refs/heads/hardcoded-master"; env.put("DD_GIT_DEFAULT_BRANCH", defaultBranch); jenkins.getGlobalNodeProperties().add(prop); final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessDefaultBranch"); - GitSCM git = new GitSCM(GitSCM.createRepoList("file://" + localGitRepoPath.getRemote(), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); + GitSCM git = new GitSCM(GitSCM.createRepoList(toUrl(localGitRepoPath.getRemote()), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); project.setScm(git); project.scheduleBuild2(0).get(); @@ -226,7 +240,7 @@ public void testUserSuppliedGitWithoutCommitInfo() throws Exception { env.put(GIT_BRANCH, "not-valid-branch"); env.put(GIT_COMMIT, "not-valid-commit"); - env.put(DD_GIT_REPOSITORY_URL, "file://" + localGitRepoPath.getRemote()); + env.put(DD_GIT_REPOSITORY_URL, toUrl(localGitRepoPath.getRemote())); env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); final String defaultBranch = "refs/heads/hardcoded-master"; @@ -235,7 +249,7 @@ public void testUserSuppliedGitWithoutCommitInfo() throws Exception { final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessUserSuppliedGitWithoutCommitInfo"); - GitSCM git = new GitSCM(GitSCM.createRepoList("file://" + localGitRepoPath.getRemote(), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new 
LocalBranch("master"))); + GitSCM git = new GitSCM(GitSCM.createRepoList(toUrl(localGitRepoPath.getRemote()), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); project.setScm(git); project.scheduleBuild2(0).get(); @@ -256,7 +270,7 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { env.put(GIT_REPOSITORY_URL, "not-valid-repo"); env.put(GIT_BRANCH, "not-valid-branch"); env.put(GIT_COMMIT, "not-valid-commit"); - env.put(DD_GIT_REPOSITORY_URL, "file://" + localGitRepoPath.getRemote()); + env.put(DD_GIT_REPOSITORY_URL, toUrl(localGitRepoPath.getRemote())); env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); env.put(DD_GIT_COMMIT_MESSAGE, "hardcoded-message"); @@ -272,7 +286,7 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessUserSuppliedGitWithCommitInfo"); - GitSCM git = new GitSCM(GitSCM.createRepoList("file://" + localGitRepoPath.getRemote(), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); + GitSCM git = new GitSCM(GitSCM.createRepoList(toUrl(localGitRepoPath.getRemote()), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); project.setScm(git); project.scheduleBuild2(0).get(); @@ -293,7 +307,7 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT__SHA)); assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT_SHA)); assertEquals("master", meta.get(CITags.GIT_BRANCH)); - assertEquals("file://" + localGitRepoPath.getRemote(), meta.get(CITags.GIT_REPOSITORY_URL)); + assertEquals(toUrl(localGitRepoPath.getRemote()), 
meta.get(CITags.GIT_REPOSITORY_URL)); assertEquals("hardcoded-master", meta.get(CITags.GIT_DEFAULT_BRANCH)); } @@ -307,7 +321,7 @@ public void testUserSuppliedGitWithCommitInfoWebhook() throws Exception { env.put(GIT_REPOSITORY_URL, "not-valid-repo"); env.put(GIT_BRANCH, "not-valid-branch"); env.put(GIT_COMMIT, "not-valid-commit"); - env.put(DD_GIT_REPOSITORY_URL, "file://" + localGitRepoPath.getRemote()); + env.put(DD_GIT_REPOSITORY_URL, toUrl(localGitRepoPath.getRemote())); env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); env.put(DD_GIT_COMMIT_MESSAGE, "hardcoded-message"); @@ -323,7 +337,7 @@ public void testUserSuppliedGitWithCommitInfoWebhook() throws Exception { final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessUserSuppliedGitWithCommitInfoWebhook"); - GitSCM git = new GitSCM(GitSCM.createRepoList("file://" + localGitRepoPath.getRemote(), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); + GitSCM git = new GitSCM(GitSCM.createRepoList(toUrl(localGitRepoPath.getRemote()), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); project.setScm(git); project.scheduleBuild2(0).get(); @@ -343,7 +357,7 @@ public void testUserSuppliedGitWithCommitInfoWebhook() throws Exception { assertEquals("hardcoded-committer-date", meta.getString("commit_time")); assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.getString("sha")); assertEquals("master", meta.getString("branch")); - assertEquals("file://" + localGitRepoPath.getRemote(), meta.getString("repository_url")); + assertEquals(toUrl(localGitRepoPath.getRemote()), meta.getString("repository_url")); assertEquals("hardcoded-master", meta.getString("default_branch")); } @@ -410,12 +424,12 @@ public void testGitAlternativeRepoUrlWebhook() throws Exception { EnvVars env 
= prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL_1", "file://" + localGitRepoPath.getRemote()); + env.put("GIT_URL_1", toUrl(localGitRepoPath.getRemote())); jenkins.getGlobalNodeProperties().add(prop); final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessAltRepoUrlWebhook"); - GitSCM git = new GitSCM(GitSCM.createRepoList("file://" + localGitRepoPath.getRemote(), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); + GitSCM git = new GitSCM(GitSCM.createRepoList(toUrl(localGitRepoPath.getRemote()), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master"))); project.setScm(git); final FilePath ws = jenkins.getWorkspaceFor(project); diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java index 63a1bea69..28e40105c 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java @@ -33,7 +33,6 @@ import hudson.triggers.SCMTrigger.SCMTriggerCause; import hudson.triggers.TimerTrigger.TimerTriggerCause; import java.io.BufferedReader; -import java.io.File; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; @@ -67,6 +66,7 @@ import org.jenkinsci.plugins.workflow.graph.BlockStartNode; import org.jenkinsci.plugins.workflow.job.WorkflowJob; import org.jenkinsci.plugins.workflow.job.WorkflowRun; +import org.jetbrains.annotations.NotNull; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -242,7 +242,7 @@ public void testIntegrationGitInfo() throws Exception 
{ EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL", "file://" + localGitRepoPath.getRemote()); + env.put("GIT_URL", toUrl(localGitRepoPath.getRemote())); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationSingleCommit"); String definition = getPipelineDefinition("testPipelineSuccessLocalCheckout.txt"); @@ -264,6 +264,15 @@ public void testIntegrationGitInfo() throws Exception { assertGitVariablesOnSpan(buildSpan, "master", localGitRepoPath.getRemote()); } + @NotNull + private static String toUrl(String path) { + if (isRunningOnWindows()) { + return "file:///" + path.replace('\\', '/'); + } else { + return "file://" + path; + } + } + @Test public void testIntegrationNonCIVisibilityEnvVars() throws Exception { Jenkins jenkins = jenkinsRule.jenkins; @@ -293,7 +302,7 @@ public void testIntegrationGitInfoWebhooks() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL", "file://" + localGitRepoPath.getRemote()); + env.put("GIT_URL", toUrl(localGitRepoPath.getRemote())); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationSingleCommitWebhooks"); String definition = getPipelineDefinition("testPipelineSuccessLocalCheckout.txt"); @@ -321,7 +330,7 @@ public void testIntegrationGitInfoDefaultBranchEnvVar() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL", "file://" + localGitRepoPath.getRemote()); + env.put("GIT_URL", toUrl(localGitRepoPath.getRemote())); final String defaultBranch = "refs/heads/hardcoded-master"; env.put("DD_GIT_DEFAULT_BRANCH", defaultBranch); @@ -352,7 +361,7 @@ public void testIntegrationGitInfoOverrideCommit() throws Exception { EnvVars env = prop.getEnvVars(); 
env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL", "file://" + localGitRepoPath.getRemote()); + env.put("GIT_URL", toUrl(localGitRepoPath.getRemote())); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationOverrideCommit"); String definition = getPipelineDefinition("testPipelinesOverrideGitCommit.txt"); @@ -378,7 +387,7 @@ public void testIntegrationGitInfoOverrideCommit() throws Exception { private static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("\\$([A-Z_]+)"); private String getPipelineDefinition(String file) throws IOException { - Map replacements = Collections.singletonMap("LOCAL_REPO_URL", "file://" + localGitRepoPath.getRemote()); + Map replacements = Collections.singletonMap("LOCAL_REPO_URL", toUrl(localGitRepoPath.getRemote())); String pipelineDefinition; try (InputStream is = DatadogGraphListenerTest.class.getResourceAsStream(file)) { @@ -403,7 +412,7 @@ public void testIntegrationGitAlternativeRepoUrl() throws Exception { EnvVars env = prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL_1", "file://" + localGitRepoPath.getRemote()); + env.put("GIT_URL_1", toUrl(localGitRepoPath.getRemote())); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationAltRepoUrl"); String definition = getPipelineDefinition("testPipelinesOverrideGitCommit.txt"); @@ -422,7 +431,7 @@ public void testIntegrationGitAlternativeRepoUrl() throws Exception { final List spans = clientStub.getSpans(); assertEquals(5, spans.size()); for(TraceSpan span : spans) { - assertEquals("file://" + localGitRepoPath.getRemote(), span.getMeta().get(CITags.GIT_REPOSITORY_URL)); + assertEquals(toUrl(localGitRepoPath.getRemote()), span.getMeta().get(CITags.GIT_REPOSITORY_URL)); } } @@ -435,7 +444,7 @@ public void testIntegrationGitAlternativeRepoUrlWebhooks() throws Exception { EnvVars env 
= prop.getEnvVars(); env.put("GIT_BRANCH", "master"); env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f"); - env.put("GIT_URL_1", "file://" + localGitRepoPath.getRemote()); + env.put("GIT_URL_1", toUrl(localGitRepoPath.getRemote())); WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationAltRepoUrlWebhooks"); String definition = getPipelineDefinition("testPipelinesOverrideGitCommit.txt"); @@ -454,7 +463,7 @@ public void testIntegrationGitAlternativeRepoUrlWebhooks() throws Exception { final List webhooks = clientStub.getWebhooks(); assertEquals(5, webhooks.size()); for(JSONObject webhook : webhooks) { - assertEquals("file://" + localGitRepoPath.getRemote(), webhook.getJSONObject("git").get("repository_url")); + assertEquals(toUrl(localGitRepoPath.getRemote()), webhook.getJSONObject("git").get("repository_url")); } } @@ -463,7 +472,7 @@ public void testUserSuppliedGitWithoutCommitInfo() throws Exception { Jenkins jenkins = jenkinsRule.jenkins; final EnvironmentVariablesNodeProperty prop = new EnvironmentVariablesNodeProperty(); EnvVars env = prop.getEnvVars(); - env.put(DD_GIT_REPOSITORY_URL, "file://" + localGitRepoPath.getRemote()); + env.put(DD_GIT_REPOSITORY_URL, toUrl(localGitRepoPath.getRemote())); env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); env.put(DD_GIT_TAG, "0.1.0"); @@ -506,7 +515,7 @@ public void testUserSuppliedGitWithoutCommitInfoWebhooks() throws Exception { Jenkins jenkins = jenkinsRule.jenkins; final EnvironmentVariablesNodeProperty prop = new EnvironmentVariablesNodeProperty(); EnvVars env = prop.getEnvVars(); - env.put(DD_GIT_REPOSITORY_URL, "file://" + localGitRepoPath.getRemote()); + env.put(DD_GIT_REPOSITORY_URL, toUrl(localGitRepoPath.getRemote())); env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); env.put(DD_GIT_TAG, "0.1.0"); @@ -537,7 +546,7 @@ public void 
testUserSuppliedGitWithCommitInfo() throws Exception { Jenkins jenkins = jenkinsRule.jenkins; final EnvironmentVariablesNodeProperty prop = new EnvironmentVariablesNodeProperty(); EnvVars env = prop.getEnvVars(); - env.put(DD_GIT_REPOSITORY_URL, "file://" + localGitRepoPath.getRemote()); + env.put(DD_GIT_REPOSITORY_URL, toUrl(localGitRepoPath.getRemote())); env.put(DD_GIT_BRANCH, "master"); env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f"); env.put(DD_GIT_COMMIT_MESSAGE, "hardcoded-message"); @@ -577,7 +586,7 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception { assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT__SHA)); assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT_SHA)); assertEquals("master", meta.get(CITags.GIT_BRANCH)); - assertEquals("file://" + localGitRepoPath.getRemote(), meta.get(CITags.GIT_REPOSITORY_URL)); + assertEquals(toUrl(localGitRepoPath.getRemote()), meta.get(CITags.GIT_REPOSITORY_URL)); assertEquals("hardcoded-master", meta.get(CITags.GIT_DEFAULT_BRANCH)); } @@ -1248,7 +1257,7 @@ private String getFailingPipelineDefinitionName() { return isRunningOnWindows() ? 
"testPipelineErrorOnStagesOnWindows.txt" : "testPipelineErrorOnStages.txt"; } - private boolean isRunningOnWindows() { + private static boolean isRunningOnWindows() { return System.getProperty("os.name").toLowerCase().contains("win"); } From ae44b1d41477608e887f4cf2e10eaf6441ec1b1d Mon Sep 17 00:00:00 2001 From: Nikita Tkachenko Date: Thu, 25 Jan 2024 16:55:11 +0100 Subject: [PATCH 09/17] Fix git repo URL asserts in integration tests --- .../datadog/listeners/DatadogBuildListenerIT.java | 8 ++++---- .../datadog/listeners/DatadogGraphListenerTest.java | 10 +++++----- .../datadog/listeners/DatadogTraceAbstractTest.java | 8 ++++---- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java index d5c0e358b..eb4f8f88e 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java @@ -161,7 +161,7 @@ public void testTraces() throws Exception { assertEquals(1, spans.size()); final TraceSpan buildSpan = spans.get(0); - assertGitVariablesOnSpan(buildSpan, "master", localGitRepoPath.getRemote()); + assertGitVariablesOnSpan(buildSpan, "master", toUrl(localGitRepoPath.getRemote())); final Map meta = buildSpan.getMeta(); final Map metrics = buildSpan.getMetrics(); assertEquals(BuildPipelineNode.NodeType.PIPELINE.getBuildLevel(), meta.get(CITags._DD_CI_BUILD_LEVEL)); @@ -228,7 +228,7 @@ public void testGitDefaultBranch() throws Exception { assertEquals(1, spans.size()); final TraceSpan buildSpan = spans.get(0); - assertGitVariablesOnSpan(buildSpan, "hardcoded-master", localGitRepoPath.getRemote()); + assertGitVariablesOnSpan(buildSpan, "hardcoded-master", toUrl(localGitRepoPath.getRemote())); } @Test @@ -259,7 +259,7 @@ public void 
testUserSuppliedGitWithoutCommitInfo() throws Exception { assertEquals(1, spans.size()); final TraceSpan buildSpan = spans.get(0); - assertGitVariablesOnSpan(buildSpan, "hardcoded-master", localGitRepoPath.getRemote()); + assertGitVariablesOnSpan(buildSpan, "hardcoded-master", toUrl(localGitRepoPath.getRemote())); } @Test @@ -443,7 +443,7 @@ public void testGitAlternativeRepoUrlWebhook() throws Exception { assertEquals(1, webhooks.size()); final JSONObject webhook = webhooks.get(0); - assertGitVariablesOnWebhook(webhook, "master", localGitRepoPath.getRemote()); + assertGitVariablesOnWebhook(webhook, "master", toUrl(localGitRepoPath.getRemote())); } @Test diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java index 28e40105c..4fdcbe46a 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java @@ -261,7 +261,7 @@ public void testIntegrationGitInfo() throws Exception { final List spans = clientStub.getSpans(); assertEquals(3, spans.size()); final TraceSpan buildSpan = spans.get(0); - assertGitVariablesOnSpan(buildSpan, "master", localGitRepoPath.getRemote()); + assertGitVariablesOnSpan(buildSpan, "master", toUrl(localGitRepoPath.getRemote())); } @NotNull @@ -320,7 +320,7 @@ public void testIntegrationGitInfoWebhooks() throws Exception { clientStub.waitForWebhooks(3); final List webhook = clientStub.getWebhooks(); assertEquals(3, webhook.size()); - assertGitVariablesOnWebhook(webhook.get(0), "master", localGitRepoPath.getRemote()); + assertGitVariablesOnWebhook(webhook.get(0), "master", toUrl(localGitRepoPath.getRemote())); } @Test @@ -351,7 +351,7 @@ public void testIntegrationGitInfoDefaultBranchEnvVar() throws Exception { final List spans = clientStub.getSpans(); assertEquals(3, 
spans.size()); final TraceSpan buildSpan = spans.get(0); - assertGitVariablesOnSpan(buildSpan, "hardcoded-master", localGitRepoPath.getRemote()); + assertGitVariablesOnSpan(buildSpan, "hardcoded-master", toUrl(localGitRepoPath.getRemote())); } @Test @@ -494,7 +494,7 @@ public void testUserSuppliedGitWithoutCommitInfo() throws Exception { final List spans = clientStub.getSpans(); assertEquals(3, spans.size()); final TraceSpan buildSpan = spans.get(0); - assertGitVariablesOnSpan(buildSpan, "master", localGitRepoPath.getRemote()); + assertGitVariablesOnSpan(buildSpan, "master", toUrl(localGitRepoPath.getRemote())); final Map meta = buildSpan.getMeta(); assertEquals("0.1.0", meta.get(CITags.GIT_TAG)); } @@ -537,7 +537,7 @@ public void testUserSuppliedGitWithoutCommitInfoWebhooks() throws Exception { final List webhooks = clientStub.getWebhooks(); assertEquals(3, webhooks.size()); final JSONObject webhook = webhooks.get(0); - assertGitVariablesOnWebhook(webhook, "master", localGitRepoPath.getRemote()); + assertGitVariablesOnWebhook(webhook, "master", toUrl(localGitRepoPath.getRemote())); assertEquals("0.1.0", webhook.getJSONObject("git").get("tag")); } diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogTraceAbstractTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogTraceAbstractTest.java index 9f1c47cf2..3c1f5f6b7 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogTraceAbstractTest.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogTraceAbstractTest.java @@ -18,7 +18,7 @@ public abstract class DatadogTraceAbstractTest { - protected void assertGitVariablesOnSpan(TraceSpan span, String defaultBranch, String localGitRepoPath) { + protected void assertGitVariablesOnSpan(TraceSpan span, String defaultBranch, String gitRepoUrl) { final Map meta = span.getMeta(); assertEquals("Initial commit\n", meta.get(CITags.GIT_COMMIT_MESSAGE)); assertEquals("John Doe", 
meta.get(CITags.GIT_COMMIT_AUTHOR_NAME)); @@ -30,11 +30,11 @@ protected void assertGitVariablesOnSpan(TraceSpan span, String defaultBranch, St assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT__SHA)); assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT_SHA)); assertEquals("master", meta.get(CITags.GIT_BRANCH)); - assertEquals("file://" + localGitRepoPath, meta.get(CITags.GIT_REPOSITORY_URL)); + assertEquals(gitRepoUrl, meta.get(CITags.GIT_REPOSITORY_URL)); assertEquals(defaultBranch, meta.get(CITags.GIT_DEFAULT_BRANCH)); } - protected void assertGitVariablesOnWebhook(JSONObject webhook, String defaultBranch, String localGitRepoPath) { + protected void assertGitVariablesOnWebhook(JSONObject webhook, String defaultBranch, String gitRepoUrl) { JSONObject meta = webhook.getJSONObject("git"); assertEquals("Initial commit\n", meta.get("message")); assertEquals("John Doe", meta.get("author_name")); @@ -45,7 +45,7 @@ protected void assertGitVariablesOnWebhook(JSONObject webhook, String defaultBra assertEquals("2020-10-08T07:49:32.000Z", meta.get("commit_time")); assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get("sha")); assertEquals("master", meta.get("branch")); - assertEquals("file://" + localGitRepoPath, meta.get("repository_url")); + assertEquals(gitRepoUrl, meta.get("repository_url")); assertEquals(defaultBranch, meta.get("default_branch")); } From e7566bdc6aadfdaa272c5278f7078e59fd90823f Mon Sep 17 00:00:00 2001 From: Nikita Tkachenko Date: Thu, 25 Jan 2024 18:17:06 +0100 Subject: [PATCH 10/17] Replace custom shutdown hook with Jenkins @Terminator mechanism --- .../listeners/DatadogGraphListener.java | 2 +- .../datadog/traces/write/TraceWriter.java | 31 +++++-------------- .../traces/write/TraceWriterFactory.java | 15 ++++++++- 3 files changed, 22 insertions(+), 26 deletions(-) diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java 
b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java index 226216c80..0e3f5f621 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java @@ -110,7 +110,7 @@ public void onNewHead(FlowNode flowNode) { TraceWriter traceWriter = TraceWriterFactory.getTraceWriter(); if (traceWriter != null) { try { - traceWriter.submitPipeline(flowNode, run); + traceWriter.submitPipelineStep(flowNode, run); } catch (InterruptedException e) { Thread.currentThread().interrupt(); DatadogUtilities.severe(logger, e, "Interrupted while submitting pipeline trace for node " + flowNode.getDisplayName() + " in run " + (run != null ? run.getDisplayName() : "")); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java index ad1f9782a..f53d77633 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java @@ -33,25 +33,24 @@ public final class TraceWriter { private final TraceWriteStrategy traceWriteStrategy; private final BlockingQueue queue; private final Thread poller; - private final Thread pollerShutdownHook; public TraceWriter(DatadogClient datadogClient) { this.traceWriteStrategy = datadogClient.createTraceWriteStrategy(); - this.queue = new ArrayBlockingQueue<>(getEnv(QUEUE_CAPACITY_ENV_VAR, DEFAULT_QUEUE_CAPACITY)); - this.poller = new Thread(this::runPollingLoop, "DD-Trace-Writer"); - - this.pollerShutdownHook = new Thread(this::runShutdownHook, "DD-Trace-Writer-Shutdown-Hook"); - Runtime.getRuntime().addShutdownHook(pollerShutdownHook); } public void start() { poller.start(); } - public void stop() { + public void stopAsynchronously() { + poller.interrupt(); + } + + public void 
stopSynchronously() throws InterruptedException { poller.interrupt(); + poller.join(TimeUnit.SECONDS.toMillis(getEnv(STOP_TIMEOUT_ENV_VAR, DEFAULT_STOP_TIMEOUT_SECONDS))); } public void submitBuild(final BuildData buildData, final Run run) throws InterruptedException, TimeoutException { @@ -59,7 +58,7 @@ public void submitBuild(final BuildData buildData, final Run run) throws In submit(buildJson); } - public void submitPipeline(FlowNode flowNode, Run run) throws InterruptedException, TimeoutException { + public void submitPipelineStep(FlowNode flowNode, Run run) throws InterruptedException, TimeoutException { Collection nodeJsons = traceWriteStrategy.serialize(flowNode, run); for (JSONObject nodeJson : nodeJsons) { submit(nodeJson); @@ -98,22 +97,6 @@ private void runPollingLoop() { } } logger.info("Queue polling stopped, spans not flushed: " + queue.size()); - - try { - Runtime.getRuntime().removeShutdownHook(pollerShutdownHook); - } catch (IllegalStateException e) { - // JVM is being shutdown, the hook has already been called - } - } - - private void runShutdownHook() { - stop(); - try { - // delay JVM shutdown until remaining spans are sent (or until timeout) - poller.join(TimeUnit.SECONDS.toMillis(getEnv(STOP_TIMEOUT_ENV_VAR, DEFAULT_STOP_TIMEOUT_SECONDS))); - } catch (InterruptedException e) { - // ignore, should be impossible to end up here - } } private static int getEnv(String envVar, int defaultValue) { diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriterFactory.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriterFactory.java index 5de0bb808..71714cee9 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriterFactory.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriterFactory.java @@ -1,5 +1,6 @@ package org.datadog.jenkins.plugins.datadog.traces.write; +import hudson.init.Terminator; import javax.annotation.Nullable; import 
org.datadog.jenkins.plugins.datadog.DatadogClient; import org.datadog.jenkins.plugins.datadog.clients.ClientFactory; @@ -14,13 +15,25 @@ public static synchronized void onDatadogClientUpdate(@Nullable DatadogClient cl } if (TRACE_WRITER != null) { - TRACE_WRITER.stop(); + TRACE_WRITER.stopAsynchronously(); } TRACE_WRITER = new TraceWriter(client); TRACE_WRITER.start(); } + /** + * This method is called when the plugin is stopped. + * If writer is initialized, it will be stopped synchronously. + */ + @Terminator + public static synchronized void stop() throws InterruptedException { + if (TRACE_WRITER != null) { + TRACE_WRITER.stopSynchronously(); + TRACE_WRITER = null; + } + } + @Nullable public static TraceWriter getTraceWriter() { if (TRACE_WRITER == null) { From 52371d7f1756f24457278da6121ef9b917a70ec7 Mon Sep 17 00:00:00 2001 From: Nikita Tkachenko Date: Fri, 26 Jan 2024 11:36:46 +0100 Subject: [PATCH 11/17] Fix integration tests on Windows --- .../jenkins/plugins/datadog/DatadogUtilities.java | 5 ++--- .../datadog/listeners/DatadogStepListener.java | 12 ++++++++---- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java b/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java index c6d79226a..6eac713f9 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java @@ -93,6 +93,7 @@ public class DatadogUtilities { private static final Integer MAX_HOSTNAME_LEN = 255; private static final String DATE_FORMAT_ISO8601 = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX"; + private static final List UNIX_OS = Arrays.asList("mac", "linux", "freebsd", "sunos"); /** * @return - The descriptor for the Datadog plugin. In this case the global configuration. @@ -489,8 +490,6 @@ public static String getAwsInstanceID() throws IOException { * @return a human readable String for the hostname. 
*/ public static String getHostname(EnvVars envVars) { - String[] UNIX_OS = {"mac", "linux", "freebsd", "sunos"}; - // Check hostname configuration from Jenkins String hostname = null; try { @@ -537,7 +536,7 @@ public static String getHostname(EnvVars envVars) { // Check OS specific unix commands String os = getOS(); - if (Arrays.asList(UNIX_OS).contains(os)) { + if (UNIX_OS.contains(os)) { // Attempt to grab unix hostname try { String[] cmd = {"/bin/hostname", "-f"}; diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java index 73fa89a22..7b344e1c1 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java @@ -153,14 +153,18 @@ private static String getNodeHostname(final StepContext stepContext) { try { Computer computer = stepContext.get(Computer.class); if(computer != null) { + String computerNodeName = DatadogUtilities.getNodeName(computer); + if (DatadogUtilities.isMainNode(computerNodeName)) { + String masterHostname = DatadogUtilities.getHostname(null); + if (DatadogUtilities.isValidHostname(masterHostname)) { + return masterHostname; + } + } + String computerHostName = computer.getHostName(); if (DatadogUtilities.isValidHostname(computerHostName)) { return computerHostName; } - String computerNodeName = DatadogUtilities.getNodeName(computer); - if (DatadogUtilities.isMainNode(computerNodeName)) { - return DatadogUtilities.getHostname(null); - } } } catch (Exception e){ logger.fine("Unable to extract hostname from StepContext."); From c793cf89cdfb8384bb97d71639048d679809a85c Mon Sep 17 00:00:00 2001 From: Nikita Tkachenko Date: Fri, 26 Jan 2024 17:43:48 +0100 Subject: [PATCH 12/17] Address review comments --- .../datadog/clients/DatadogAgentClient.java | 50 +++++++++---------- 
.../datadog/clients/DatadogApiClient.java | 50 ++++++++++--------- .../traces/write/AgentTraceWriteStrategy.java | 47 ++++++++++------- .../plugins/datadog/traces/write/Span.java | 25 ++++++++++ .../traces/write/TraceWriteStrategy.java | 12 +++-- .../traces/write/TraceWriteStrategyImpl.java | 42 +++++++++++----- .../datadog/traces/write/TraceWriter.java | 22 ++++---- .../plugins/datadog/traces/write/Track.java | 3 ++ .../plugins/datadog/util/CircuitBreaker.java | 5 -- .../datadog/clients/DatadogClientStub.java | 30 +++++++---- 10 files changed, 177 insertions(+), 109 deletions(-) create mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/Span.java create mode 100644 src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/Track.java diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java index b73756625..8507db19b 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java @@ -34,10 +34,10 @@ of this software and associated documentation files (the "Software"), to deal import java.net.Socket; import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Function; @@ -49,14 +49,12 @@ of this software and associated documentation files (the "Software"), to deal import org.datadog.jenkins.plugins.datadog.DatadogEvent; import org.datadog.jenkins.plugins.datadog.DatadogGlobalConfiguration; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; -import org.datadog.jenkins.plugins.datadog.traces.DatadogTraceBuildLogic; -import 
org.datadog.jenkins.plugins.datadog.traces.DatadogTracePipelineLogic; -import org.datadog.jenkins.plugins.datadog.traces.DatadogWebhookBuildLogic; -import org.datadog.jenkins.plugins.datadog.traces.DatadogWebhookPipelineLogic; import org.datadog.jenkins.plugins.datadog.traces.mapper.JsonTraceSpanMapper; import org.datadog.jenkins.plugins.datadog.traces.write.AgentTraceWriteStrategy; +import org.datadog.jenkins.plugins.datadog.traces.write.Span; import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriteStrategy; import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriteStrategyImpl; +import org.datadog.jenkins.plugins.datadog.traces.write.Track; import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings; import org.datadog.jenkins.plugins.datadog.util.TagsUtil; import org.json.JSONArray; @@ -524,8 +522,8 @@ public boolean sendLogs(String payload) { @Override public TraceWriteStrategy createTraceWriteStrategy() { - TraceWriteStrategyImpl evpStrategy = new TraceWriteStrategyImpl(new DatadogWebhookBuildLogic(), new DatadogWebhookPipelineLogic(), this::sendSpansToWebhook); - TraceWriteStrategyImpl apmStrategy = new TraceWriteStrategyImpl(new DatadogTraceBuildLogic(), new DatadogTracePipelineLogic(), this::sendSpansToApm); + TraceWriteStrategyImpl evpStrategy = new TraceWriteStrategyImpl(Track.WEBHOOK, this::sendSpansToWebhook); + TraceWriteStrategyImpl apmStrategy = new TraceWriteStrategyImpl(Track.APM, this::sendSpansToApm); return new AgentTraceWriteStrategy(evpStrategy, apmStrategy, this::isEvpProxySupported); } @@ -538,19 +536,7 @@ boolean isEvpProxySupported() { /** * Posts a given payload to the Agent EVP Proxy, so it is forwarded to the Webhook Intake. */ - private void sendSpansToWebhook(List spans) { - for (net.sf.json.JSONObject span : spans) { - // webhook intake does not support batch requests - postWebhook(span.toString()); - } - } - - /** - * Posts a given payload to the Agent EVP Proxy, so it is forwarded to the Webhook Intake. 
- */ - private void postWebhook(String payload) { - logger.fine("Sending webhook"); - + private void sendSpansToWebhook(Collection spans) { DatadogGlobalConfiguration datadogGlobalDescriptor = DatadogUtilities.getDatadogGlobalDescriptor(); String urlParameters = datadogGlobalDescriptor != null ? "?service=" + datadogGlobalDescriptor.getCiInstanceName() : ""; String url = String.format("http://%s:%d/evp_proxy/v1/api/v2/webhook/%s", hostname, traceCollectionPort, urlParameters); @@ -559,15 +545,29 @@ private void postWebhook(String payload) { headers.put("X-Datadog-EVP-Subdomain", "webhook-intake"); headers.put("DD-CI-PROVIDER-NAME", "jenkins"); - byte[] body = payload.getBytes(StandardCharsets.UTF_8); - client.postAsynchronously(url, headers, "application/json", body); + for (Span span : spans) { + if (span.getTrack() != Track.WEBHOOK) { + logger.severe("Expected webhook track, got " + span.getTrack() + ", dropping span"); + continue; + } + + byte[] body = span.getPayload().toString().getBytes(StandardCharsets.UTF_8); + + // webhook intake does not support batch requests + logger.fine("Sending webhook"); + client.postAsynchronously(url, headers, "application/json", body); + } } - private void sendSpansToApm(List spans) { + private void sendSpansToApm(Collection spans) { try { Map tracesById = new HashMap<>(); - for (net.sf.json.JSONObject span : spans) { - tracesById.computeIfAbsent(span.getString(JsonTraceSpanMapper.TRACE_ID), k -> new net.sf.json.JSONArray()).add(span); + for (Span span : spans) { + if (span.getTrack() != Track.APM) { + logger.severe("Expected APM track, got " + span.getTrack() + ", dropping span"); + continue; + } + tracesById.computeIfAbsent(span.getPayload().getString(JsonTraceSpanMapper.TRACE_ID), k -> new net.sf.json.JSONArray()).add(span.getPayload()); } final JSONArray jsonTraces = new JSONArray(); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogApiClient.java 
b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogApiClient.java index e67ccec68..23ccd88df 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogApiClient.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogApiClient.java @@ -28,9 +28,9 @@ of this software and associated documentation files (the "Software"), to deal import hudson.util.Secret; import java.io.IOException; import java.nio.charset.StandardCharsets; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentMap; @@ -45,10 +45,10 @@ of this software and associated documentation files (the "Software"), to deal import org.datadog.jenkins.plugins.datadog.DatadogEvent; import org.datadog.jenkins.plugins.datadog.DatadogGlobalConfiguration; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; -import org.datadog.jenkins.plugins.datadog.traces.DatadogWebhookBuildLogic; -import org.datadog.jenkins.plugins.datadog.traces.DatadogWebhookPipelineLogic; +import org.datadog.jenkins.plugins.datadog.traces.write.Span; import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriteStrategy; import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriteStrategyImpl; +import org.datadog.jenkins.plugins.datadog.traces.write.Track; import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings; import org.datadog.jenkins.plugins.datadog.util.TagsUtil; @@ -102,14 +102,13 @@ public class DatadogApiClient implements DatadogClient { public static DatadogClient getInstance(String url, String logIntakeUrl, String webhookIntakeUrl, Secret apiKey){ // If the configuration has not changed, return the current instance without validation // since we've already validated and/or errored about the data - - DatadogApiClient newInstance = new DatadogApiClient(url, logIntakeUrl, webhookIntakeUrl, apiKey); - if (instance != null 
&& instance.equals(newInstance)) { + if (instance != null && !configurationChanged(url, logIntakeUrl, webhookIntakeUrl, apiKey)) { if (DatadogApiClient.failedLastValidation) { return null; } return instance; } + DatadogApiClient newInstance = new DatadogApiClient(url, logIntakeUrl, webhookIntakeUrl, apiKey); if (enableValidations) { synchronized (DatadogApiClient.class) { DatadogApiClient.instance = newInstance; @@ -126,6 +125,13 @@ public static DatadogClient getInstance(String url, String logIntakeUrl, String return newInstance; } + private static boolean configurationChanged(String url, String logIntakeUrl, String webhookIntakeUrl, Secret apiKey){ + return !instance.getUrl().equals(url) || + !instance.getLogIntakeUrl().equals(logIntakeUrl) || + !instance.getWebhookIntakeUrl().equals(webhookIntakeUrl) || + !instance.getApiKey().equals(apiKey); + } + private DatadogApiClient(String url, String logIntakeUrl, String webhookIntakeUrl, Secret apiKey) { this.url = url; this.apiKey = apiKey; @@ -485,24 +491,10 @@ private boolean validateWebhookIntakeConnection() throws IOException { @Override public TraceWriteStrategy createTraceWriteStrategy() { - return new TraceWriteStrategyImpl(new DatadogWebhookBuildLogic(), new DatadogWebhookPipelineLogic(), this::sendSpans); + return new TraceWriteStrategyImpl(Track.WEBHOOK, this::sendSpans); } - private void sendSpans(List spans) { - for (JSONObject span : spans) { - // webhook intake does not support batch requests - postWebhook(span.toString()); - } - } - - /** - * Posts a given payload to the Datadog Webhook Intake, using the user configured apiKey. - * - * @param payload - A webhook payload. 
- */ - private void postWebhook(String payload) { - logger.fine("Sending webhook"); - + private void sendSpans(Collection spans) { if (this.webhookIntakeConnectionBroken) { throw new RuntimeException("Your client is not initialized properly; webhook intake connection is broken."); } @@ -515,7 +507,17 @@ private void postWebhook(String payload) { headers.put("DD-API-KEY", Secret.toString(apiKey)); headers.put("DD-CI-PROVIDER-NAME", "jenkins"); - byte[] body = payload.getBytes(StandardCharsets.UTF_8); - httpClient.postAsynchronously(url, headers, "application/json", body); + for (Span span : spans) { + if (span.getTrack() != Track.WEBHOOK) { + logger.severe("Expected webhook track, got " + span.getTrack() + ", dropping span"); + continue; + } + + byte[] body = span.getPayload().toString().getBytes(StandardCharsets.UTF_8); + + // webhook intake does not support batch requests + logger.fine("Sending webhook"); + httpClient.postAsynchronously(url, headers, "application/json", body); + } } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java index 790268f9d..e3c206281 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java @@ -3,10 +3,12 @@ import hudson.model.Run; import java.util.Collection; import java.util.List; +import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; import java.util.logging.Logger; -import net.sf.json.JSONObject; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.datadog.jenkins.plugins.datadog.clients.DatadogAgentClient; import org.datadog.jenkins.plugins.datadog.model.BuildData; import org.jenkinsci.plugins.workflow.graph.FlowNode; @@ -27,6 +29,14 @@ public class AgentTraceWriteStrategy 
implements TraceWriteStrategy { private final TraceWriteStrategy evpProxyStrategy; private final TraceWriteStrategy apmStrategy; private final Supplier checkEvpProxySupport; + /** + * Whether the Agent supports EVP Proxy. + *

+ * This value may change from {@code false} to {@code true} if the Agent that this Jenkins talks to gets updated + * (the Agent's support for EVP proxy is checked periodically). + *

+ * We don't handle agent downgrades, so {@code true} to {@code false} change is not possible. + */ private volatile boolean evpProxySupported = false; private volatile long lastEvpProxyCheckTimeMs = 0L; @@ -37,32 +47,33 @@ public AgentTraceWriteStrategy(TraceWriteStrategy evpProxyStrategy, TraceWriteSt } @Override - public JSONObject serialize(BuildData buildData, Run run) { - return getCurrentStrategy().serialize(buildData, run); + public Span createSpan(BuildData buildData, Run run) { + return getCurrentStrategy().createSpan(buildData, run); } + @Nonnull @Override - public Collection serialize(FlowNode flowNode, Run run) { - return getCurrentStrategy().serialize(flowNode, run); + public Collection createSpan(FlowNode flowNode, Run run) { + return getCurrentStrategy().createSpan(flowNode, run); } @Override - public void send(List spans) { - // we have to check serialized spans to know where to send them, + public void send(Collection spans) { + // we have to check the track for every span, // because the serialization strategy might've changed in between serialize() and send() - if (isWebhook(spans)) { - evpProxyStrategy.send(spans); - } else { - apmStrategy.send(spans); - } - } + Map> spansByTrack = spans.stream().collect(Collectors.groupingBy(Span::getTrack)); + for (Map.Entry> e : spansByTrack.entrySet()) { + Track track = e.getKey(); + List trackSpans = e.getValue(); - private boolean isWebhook(List spans) { - if (spans.isEmpty()) { - return false; + if (track == Track.WEBHOOK) { + evpProxyStrategy.send(trackSpans); + } else if (track == Track.APM) { + apmStrategy.send(trackSpans); + } else { + throw new IllegalArgumentException("Unexpected track value: " + track); + } } - JSONObject span = spans.iterator().next(); - return span.get("level") != null; } private TraceWriteStrategy getCurrentStrategy() { diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/Span.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/Span.java 
new file mode 100644 index 000000000..bbb7a98f8 --- /dev/null +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/Span.java @@ -0,0 +1,25 @@ +package org.datadog.jenkins.plugins.datadog.traces.write; + +import javax.annotation.Nonnull; +import net.sf.json.JSONObject; + +public class Span { + + private final JSONObject payload; + private final Track track; + + public Span(@Nonnull JSONObject payload, @Nonnull Track track) { + this.payload = payload; + this.track = track; + } + + @Nonnull + public JSONObject getPayload() { + return payload; + } + + @Nonnull + public Track getTrack() { + return track; + } +} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java index c73a7f7a4..504cea8d4 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java @@ -2,15 +2,17 @@ import hudson.model.Run; import java.util.Collection; -import java.util.List; -import net.sf.json.JSONObject; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import org.datadog.jenkins.plugins.datadog.model.BuildData; import org.jenkinsci.plugins.workflow.graph.FlowNode; public interface TraceWriteStrategy { - JSONObject serialize(BuildData buildData, Run run); + @Nullable + Span createSpan(BuildData buildData, Run run); - Collection serialize(FlowNode flowNode, Run run); + @Nonnull + Collection createSpan(FlowNode flowNode, Run run); - void send(List spans); + void send(Collection spans); } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java index ce6e822ae..a74ad61de 100644 --- 
a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java @@ -2,14 +2,20 @@ import hudson.model.Run; import java.util.Collection; -import java.util.List; +import java.util.Collections; import java.util.function.Consumer; import java.util.logging.Logger; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; import net.sf.json.JSONObject; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; import org.datadog.jenkins.plugins.datadog.traces.DatadogBaseBuildLogic; import org.datadog.jenkins.plugins.datadog.traces.DatadogBasePipelineLogic; +import org.datadog.jenkins.plugins.datadog.traces.DatadogTraceBuildLogic; +import org.datadog.jenkins.plugins.datadog.traces.DatadogTracePipelineLogic; +import org.datadog.jenkins.plugins.datadog.traces.DatadogWebhookBuildLogic; +import org.datadog.jenkins.plugins.datadog.traces.DatadogWebhookPipelineLogic; import org.datadog.jenkins.plugins.datadog.util.CircuitBreaker; import org.jenkinsci.plugins.workflow.graph.FlowNode; @@ -17,13 +23,22 @@ public class TraceWriteStrategyImpl implements TraceWriteStrategy { private static final Logger logger = Logger.getLogger(TraceWriteStrategyImpl.class.getName()); + private final Track track; private final DatadogBaseBuildLogic buildLogic; private final DatadogBasePipelineLogic pipelineLogic; - private final CircuitBreaker> sendSpansCircuitBreaker; + private final CircuitBreaker> sendSpansCircuitBreaker; - public TraceWriteStrategyImpl(DatadogBaseBuildLogic buildLogic, DatadogBasePipelineLogic pipelineLogic, Consumer> spansSender) { - this.buildLogic = buildLogic; - this.pipelineLogic = pipelineLogic; + public TraceWriteStrategyImpl(Track track, Consumer> spansSender) { + if (track == Track.APM) { + this.buildLogic = new DatadogTraceBuildLogic(); + this.pipelineLogic = new 
DatadogTracePipelineLogic(); + } else if (track == Track.WEBHOOK) { + this.buildLogic = new DatadogWebhookBuildLogic(); + this.pipelineLogic = new DatadogWebhookPipelineLogic(); + } else { + throw new IllegalArgumentException("Unexpected track value: " + track); + } + this.track = track; this.sendSpansCircuitBreaker = new CircuitBreaker<>( spansSender, this::logTransportBroken, @@ -32,21 +47,24 @@ public TraceWriteStrategyImpl(DatadogBaseBuildLogic buildLogic, DatadogBasePipel } @Override - public JSONObject serialize(final BuildData buildData, final Run run) { - return buildLogic.finishBuildTrace(buildData, run); + public Span createSpan(final BuildData buildData, final Run run) { + JSONObject buildSpan = buildLogic.finishBuildTrace(buildData, run); + return buildSpan != null ? new Span(buildSpan, track) : null; } + @Nonnull @Override - public Collection serialize(FlowNode flowNode, Run run) { - return pipelineLogic.execute(flowNode, run); + public Collection createSpan(FlowNode flowNode, Run run) { + Collection stepSpans = pipelineLogic.execute(flowNode, run); + return stepSpans.stream().map(payload -> new Span(payload, track)).collect(Collectors.toList()); } @Override - public void send(List spans) { - sendSpansCircuitBreaker.accept(spans); + public void send(Collection serializationResult) { + sendSpansCircuitBreaker.accept(serializationResult); } - private void logTransportBroken(List spans) { + private void logTransportBroken(Collection spans) { logger.fine("Ignoring " + spans.size() + " because transport is broken"); } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java index f53d77633..c536ace20 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java @@ -9,7 +9,7 @@ import java.util.concurrent.TimeUnit; import 
java.util.concurrent.TimeoutException; import java.util.logging.Logger; -import net.sf.json.JSONObject; +import javax.annotation.Nullable; import org.datadog.jenkins.plugins.datadog.DatadogClient; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; @@ -31,7 +31,7 @@ public final class TraceWriter { private static final int DEFAULT_BATCH_SIZE_LIMIT = 100; private final TraceWriteStrategy traceWriteStrategy; - private final BlockingQueue queue; + private final BlockingQueue queue; private final Thread poller; public TraceWriter(DatadogClient datadogClient) { @@ -54,19 +54,19 @@ public void stopSynchronously() throws InterruptedException { } public void submitBuild(final BuildData buildData, final Run run) throws InterruptedException, TimeoutException { - JSONObject buildJson = traceWriteStrategy.serialize(buildData, run); - submit(buildJson); + Span span = traceWriteStrategy.createSpan(buildData, run); + submit(span); } public void submitPipelineStep(FlowNode flowNode, Run run) throws InterruptedException, TimeoutException { - Collection nodeJsons = traceWriteStrategy.serialize(flowNode, run); - for (JSONObject nodeJson : nodeJsons) { - submit(nodeJson); + Collection spans = traceWriteStrategy.createSpan(flowNode, run); + for (Span span : spans) { + submit(span); } } - private void submit(JSONObject json) throws InterruptedException, TimeoutException { - if (!queue.offer(json, getEnv(SUBMIT_TIMEOUT_ENV_VAR, DEFAULT_SUBMIT_TIMEOUT_SECONDS), TimeUnit.SECONDS)) { + private void submit(@Nullable Span span) throws InterruptedException, TimeoutException { + if (span != null && !queue.offer(span, getEnv(SUBMIT_TIMEOUT_ENV_VAR, DEFAULT_SUBMIT_TIMEOUT_SECONDS), TimeUnit.SECONDS)) { throw new TimeoutException("Timed out while submitting span"); } } @@ -75,14 +75,14 @@ private void runPollingLoop() { long stopPollingAt = Long.MAX_VALUE; while (System.currentTimeMillis() < stopPollingAt) { try { - JSONObject 
span = queue.poll(getEnv(POLLING_TIMEOUT_ENV_VAR, DEFAULT_POLLING_TIMEOUT_SECONDS), TimeUnit.SECONDS); + Span span = queue.poll(getEnv(POLLING_TIMEOUT_ENV_VAR, DEFAULT_POLLING_TIMEOUT_SECONDS), TimeUnit.SECONDS); if (span == null) { // nothing to send continue; } int batchSize = getEnv(BATCH_SIZE_LIMIT_ENV_VAR, DEFAULT_BATCH_SIZE_LIMIT); - List spans = new ArrayList<>(batchSize); + List spans = new ArrayList<>(batchSize); spans.add(span); queue.drainTo(spans, batchSize - 1); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/Track.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/Track.java new file mode 100644 index 000000000..5114713be --- /dev/null +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/Track.java @@ -0,0 +1,3 @@ +package org.datadog.jenkins.plugins.datadog.traces.write; + +public enum Track {APM, WEBHOOK} diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/util/CircuitBreaker.java b/src/main/java/org/datadog/jenkins/plugins/datadog/util/CircuitBreaker.java index 0cd5d5126..5b1f31da1 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/util/CircuitBreaker.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/util/CircuitBreaker.java @@ -75,9 +75,4 @@ public synchronized void accept(T t) { fallback.accept(t); } } - - public synchronized void reset() { - healthy = true; - healthCheckDelayMillis = minHealthCheckDelayMillis; - } } diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java b/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java index 098351110..36c810096 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java @@ -38,6 +38,7 @@ of this software and associated documentation files (the "Software"), to deal import java.util.concurrent.LinkedBlockingQueue; import 
java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import net.sf.json.JSONObject; import org.datadog.jenkins.plugins.datadog.DatadogClient; import org.datadog.jenkins.plugins.datadog.DatadogEvent; @@ -48,7 +49,9 @@ of this software and associated documentation files (the "Software"), to deal import org.datadog.jenkins.plugins.datadog.traces.DatadogWebhookPipelineLogic; import org.datadog.jenkins.plugins.datadog.traces.mapper.JsonTraceSpanMapper; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; +import org.datadog.jenkins.plugins.datadog.traces.write.Span; import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriteStrategy; +import org.datadog.jenkins.plugins.datadog.traces.write.Track; import org.jenkinsci.plugins.workflow.graph.FlowNode; import org.junit.Assert; @@ -265,34 +268,43 @@ private static final class StubTraceWriteStrategy implements TraceWriteStrategy private final Collection webhooks = new LinkedBlockingQueue<>(); @Override - public JSONObject serialize(BuildData buildData, Run run) { + public Span createSpan(BuildData buildData, Run run) { if (isWebhook) { JSONObject json = new DatadogWebhookBuildLogic().finishBuildTrace(buildData, run); + if (json == null) { + return null; + } webhooks.add(json); - return json; + return new Span(json, Track.WEBHOOK); } else { TraceSpan span = new DatadogTraceBuildLogic().createSpan(buildData, run); + if (span == null) { + return null; + } traces.add(span); - return new JsonTraceSpanMapper().map(span); + JSONObject json = new JsonTraceSpanMapper().map(span); + return new Span(json, Track.APM); } } + @Nonnull @Override - public Collection serialize(FlowNode flowNode, Run run) { + public Collection createSpan(FlowNode flowNode, Run run) { if (isWebhook) { - Collection spans = new DatadogWebhookPipelineLogic().execute(flowNode, run); - webhooks.addAll(spans); - return spans; + Collection jsons = new 
DatadogWebhookPipelineLogic().execute(flowNode, run); + webhooks.addAll(jsons); + return jsons.stream().map(payload -> new Span(payload, Track.WEBHOOK)).collect(Collectors.toList()); } else { Collection traceSpans = new DatadogTracePipelineLogic().collectTraces(flowNode, run); traces.addAll(traceSpans); JsonTraceSpanMapper mapper = new JsonTraceSpanMapper(); - return traceSpans.stream().map(mapper::map).collect(Collectors.toList()); + List jsons = traceSpans.stream().map(mapper::map).collect(Collectors.toList()); + return jsons.stream().map(payload -> new Span(payload, Track.APM)).collect(Collectors.toList()); } } @Override - public void send(List spans) { + public void send(Collection spans) { // no op } From eeeb4c2b3315d985601df1fe375722985fba1044 Mon Sep 17 00:00:00 2001 From: Nikita Tkachenko Date: Fri, 26 Jan 2024 17:50:33 +0100 Subject: [PATCH 13/17] Fix NPE --- .../plugins/datadog/clients/DatadogApiClient.java | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogApiClient.java b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogApiClient.java index 23ccd88df..0fba0167e 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogApiClient.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogApiClient.java @@ -25,6 +25,7 @@ of this software and associated documentation files (the "Software"), to deal package org.datadog.jenkins.plugins.datadog.clients; +import com.google.common.base.Objects; import hudson.util.Secret; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -126,10 +127,10 @@ public static DatadogClient getInstance(String url, String logIntakeUrl, String } private static boolean configurationChanged(String url, String logIntakeUrl, String webhookIntakeUrl, Secret apiKey){ - return !instance.getUrl().equals(url) || - !instance.getLogIntakeUrl().equals(logIntakeUrl) || - 
!instance.getWebhookIntakeUrl().equals(webhookIntakeUrl) || - !instance.getApiKey().equals(apiKey); + return !Objects.equal(instance.getUrl(), url) || + !Objects.equal(instance.getLogIntakeUrl(), logIntakeUrl) || + !Objects.equal(instance.getWebhookIntakeUrl(), webhookIntakeUrl) || + !Objects.equal(instance.getApiKey(), apiKey); } private DatadogApiClient(String url, String logIntakeUrl, String webhookIntakeUrl, Secret apiKey) { From 1756c79e9c01cdfb53dbeada08747d9b6bcde385 Mon Sep 17 00:00:00 2001 From: Nikita Tkachenko Date: Fri, 26 Jan 2024 18:05:39 +0100 Subject: [PATCH 14/17] Renamed some classes and methods --- .../datadog/clients/DatadogAgentClient.java | 14 +++++++------- .../datadog/clients/DatadogApiClient.java | 8 ++++---- .../traces/write/AgentTraceWriteStrategy.java | 16 ++++++++-------- .../traces/write/{Span.java => Payload.java} | 12 ++++++------ .../traces/write/TraceWriteStrategy.java | 6 +++--- .../traces/write/TraceWriteStrategyImpl.java | 17 ++++++++--------- .../datadog/traces/write/TraceWriter.java | 14 +++++++------- .../plugins/datadog/traces/write/Track.java | 4 +++- .../datadog/clients/DatadogClientStub.java | 16 ++++++++-------- 9 files changed, 54 insertions(+), 53 deletions(-) rename src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/{Span.java => Payload.java} (57%) diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java index 8507db19b..9fab0011c 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java @@ -51,7 +51,7 @@ of this software and associated documentation files (the "Software"), to deal import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.traces.mapper.JsonTraceSpanMapper; import 
org.datadog.jenkins.plugins.datadog.traces.write.AgentTraceWriteStrategy; -import org.datadog.jenkins.plugins.datadog.traces.write.Span; +import org.datadog.jenkins.plugins.datadog.traces.write.Payload; import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriteStrategy; import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriteStrategyImpl; import org.datadog.jenkins.plugins.datadog.traces.write.Track; @@ -536,7 +536,7 @@ boolean isEvpProxySupported() { /** * Posts a given payload to the Agent EVP Proxy, so it is forwarded to the Webhook Intake. */ - private void sendSpansToWebhook(Collection spans) { + private void sendSpansToWebhook(Collection spans) { DatadogGlobalConfiguration datadogGlobalDescriptor = DatadogUtilities.getDatadogGlobalDescriptor(); String urlParameters = datadogGlobalDescriptor != null ? "?service=" + datadogGlobalDescriptor.getCiInstanceName() : ""; String url = String.format("http://%s:%d/evp_proxy/v1/api/v2/webhook/%s", hostname, traceCollectionPort, urlParameters); @@ -545,13 +545,13 @@ private void sendSpansToWebhook(Collection spans) { headers.put("X-Datadog-EVP-Subdomain", "webhook-intake"); headers.put("DD-CI-PROVIDER-NAME", "jenkins"); - for (Span span : spans) { + for (Payload span : spans) { if (span.getTrack() != Track.WEBHOOK) { logger.severe("Expected webhook track, got " + span.getTrack() + ", dropping span"); continue; } - byte[] body = span.getPayload().toString().getBytes(StandardCharsets.UTF_8); + byte[] body = span.getJson().toString().getBytes(StandardCharsets.UTF_8); // webhook intake does not support batch requests logger.fine("Sending webhook"); @@ -559,15 +559,15 @@ private void sendSpansToWebhook(Collection spans) { } } - private void sendSpansToApm(Collection spans) { + private void sendSpansToApm(Collection spans) { try { Map tracesById = new HashMap<>(); - for (Span span : spans) { + for (Payload span : spans) { if (span.getTrack() != Track.APM) { logger.severe("Expected APM track, got " + 
span.getTrack() + ", dropping span"); continue; } - tracesById.computeIfAbsent(span.getPayload().getString(JsonTraceSpanMapper.TRACE_ID), k -> new net.sf.json.JSONArray()).add(span.getPayload()); + tracesById.computeIfAbsent(span.getJson().getString(JsonTraceSpanMapper.TRACE_ID), k -> new net.sf.json.JSONArray()).add(span.getJson()); } final JSONArray jsonTraces = new JSONArray(); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogApiClient.java b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogApiClient.java index 0fba0167e..2573dbe1f 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogApiClient.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogApiClient.java @@ -46,7 +46,7 @@ of this software and associated documentation files (the "Software"), to deal import org.datadog.jenkins.plugins.datadog.DatadogEvent; import org.datadog.jenkins.plugins.datadog.DatadogGlobalConfiguration; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; -import org.datadog.jenkins.plugins.datadog.traces.write.Span; +import org.datadog.jenkins.plugins.datadog.traces.write.Payload; import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriteStrategy; import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriteStrategyImpl; import org.datadog.jenkins.plugins.datadog.traces.write.Track; @@ -495,7 +495,7 @@ public TraceWriteStrategy createTraceWriteStrategy() { return new TraceWriteStrategyImpl(Track.WEBHOOK, this::sendSpans); } - private void sendSpans(Collection spans) { + private void sendSpans(Collection spans) { if (this.webhookIntakeConnectionBroken) { throw new RuntimeException("Your client is not initialized properly; webhook intake connection is broken."); } @@ -508,13 +508,13 @@ private void sendSpans(Collection spans) { headers.put("DD-API-KEY", Secret.toString(apiKey)); headers.put("DD-CI-PROVIDER-NAME", "jenkins"); - for (Span span : spans) { + for (Payload span : 
spans) { if (span.getTrack() != Track.WEBHOOK) { logger.severe("Expected webhook track, got " + span.getTrack() + ", dropping span"); continue; } - byte[] body = span.getPayload().toString().getBytes(StandardCharsets.UTF_8); + byte[] body = span.getJson().toString().getBytes(StandardCharsets.UTF_8); // webhook intake does not support batch requests logger.fine("Sending webhook"); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java index e3c206281..04d875f54 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java @@ -47,24 +47,24 @@ public AgentTraceWriteStrategy(TraceWriteStrategy evpProxyStrategy, TraceWriteSt } @Override - public Span createSpan(BuildData buildData, Run run) { - return getCurrentStrategy().createSpan(buildData, run); + public Payload serialize(BuildData buildData, Run run) { + return getCurrentStrategy().serialize(buildData, run); } @Nonnull @Override - public Collection createSpan(FlowNode flowNode, Run run) { - return getCurrentStrategy().createSpan(flowNode, run); + public Collection serialize(FlowNode flowNode, Run run) { + return getCurrentStrategy().serialize(flowNode, run); } @Override - public void send(Collection spans) { + public void send(Collection spans) { // we have to check the track for every span, // because the serialization strategy might've changed in between serialize() and send() - Map> spansByTrack = spans.stream().collect(Collectors.groupingBy(Span::getTrack)); - for (Map.Entry> e : spansByTrack.entrySet()) { + Map> spansByTrack = spans.stream().collect(Collectors.groupingBy(Payload::getTrack)); + for (Map.Entry> e : spansByTrack.entrySet()) { Track track = e.getKey(); - List trackSpans = e.getValue(); + List trackSpans = 
e.getValue(); if (track == Track.WEBHOOK) { evpProxyStrategy.send(trackSpans); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/Span.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/Payload.java similarity index 57% rename from src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/Span.java rename to src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/Payload.java index bbb7a98f8..d9c05fd13 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/Span.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/Payload.java @@ -3,19 +3,19 @@ import javax.annotation.Nonnull; import net.sf.json.JSONObject; -public class Span { +public class Payload { - private final JSONObject payload; + private final JSONObject json; private final Track track; - public Span(@Nonnull JSONObject payload, @Nonnull Track track) { - this.payload = payload; + public Payload(@Nonnull JSONObject json, @Nonnull Track track) { + this.json = json; this.track = track; } @Nonnull - public JSONObject getPayload() { - return payload; + public JSONObject getJson() { + return json; } @Nonnull diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java index 504cea8d4..ce35751fd 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java @@ -9,10 +9,10 @@ public interface TraceWriteStrategy { @Nullable - Span createSpan(BuildData buildData, Run run); + Payload serialize(BuildData buildData, Run run); @Nonnull - Collection createSpan(FlowNode flowNode, Run run); + Collection serialize(FlowNode flowNode, Run run); - void send(Collection spans); + void send(Collection spans); } diff --git 
a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java index a74ad61de..39cbd4a96 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java @@ -2,7 +2,6 @@ import hudson.model.Run; import java.util.Collection; -import java.util.Collections; import java.util.function.Consumer; import java.util.logging.Logger; import java.util.stream.Collectors; @@ -26,9 +25,9 @@ public class TraceWriteStrategyImpl implements TraceWriteStrategy { private final Track track; private final DatadogBaseBuildLogic buildLogic; private final DatadogBasePipelineLogic pipelineLogic; - private final CircuitBreaker> sendSpansCircuitBreaker; + private final CircuitBreaker> sendSpansCircuitBreaker; - public TraceWriteStrategyImpl(Track track, Consumer> spansSender) { + public TraceWriteStrategyImpl(Track track, Consumer> spansSender) { if (track == Track.APM) { this.buildLogic = new DatadogTraceBuildLogic(); this.pipelineLogic = new DatadogTracePipelineLogic(); @@ -47,24 +46,24 @@ public TraceWriteStrategyImpl(Track track, Consumer> spansSende } @Override - public Span createSpan(final BuildData buildData, final Run run) { + public Payload serialize(final BuildData buildData, final Run run) { JSONObject buildSpan = buildLogic.finishBuildTrace(buildData, run); - return buildSpan != null ? new Span(buildSpan, track) : null; + return buildSpan != null ? 
new Payload(buildSpan, track) : null; } @Nonnull @Override - public Collection createSpan(FlowNode flowNode, Run run) { + public Collection serialize(FlowNode flowNode, Run run) { Collection stepSpans = pipelineLogic.execute(flowNode, run); - return stepSpans.stream().map(payload -> new Span(payload, track)).collect(Collectors.toList()); + return stepSpans.stream().map(payload -> new Payload(payload, track)).collect(Collectors.toList()); } @Override - public void send(Collection serializationResult) { + public void send(Collection serializationResult) { sendSpansCircuitBreaker.accept(serializationResult); } - private void logTransportBroken(Collection spans) { + private void logTransportBroken(Collection spans) { logger.fine("Ignoring " + spans.size() + " because transport is broken"); } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java index c536ace20..7d911a635 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java @@ -31,7 +31,7 @@ public final class TraceWriter { private static final int DEFAULT_BATCH_SIZE_LIMIT = 100; private final TraceWriteStrategy traceWriteStrategy; - private final BlockingQueue queue; + private final BlockingQueue queue; private final Thread poller; public TraceWriter(DatadogClient datadogClient) { @@ -54,18 +54,18 @@ public void stopSynchronously() throws InterruptedException { } public void submitBuild(final BuildData buildData, final Run run) throws InterruptedException, TimeoutException { - Span span = traceWriteStrategy.createSpan(buildData, run); + Payload span = traceWriteStrategy.serialize(buildData, run); submit(span); } public void submitPipelineStep(FlowNode flowNode, Run run) throws InterruptedException, TimeoutException { - Collection spans = traceWriteStrategy.createSpan(flowNode, 
run); - for (Span span : spans) { + Collection spans = traceWriteStrategy.serialize(flowNode, run); + for (Payload span : spans) { submit(span); } } - private void submit(@Nullable Span span) throws InterruptedException, TimeoutException { + private void submit(@Nullable Payload span) throws InterruptedException, TimeoutException { if (span != null && !queue.offer(span, getEnv(SUBMIT_TIMEOUT_ENV_VAR, DEFAULT_SUBMIT_TIMEOUT_SECONDS), TimeUnit.SECONDS)) { throw new TimeoutException("Timed out while submitting span"); } @@ -75,14 +75,14 @@ private void runPollingLoop() { long stopPollingAt = Long.MAX_VALUE; while (System.currentTimeMillis() < stopPollingAt) { try { - Span span = queue.poll(getEnv(POLLING_TIMEOUT_ENV_VAR, DEFAULT_POLLING_TIMEOUT_SECONDS), TimeUnit.SECONDS); + Payload span = queue.poll(getEnv(POLLING_TIMEOUT_ENV_VAR, DEFAULT_POLLING_TIMEOUT_SECONDS), TimeUnit.SECONDS); if (span == null) { // nothing to send continue; } int batchSize = getEnv(BATCH_SIZE_LIMIT_ENV_VAR, DEFAULT_BATCH_SIZE_LIMIT); - List spans = new ArrayList<>(batchSize); + List spans = new ArrayList<>(batchSize); spans.add(span); queue.drainTo(spans, batchSize - 1); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/Track.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/Track.java index 5114713be..e3b756536 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/Track.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/Track.java @@ -1,3 +1,5 @@ package org.datadog.jenkins.plugins.datadog.traces.write; -public enum Track {APM, WEBHOOK} +public enum Track { + APM, WEBHOOK +} diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java b/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java index 36c810096..0fd5e589e 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java +++ 
b/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java @@ -49,7 +49,7 @@ of this software and associated documentation files (the "Software"), to deal import org.datadog.jenkins.plugins.datadog.traces.DatadogWebhookPipelineLogic; import org.datadog.jenkins.plugins.datadog.traces.mapper.JsonTraceSpanMapper; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; -import org.datadog.jenkins.plugins.datadog.traces.write.Span; +import org.datadog.jenkins.plugins.datadog.traces.write.Payload; import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriteStrategy; import org.datadog.jenkins.plugins.datadog.traces.write.Track; import org.jenkinsci.plugins.workflow.graph.FlowNode; @@ -268,14 +268,14 @@ private static final class StubTraceWriteStrategy implements TraceWriteStrategy private final Collection webhooks = new LinkedBlockingQueue<>(); @Override - public Span createSpan(BuildData buildData, Run run) { + public Payload serialize(BuildData buildData, Run run) { if (isWebhook) { JSONObject json = new DatadogWebhookBuildLogic().finishBuildTrace(buildData, run); if (json == null) { return null; } webhooks.add(json); - return new Span(json, Track.WEBHOOK); + return new Payload(json, Track.WEBHOOK); } else { TraceSpan span = new DatadogTraceBuildLogic().createSpan(buildData, run); if (span == null) { @@ -283,28 +283,28 @@ public Span createSpan(BuildData buildData, Run run) { } traces.add(span); JSONObject json = new JsonTraceSpanMapper().map(span); - return new Span(json, Track.APM); + return new Payload(json, Track.APM); } } @Nonnull @Override - public Collection createSpan(FlowNode flowNode, Run run) { + public Collection serialize(FlowNode flowNode, Run run) { if (isWebhook) { Collection jsons = new DatadogWebhookPipelineLogic().execute(flowNode, run); webhooks.addAll(jsons); - return jsons.stream().map(payload -> new Span(payload, Track.WEBHOOK)).collect(Collectors.toList()); + return jsons.stream().map(payload -> new 
Payload(payload, Track.WEBHOOK)).collect(Collectors.toList()); } else { Collection traceSpans = new DatadogTracePipelineLogic().collectTraces(flowNode, run); traces.addAll(traceSpans); JsonTraceSpanMapper mapper = new JsonTraceSpanMapper(); List jsons = traceSpans.stream().map(mapper::map).collect(Collectors.toList()); - return jsons.stream().map(payload -> new Span(payload, Track.APM)).collect(Collectors.toList()); + return jsons.stream().map(payload -> new Payload(payload, Track.APM)).collect(Collectors.toList()); } } @Override - public void send(Collection spans) { + public void send(Collection spans) { // no op } From 9f40662ac692fe4903c4565914ee362ef1bb640d Mon Sep 17 00:00:00 2001 From: Nikita Tkachenko Date: Tue, 30 Jan 2024 15:08:08 +0100 Subject: [PATCH 15/17] Address review comments --- .../plugins/datadog/DatadogUtilities.java | 39 +++++++---- .../listeners/DatadogGraphListener.java | 44 ++++++++++-- .../listeners/DatadogStepListener.java | 12 ++++ .../plugins/datadog/logs/DatadogWriter.java | 4 +- .../plugins/datadog/model/BuildData.java | 17 ++++- .../datadog/model/GitRepositoryAction.java | 4 ++ ...ipelineNode.java => PipelineStepData.java} | 36 +++++++--- .../datadog/model/TraceInfoAction.java | 20 ++++++ .../datadog/traces/DatadogBaseBuildLogic.java | 2 + .../traces/DatadogBasePipelineLogic.java | 22 +++--- .../traces/DatadogTraceBuildLogic.java | 12 ++-- .../traces/DatadogTracePipelineLogic.java | 16 ++--- .../traces/DatadogWebhookBuildLogic.java | 12 ++-- .../traces/DatadogWebhookPipelineLogic.java | 4 +- .../traces/write/AgentTraceWriteStrategy.java | 6 +- .../traces/write/TraceWriteStrategy.java | 4 +- .../traces/write/TraceWriteStrategyImpl.java | 6 +- .../datadog/traces/write/TraceWriter.java | 6 +- .../datadog/util/git/RepositoryInfo.java | 4 ++ .../plugins/datadog/DatadogUtilitiesTest.java | 68 +++++++++++++------ .../datadog/clients/DatadogClientStub.java | 8 +-- .../listeners/DatadogBuildListenerIT.java | 8 +-- 
.../listeners/DatadogGraphListenerTest.java | 38 +++++------ 23 files changed, 266 insertions(+), 126 deletions(-) rename src/main/java/org/datadog/jenkins/plugins/datadog/model/{BuildPipelineNode.java => PipelineStepData.java} (85%) diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java b/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java index 6eac713f9..1c8836eec 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/DatadogUtilities.java @@ -27,6 +27,7 @@ of this software and associated documentation files (the "Software"), to deal import hudson.EnvVars; import hudson.ExtensionList; +import hudson.model.Actionable; import hudson.model.Computer; import hudson.model.Item; import hudson.model.Result; @@ -628,7 +629,7 @@ public static Boolean isValidHostname(String hostname) { return m.find(); } - private static boolean isPrivateIPv4Address(String ipAddress) { + static boolean isPrivateIPv4Address(String ipAddress) { if (ipAddress == null || ipAddress.isEmpty()) { return false; } @@ -640,7 +641,21 @@ private static boolean isPrivateIPv4Address(String ipAddress) { try { int firstOctet = Integer.parseInt(parts[0]); + if (!isWithinIPv4OctetRange(firstOctet)) { + return false; + } int secondOctet = Integer.parseInt(parts[1]); + if (!isWithinIPv4OctetRange(secondOctet)) { + return false; + } + int thirdOctet = Integer.parseInt(parts[2]); + if (!isWithinIPv4OctetRange(thirdOctet)) { + return false; + } + int fourthOctet = Integer.parseInt(parts[3]); + if (!isWithinIPv4OctetRange(fourthOctet)) { + return false; + } if (firstOctet == 10) { return true; @@ -655,6 +670,10 @@ private static boolean isPrivateIPv4Address(String ipAddress) { } } + private static boolean isWithinIPv4OctetRange(int number) { + return number >= 0 && number <= 255; + } + public static Map> getComputerTags(Computer computer) { Set labels = null; try { @@ -942,21 
+961,13 @@ public static String toJson(final Map map) { /** * Removes all actions related to traces for Jenkins pipelines. * - * @param run the current run. + * @param actionable a domain object that can contain actions, such as run or flow node. */ - public static void cleanUpTraceActions(final Run run) { - if (run != null) { - // Each call to removeActions triggers persisting run data to disc. - // To avoid writing to disc multiple times, we only call removeActions once with the marker interface as the argument. - run.removeActions(DatadogPluginAction.class); - } - } - - public static void cleanUpTraceActions(FlowNode flowNode) { - if (flowNode != null) { - // Each call to removeActions triggers persisting node data to disc. + public static void cleanUpTraceActions(final Actionable actionable) { + if (actionable != null) { + // Each call to removeActions triggers persisting data to disc. // To avoid writing to disc multiple times, we only call removeActions once with the marker interface as the argument. 
- flowNode.removeActions(DatadogPluginAction.class); + actionable.removeActions(DatadogPluginAction.class); } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java index 7a30ac43c..dee696bce 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java @@ -43,8 +43,10 @@ of this software and associated documentation files (the "Software"), to deal import org.datadog.jenkins.plugins.datadog.clients.ClientFactory; import org.datadog.jenkins.plugins.datadog.clients.Metrics; import org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; +import org.datadog.jenkins.plugins.datadog.model.DatadogPluginAction; +import org.datadog.jenkins.plugins.datadog.model.PipelineStepData; import org.datadog.jenkins.plugins.datadog.model.Status; +import org.datadog.jenkins.plugins.datadog.model.TraceInfoAction; import org.datadog.jenkins.plugins.datadog.model.node.NodeInfoAction; import org.datadog.jenkins.plugins.datadog.model.node.StatusAction; import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriter; @@ -146,6 +148,36 @@ public void onNewHead(FlowNode flowNode) { } } + /** + * This is the method responsible for tracing pipeline steps. + * The start and finish of every pipeline step is reported to pipeline "graph" listeners as an instance of {@code FlowNode}. + * The nodes in a pipeline graph can be divided into two categories: + *

    + *
  1. Atomic steps. Since these steps are "atomic", we only receive one event for such steps ({@link StepAtomNode}) that is emitted before the step starts
  2. + *
  3. Block steps. Blocks are compound steps that can contain atomic steps or other blocks. For every block we receive two events: {@link BlockStartNode} and {@link BlockEndNode} that correspond to the start and end of the block respectively.
  4. + *
+ * + * We trace all atomic steps, but not all blocks. + * We only trace those blocks that conform to our stage definition in {@link DatadogUtilities#isStageNode(FlowNode)}. + * + *

+ * When a step that we trace finishes, we do the following: + *

    + *
  • Create a domain object that contains traced data ({@link PipelineStepData})
  • + *
  • Propagate step status to its parents (if needed)
  • + *
  • Submit step data for serialization and dispatch
  • + *
+ * + * When gathering traced data, children steps will need to know certain information about their parent + * (first and foremost, the parent's span ID for creating parent-child links in Datadog - see {@link TraceInfoAction}), + * and parent steps will need to know certain information about their children + * (for example, if a children execution finished with an error, this error might need to be propagated to parent). + * This information is stored in various children of {@link DatadogPluginAction}. + * Those actions that store data relevant for a specific step are attached to {@link FlowNode}. + * Those that store data relevant for the pipeline as a whole are attached to {@link WorkflowRun}. + * These actions are persisted to disk along with the rest of the pipeline/node state, + * so it is important to make sure that they contain as little data as possible, and that they are removed once no longer needed. + */ private void processNode(WorkflowRun run, FlowNode flowNode) { try { List parents = flowNode.getParents(); @@ -174,12 +206,12 @@ private void processStageNode(WorkflowRun run, BlockEndNode blockEndNode) { private void processNode(WorkflowRun run, FlowNode node, FlowNode nextNode) { try { - BuildPipelineNode pipelineNode = buildPipelineNode(run, node, nextNode); + PipelineStepData stepData = buildStepData(run, node, nextNode); propagateStatus(node, nextNode); TraceWriter traceWriter = TraceWriterFactory.getTraceWriter(); if (traceWriter != null) { - traceWriter.submitPipelineStep(pipelineNode, run); + traceWriter.submitPipelineStep(stepData, run); } } catch (InterruptedException e) { @@ -251,14 +283,14 @@ private static Status getPropagatedStatus(FlowNode node, @Nullable FlowNode next } } - private BuildPipelineNode buildPipelineNode(WorkflowRun run, FlowNode node, FlowNode nextNode) { + private PipelineStepData buildStepData(WorkflowRun run, FlowNode node, FlowNode nextNode) { long start = System.currentTimeMillis(); try { if (node instanceof 
StepAtomNode) { - return new BuildPipelineNode(run, (StepAtomNode) node, nextNode); + return new PipelineStepData(run, (StepAtomNode) node, nextNode); } else if (node instanceof BlockEndNode) { BlockEndNode endNode = (BlockEndNode) node; - return new BuildPipelineNode(run, endNode.getStartNode(), endNode); + return new PipelineStepData(run, endNode.getStartNode(), endNode); } else { throw new IllegalArgumentException("Unexpected flow node type: " + node); } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java index 7b344e1c1..e87acc6ce 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java @@ -236,6 +236,18 @@ private void updateBuildData(Run run, Map envVars) { } } + /** + * Examine the step's environment to see if it contains any variables that hold git-related data. + * It could be variables that are set manually by the pipeline authors (such variables will have {@code DD_} prefix), + * or variables that are automatically set by the Jenkins Git Plugin. + *
<p>
+ * Whatever data we manage to extract, we save in {@link GitCommitAction} that is associated with the pipeline. + * It'll later be used to populate git tags both in the pipeline span, and in the spans that correspond to other pipeline steps. + *
<p>
+ * The reason we examine step environment, rather than checking the pipeline environment (even though the pipeline has its own copy of {@link EnvVars}) + * is that the pipeline env is minimal and misses many env vars, including the ones that are set manually, + * while the step env contains much more data. + */ private static void updateGitData(Run run, Map envVars) { GitCommitAction commitAction = run.getAction(GitCommitAction.class); if (commitAction != null) { diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/logs/DatadogWriter.java b/src/main/java/org/datadog/jenkins/plugins/datadog/logs/DatadogWriter.java index 82ae7f6a1..06f5a36ae 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/logs/DatadogWriter.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/logs/DatadogWriter.java @@ -31,7 +31,7 @@ of this software and associated documentation files (the "Software"), to deal import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.clients.ClientFactory; import org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; +import org.datadog.jenkins.plugins.datadog.model.PipelineStepData; import org.datadog.jenkins.plugins.datadog.traces.CITags; import org.datadog.jenkins.plugins.datadog.util.TagsUtil; @@ -74,7 +74,7 @@ public void write(String line) { payload.put("ddsource", "jenkins"); payload.put("service", "jenkins"); payload.put("timestamp", System.currentTimeMillis()); - payload.put(BuildPipelineNode.NodeType.PIPELINE.getTagName() + CITags._NAME, this.buildData.getBaseJobName("")); + payload.put(PipelineStepData.StepType.PIPELINE.getTagName() + CITags._NAME, this.buildData.getBaseJobName("")); // Get Datadog Client Instance DatadogClient client = ClientFactory.getClient(); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java 
b/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java index 37e8b903d..b89802ad7 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java @@ -388,10 +388,22 @@ private void populateEnvVariables(EnvVars envVars){ this.promotedJobFullName = envVars.get("PROMOTED_JOB_FULL_NAME"); } - /** * Populate git commit related information in the BuildData instance. - * @param run + * The data is retrieved from {@link GitRepositoryAction} and {@link GitCommitAction} that are associated with the build. + * The actions are populated from two main sources: + *
<p>
    + *
<ol>
+ *   <li>Environment variables of specific pipeline steps:
+ * pipeline object has its own set of env variables, but it is minimal;
+ * the whole set of env variables
+ * (including those that are set by Jenkins Git Plugin or manually by the pipeline authors)
+ * is only available for individual pipeline steps.
+ * That is why in {@link org.datadog.jenkins.plugins.datadog.listeners.DatadogStepListener}
+ * we examine the full set of env vars to see if we can extract any git-related info</li>
+ *   <li>Git repositories that were checked out during pipeline execution:
+ * {@link org.datadog.jenkins.plugins.datadog.listeners.DatadogSCMListener} is notified of every source-code checkout.
+ * If the checked out repo is a git repository, we create a git client and examine repository metadata</li>
+ * </ol>
+ *
*/ private void populateGitVariables(Run run) { GitRepositoryAction gitRepositoryAction = run.getAction(GitRepositoryAction.class); @@ -404,7 +416,6 @@ private void populateGitVariables(Run run) { /** * Populate the information related to the commit (message, author and committer) based on the GitCommitAction * only if the user has not set the value manually. - * @param gitCommitAction */ private void populateCommitInfo(GitCommitAction gitCommitAction) { if(gitCommitAction != null) { diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryAction.java index f99ccd078..d0145c8f5 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryAction.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryAction.java @@ -6,6 +6,7 @@ import com.thoughtworks.xstream.io.HierarchicalStreamReader; import com.thoughtworks.xstream.io.HierarchicalStreamWriter; import java.util.Objects; +import javax.annotation.Nullable; import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter; /** @@ -28,6 +29,7 @@ public GitRepositoryAction(String repositoryURL, String defaultBranch, String br this.branch = branch; } + @Nullable public String getRepositoryURL() { return repositoryURL; } @@ -36,6 +38,7 @@ public void setRepositoryURL(String repositoryURL) { this.repositoryURL = repositoryURL; } + @Nullable public String getDefaultBranch() { return defaultBranch; } @@ -44,6 +47,7 @@ public void setDefaultBranch(String defaultBranch) { this.defaultBranch = defaultBranch; } + @Nullable public String getBranch() { return branch; } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildPipelineNode.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineStepData.java similarity index 85% rename from src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildPipelineNode.java rename to 
src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineStepData.java index 479f0bdb8..e2451eaf7 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildPipelineNode.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineStepData.java @@ -18,11 +18,11 @@ import org.jenkinsci.plugins.workflow.graph.FlowNode; /** - * Represent a stage of the Jenkins Pipeline. + * Represents a step in a Jenkins Pipeline. */ -public class BuildPipelineNode { +public class PipelineStepData { - public enum NodeType { + public enum StepType { PIPELINE("ci.pipeline", "pipeline"), STAGE("ci.stage", "stage"), STEP("ci.job", "job"); @@ -30,7 +30,7 @@ public enum NodeType { private final String tagName; private final String buildLevel; - NodeType(final String tagName, final String buildLevel) { + StepType(final String tagName, final String buildLevel) { this.tagName = tagName; this.buildLevel = buildLevel; } @@ -49,7 +49,7 @@ public String getBuildLevel() { private String stageId; private String stageName; - private NodeType type; + private StepType type; private Map args; private String workspace; private String nodeName; @@ -71,10 +71,10 @@ public String getBuildLevel() { private long parentSpanId = -1; private long traceId; - public BuildPipelineNode(final Run run, final BlockStartNode startNode, final BlockEndNode endNode) { + public PipelineStepData(final Run run, final BlockStartNode startNode, final BlockEndNode endNode) { this(run, startNode); - this.type = NodeType.STAGE; + this.type = StepType.STAGE; this.startTimeMicros = TimeUnit.MILLISECONDS.toMicros(DatadogUtilities.getTimeMillis(startNode)); if (startTimeMicros < 0) { @@ -100,10 +100,10 @@ public BuildPipelineNode(final Run run, final BlockStartNode startNode, fi } } - public BuildPipelineNode(final Run run, final StepAtomNode stepNode, final FlowNode nextNode) { + public PipelineStepData(final Run run, final StepAtomNode stepNode, final FlowNode nextNode) { this(run, stepNode); - 
this.type = NodeType.STEP; + this.type = StepType.STEP; this.startTimeMicros = TimeUnit.MILLISECONDS.toMicros(DatadogUtilities.getTimeMillis(stepNode)); if (startTimeMicros < 0) { @@ -132,9 +132,14 @@ public BuildPipelineNode(final Run run, final StepAtomNode stepNode, final } } - private BuildPipelineNode(final Run run, FlowNode startNode) { + private PipelineStepData(final Run run, FlowNode startNode) { TraceInfoAction traceInfoAction = run.getAction(TraceInfoAction.class); if (traceInfoAction != null) { + /* + * Use "remove-or-create" semantics: + * - if the ID is there in the action, remove it since it is no longer needed (we're about to submit this node and be done with it) + * - if the ID is not there, create a new one on the spot without saving it in the action (IDs are initialized lazily, if the node's ID is not there, it means the node had no children that needed to know its ID) + */ Long spanId = traceInfoAction.removeOrCreate(startNode.getId()); if (spanId != null) { this.spanId = spanId; @@ -144,6 +149,10 @@ private BuildPipelineNode(final Run run, FlowNode startNode) { "It is possible that CI Visibility was enabled while this step was in progress"); } + /* + * Find node's parent: iterate over the blocks that contain it, starting with the innermost, + * until we find a block that is included in the trace (a block that corresponds to a stage). + */ BlockStartNode enclosingStage = DatadogUtilities.getEnclosingStageNode(startNode); if (enclosingStage != null) { this.stageId = enclosingStage.getId(); @@ -159,6 +168,11 @@ private BuildPipelineNode(final Run run, FlowNode startNode) { if (buildSpanAction != null) { TraceSpan.TraceSpanContext traceContext = buildSpanAction.getBuildSpanContext(); this.traceId = traceContext.getTraceId(); + + /* + * If we didn't find this node's parent previously, + * then it is a top-level stage, so its parent will be the span that correspond to the build as a whole. 
+ */ if (this.parentSpanId == -1) { this.parentSpanId = traceContext.getSpanId(); } @@ -261,7 +275,7 @@ public long getTraceId() { return traceId; } - public NodeType getType() { + public StepType getType() { return type; } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/TraceInfoAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/TraceInfoAction.java index e398b9146..989f13803 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/TraceInfoAction.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/TraceInfoAction.java @@ -13,6 +13,26 @@ import org.datadog.jenkins.plugins.datadog.traces.IdGenerator; import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter; +/** + * This action stores mapping between IDs of {@link org.jenkinsci.plugins.workflow.graph.FlowNode} + * that are generated by Jenkins, and span IDs that are generated by the plugin. + *
<p>
+ * Span ID is submitted with the rest of the tracing data when a pipeline step finishes, + * but in certain cases it needs to be known before that: + *
<p>
    + *
<ul>
+ *   <li>before an atomic step starts, we add its span ID to the step's environment so that the logic inside the step could create custom spans that are linked as children to the step's span</li>
+ *   <li>if a stage step contains children, its children need to know their parent stage's span ID to use as their parent ID</li>
+ * </ul>
+ *
+ * For reasons above, we generate IDs for flow nodes on demand, and store them here + * until execution of the corresponding nodes finishes. + * Once the execution finishes, the IDs are no longer needed and can be removed. + * It is important to remove IDs, because this action is regularly dumped to disk, so it should contain as little data as possible. + *
<p>
+ * There is a single trace info action associated with the pipeline, + * rather than a separate action with a single ID associated with the node. + * This is done for performance reasons, as changes to run actions are batched, + * while changes to node actions are written to disk immediately. + */ public class TraceInfoAction extends DatadogPluginAction { private final ConcurrentMap spanIdByNodeId; diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBaseBuildLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBaseBuildLogic.java index 7dfd2003d..9c0061674 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBaseBuildLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBaseBuildLogic.java @@ -44,6 +44,8 @@ protected Set getNodeLabels(Run run, final String nodeName) { return Collections.emptySet(); } + // First examine PipelineNodeInfoAction associated with the build. + // The action is populated in step listener based on environment and executor data available for pipeline steps. 
final PipelineNodeInfoAction pipelineNodeInfoAction = run.getAction(PipelineNodeInfoAction.class); if(pipelineNodeInfoAction != null) { return pipelineNodeInfoAction.getNodeLabels(); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java index 34c3136aa..64af28e16 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java @@ -8,7 +8,7 @@ import net.sf.json.JSONObject; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; +import org.datadog.jenkins.plugins.datadog.model.PipelineStepData; import org.datadog.jenkins.plugins.datadog.model.PipelineNodeInfoAction; import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings; @@ -21,14 +21,18 @@ public abstract class DatadogBasePipelineLogic { protected static final String CI_PROVIDER = "jenkins"; protected static final String HOSTNAME_NONE = "none"; - public abstract JSONObject toJson(BuildPipelineNode current, Run run) throws IOException, InterruptedException; + public abstract JSONObject toJson(PipelineStepData current, Run run) throws IOException, InterruptedException; @SuppressFBWarnings("NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE") - protected Set getNodeLabels(Run run, BuildPipelineNode current, String nodeName) { + protected Set getNodeLabels(Run run, PipelineStepData current, String nodeName) { final PipelineNodeInfoAction pipelineNodeInfoAction = run.getAction(PipelineNodeInfoAction.class); if (current.getNodeLabels() != null && !current.getNodeLabels().isEmpty()) { + // First examine if current step has info about the node it was executed on. 
return current.getNodeLabels(); + } else if (pipelineNodeInfoAction != null && !pipelineNodeInfoAction.getNodeLabels().isEmpty()) { + // Examine PipelineNodeInfoAction associated with the pipeline. + // The action is populated in step listener based on environment and executor data available for pipeline steps. return pipelineNodeInfoAction.getNodeLabels(); } @@ -51,7 +55,7 @@ protected Set getNodeLabels(Run run, BuildPipelineNode current, String n return Collections.emptySet(); } - protected String getNodeName(BuildPipelineNode current, BuildData buildData) { + protected String getNodeName(PipelineStepData current, BuildData buildData) { if (current.getNodeName() != null) { return current.getNodeName(); } @@ -61,20 +65,20 @@ protected String getNodeName(BuildPipelineNode current, BuildData buildData) { // - DatadogBuildListener#onInitialize created a BuildData instance // - that BuildData had its nodeName populated from environment variables obtained from Run // - the instance was persisted in an Action attached to Run, and was used to populate the node name of the pipeline span (always as the last fallback) - // For pipelines, the environment variables that Run#getEnvironment returns _at the beginning of the run_ always (!) contain NODE_NAME = "built-in" - // This is true regardless of whether the pipeline definition has a top-level agent block or not - // For freestyle projects the correct NODE_NAME seems to be available in the run's environment variables at every stage of the build + // For pipelines, the environment variables that Run#getEnvironment returns at the beginning of the run always (!) contain NODE_NAME = "built-in" (when invoked at the end of the run, the env will have a different set of variables). + // This is true regardless of whether the pipeline definition has a top-level agent block or not. + // For freestyle projects the correct NODE_NAME seems to be available in the run's environment variables at every stage of the build's lifecycle. 
return buildData.getNodeName("built-in"); } - protected String getNodeHostname(BuildPipelineNode current, BuildData buildData) { + protected String getNodeHostname(PipelineStepData current, BuildData buildData) { if (current.getNodeHostname() != null) { return current.getNodeHostname(); } return buildData.getHostname(""); } - protected String buildOperationName(BuildPipelineNode current) { + protected String buildOperationName(PipelineStepData current) { return CI_PROVIDER + "." + current.getType().name().toLowerCase(); } } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java index b0a305cd8..211467c07 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java @@ -19,7 +19,7 @@ import org.apache.commons.lang.StringUtils; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; +import org.datadog.jenkins.plugins.datadog.model.PipelineStepData; import org.datadog.jenkins.plugins.datadog.traces.mapper.JsonTraceSpanMapper; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; import org.datadog.jenkins.plugins.datadog.util.TagsUtil; @@ -58,8 +58,8 @@ public TraceSpan toSpan(final BuildData buildData, final Run run) { return null; } - final String prefix = BuildPipelineNode.NodeType.PIPELINE.getTagName(); - final String buildLevel = BuildPipelineNode.NodeType.PIPELINE.getBuildLevel(); + final String prefix = PipelineStepData.StepType.PIPELINE.getTagName(); + final String buildLevel = PipelineStepData.StepType.PIPELINE.getBuildLevel(); final long endTimeMicros = buildData.getEndTime(0L) * 1000; 
buildSpan.setServiceName(DatadogUtilities.getDatadogGlobalDescriptor().getCiInstanceName()); @@ -90,9 +90,9 @@ public TraceSpan toSpan(final BuildData buildData, final Run run) { // - DatadogBuildListener#onInitialize created a BuildData instance // - that BuildData had its nodeName populated from environment variables obtained from Run // - the instance was persisted in an Action attached to Run, and was used to populate the node name of the pipeline span (always as the last fallback) - // For pipelines, the environment variables that Run#getEnvironment returns _at the beginning of the run_ always (!) contain NODE_NAME = "built-in" - // This is true regardless of whether the pipeline definition has a top-level agent block or not - // For freestyle projects the correct NODE_NAME seems to be available in the run's environment variables at every stage of the build + // For pipelines, the environment variables that Run#getEnvironment returns at the beginning of the run always (!) contain NODE_NAME = "built-in" (when invoked at the end of the run, the env will have a different set of variables). + // This is true regardless of whether the pipeline definition has a top-level agent block or not. + // For freestyle projects the correct NODE_NAME seems to be available in the run's environment variables at every stage of the build's lifecycle. 
String nodeName = buildData.getNodeName("built-in"); buildSpan.putMeta(CITags.WORKSPACE_PATH, buildData.getWorkspace("")); buildSpan.putMeta(CITags.NODE_NAME, nodeName); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTracePipelineLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTracePipelineLogic.java index 1c1e414f0..839065bf1 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTracePipelineLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTracePipelineLogic.java @@ -1,7 +1,7 @@ package org.datadog.jenkins.plugins.datadog.traces; import static org.datadog.jenkins.plugins.datadog.DatadogUtilities.toJson; -import static org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode.NodeType.PIPELINE; +import static org.datadog.jenkins.plugins.datadog.model.PipelineStepData.StepType.PIPELINE; import static org.datadog.jenkins.plugins.datadog.traces.CITags.Values.ORIGIN_CIAPP_PIPELINE; import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.filterSensitiveInfo; import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.normalizeBranch; @@ -19,7 +19,7 @@ import org.apache.commons.lang.StringUtils; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; +import org.datadog.jenkins.plugins.datadog.model.PipelineStepData; import org.datadog.jenkins.plugins.datadog.model.Status; import org.datadog.jenkins.plugins.datadog.traces.mapper.JsonTraceSpanMapper; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; @@ -36,14 +36,14 @@ public class DatadogTracePipelineLogic extends DatadogBasePipelineLogic { @Nonnull @Override - public JSONObject toJson(BuildPipelineNode flowNode, Run run) throws IOException, InterruptedException { + public JSONObject toJson(PipelineStepData flowNode, Run run) throws 
IOException, InterruptedException { TraceSpan span = toSpan(flowNode, run); return jsonTraceSpanMapper.map(span); } // hook for tests @Nonnull - public TraceSpan toSpan(BuildPipelineNode current, Run run) throws IOException, InterruptedException { + public TraceSpan toSpan(PipelineStepData current, Run run) throws IOException, InterruptedException { BuildData buildData = new BuildData(run, DatadogUtilities.getTaskListener(run)); // If the root span has propagated queue time, we need to adjust all startTime and endTime from Jenkins pipelines spans @@ -88,13 +88,13 @@ public TraceSpan toSpan(BuildPipelineNode current, Run run) throws IOExcep return span; } - private Map buildTraceMetrics(BuildPipelineNode current) { + private Map buildTraceMetrics(PipelineStepData current) { final Map metrics = new HashMap<>(); metrics.put(CITags.QUEUE_TIME, TimeUnit.NANOSECONDS.toSeconds(current.getNanosInQueue())); return metrics; } - private Map buildTraceTags(final Run run, final BuildPipelineNode current, final BuildData buildData) { + private Map buildTraceTags(final Run run, final PipelineStepData current, final BuildData buildData) { final String prefix = current.getType().getTagName(); final String buildLevel = current.getType().getBuildLevel(); @@ -219,8 +219,8 @@ private Map buildTraceTags(final Run run, final BuildPipel tags.put(PIPELINE.getTagName() + CITags._ID, buildData.getBuildTag("")); // Propagate Stage Name - if(!BuildPipelineNode.NodeType.STAGE.equals(current.getType()) && current.getStageName() != null) { - tags.put(BuildPipelineNode.NodeType.STAGE.getTagName() + CITags._NAME, current.getStageName()); + if(!PipelineStepData.StepType.STAGE.equals(current.getType()) && current.getStageName() != null) { + tags.put(PipelineStepData.StepType.STAGE.getTagName() + CITags._NAME, current.getStageName()); } // CI Tags propagation diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java 
b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java index 0a0d2a7a2..6990cd30c 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java @@ -19,7 +19,7 @@ import org.apache.commons.lang.StringUtils; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; +import org.datadog.jenkins.plugins.datadog.model.PipelineStepData; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; import org.datadog.jenkins.plugins.datadog.util.TagsUtil; @@ -62,7 +62,7 @@ public JSONObject toJson(final BuildData buildData, final Run run) { final long endTimeMillis = buildData.getEndTime(0L) - propagatedMillisInQueue; final String jenkinsResult = buildData.getResult(""); final String status = statusFromResult(jenkinsResult); - final String prefix = BuildPipelineNode.NodeType.PIPELINE.getTagName(); + final String prefix = PipelineStepData.StepType.PIPELINE.getTagName(); final String rawGitBranch = buildData.getBranch(""); final String gitBranch = normalizeBranch(rawGitBranch); // Check if the user set manually the DD_GIT_TAG environment variable. 
@@ -72,7 +72,7 @@ public JSONObject toJson(final BuildData buildData, final Run run) { .orElse(normalizeTag(rawGitBranch)); JSONObject payload = new JSONObject(); - payload.put("level", BuildPipelineNode.NodeType.PIPELINE.getBuildLevel()); + payload.put("level", PipelineStepData.StepType.PIPELINE.getBuildLevel()); payload.put("url", buildData.getBuildUrl("")); payload.put("start", DatadogUtilities.toISO8601(new Date(startTimeMillis))); payload.put("end", DatadogUtilities.toISO8601(new Date(endTimeMillis))); @@ -160,9 +160,9 @@ public JSONObject toJson(final BuildData buildData, final Run run) { // - DatadogBuildListener#onInitialize created a BuildData instance // - that BuildData had its nodeName populated from environment variables obtained from Run // - the instance was persisted in an Action attached to Run, and was used to populate the node name of the pipeline span (always as the last fallback) - // For pipelines, the environment variables that Run#getEnvironment returns _at the beginning of the run_ always (!) contain NODE_NAME = "built-in" - // This is true regardless of whether the pipeline definition has a top-level agent block or not - // For freestyle projects the correct NODE_NAME seems to be available in the run's environment variables at every stage of the build + // For pipelines, the environment variables that Run#getEnvironment returns at the beginning of the run always (!) contain NODE_NAME = "built-in" (when invoked at the end of the run, the env will have a different set of variables). + // This is true regardless of whether the pipeline definition has a top-level agent block or not. + // For freestyle projects the correct NODE_NAME seems to be available in the run's environment variables at every stage of the build's lifecycle. 
final String nodeName = buildData.getNodeName("built-in"); nodePayload.put("name", nodeName); if(!DatadogUtilities.isMainNode(nodeName)) { diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookPipelineLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookPipelineLogic.java index b6ea079f0..fb017274b 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookPipelineLogic.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookPipelineLogic.java @@ -19,7 +19,7 @@ import org.apache.commons.lang.StringUtils; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; +import org.datadog.jenkins.plugins.datadog.model.PipelineStepData; import org.datadog.jenkins.plugins.datadog.model.Status; import org.datadog.jenkins.plugins.datadog.util.TagsUtil; @@ -31,7 +31,7 @@ public class DatadogWebhookPipelineLogic extends DatadogBasePipelineLogic { @Nonnull @Override - public JSONObject toJson(BuildPipelineNode current, Run run) throws IOException, InterruptedException { + public JSONObject toJson(PipelineStepData current, Run run) throws IOException, InterruptedException { BuildData buildData = new BuildData(run, DatadogUtilities.getTaskListener(run)); JSONObject payload = new JSONObject(); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java index fe85be063..6afa67467 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java @@ -12,7 +12,7 @@ import javax.annotation.Nullable; import org.datadog.jenkins.plugins.datadog.clients.DatadogAgentClient; import 
org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; +import org.datadog.jenkins.plugins.datadog.model.PipelineStepData; /** * Trace write strategy that can dynamically switch from using APM track to using EVP Proxy. @@ -55,8 +55,8 @@ public Payload serialize(BuildData buildData, Run run) { @Nullable @Override - public Payload serialize(BuildPipelineNode node, Run run) throws IOException, InterruptedException { - return getCurrentStrategy().serialize(node, run); + public Payload serialize(PipelineStepData stepData, Run run) throws IOException, InterruptedException { + return getCurrentStrategy().serialize(stepData, run); } @Override diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java index cdfeeb372..68323ae86 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java @@ -5,14 +5,14 @@ import java.util.Collection; import javax.annotation.Nullable; import org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; +import org.datadog.jenkins.plugins.datadog.model.PipelineStepData; public interface TraceWriteStrategy { @Nullable Payload serialize(BuildData buildData, Run run); @Nullable - Payload serialize(BuildPipelineNode node, Run run) throws IOException, InterruptedException; + Payload serialize(PipelineStepData stepData, Run run) throws IOException, InterruptedException; void send(Collection spans); } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java index e30641a4c..57c0f3ae2 100644 --- 
a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java @@ -9,7 +9,7 @@ import net.sf.json.JSONObject; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; +import org.datadog.jenkins.plugins.datadog.model.PipelineStepData; import org.datadog.jenkins.plugins.datadog.traces.DatadogBaseBuildLogic; import org.datadog.jenkins.plugins.datadog.traces.DatadogBasePipelineLogic; import org.datadog.jenkins.plugins.datadog.traces.DatadogTraceBuildLogic; @@ -54,8 +54,8 @@ public Payload serialize(final BuildData buildData, final Run run) { @Nullable @Override - public Payload serialize(BuildPipelineNode node, Run run) throws IOException, InterruptedException { - JSONObject stepSpan = pipelineLogic.toJson(node, run); + public Payload serialize(PipelineStepData stepData, Run run) throws IOException, InterruptedException { + JSONObject stepSpan = pipelineLogic.toJson(stepData, run); return stepSpan != null ? 
new Payload(stepSpan, track) : null; } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java index 8980454e7..c82bbceb4 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java @@ -13,7 +13,7 @@ import org.datadog.jenkins.plugins.datadog.DatadogClient; import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; +import org.datadog.jenkins.plugins.datadog.model.PipelineStepData; public final class TraceWriter { @@ -58,8 +58,8 @@ public void submitBuild(final BuildData buildData, final Run run) throws In submit(span); } - public void submitPipelineStep(BuildPipelineNode node, Run run) throws InterruptedException, TimeoutException, IOException { - Payload span = traceWriteStrategy.serialize(node, run); + public void submitPipelineStep(PipelineStepData stepData, Run run) throws InterruptedException, TimeoutException, IOException { + Payload span = traceWriteStrategy.serialize(stepData, run); submit(span); } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfo.java b/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfo.java index 39d7fc88c..15a2ed0bb 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfo.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfo.java @@ -1,6 +1,7 @@ package org.datadog.jenkins.plugins.datadog.util.git; import java.io.Serializable; +import javax.annotation.Nullable; public class RepositoryInfo implements Serializable { @@ -16,14 +17,17 @@ public RepositoryInfo(String repoUrl, String defaultBranch, String branch) { this.branch = branch; } + @Nullable public 
String getRepoUrl() { return repoUrl; } + @Nullable public String getDefaultBranch() { return defaultBranch; } + @Nullable public String getBranch() { return branch; } diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/DatadogUtilitiesTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/DatadogUtilitiesTest.java index 4da146b94..b5305b06a 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/DatadogUtilitiesTest.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/DatadogUtilitiesTest.java @@ -25,6 +25,8 @@ of this software and associated documentation files (the "Software"), to deal package org.datadog.jenkins.plugins.datadog; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.when; @@ -67,55 +69,55 @@ public void setUpMocks() { @Test public void testCstrToList(){ - Assert.assertTrue(DatadogUtilities.cstrToList(null).isEmpty()); - Assert.assertTrue(DatadogUtilities.cstrToList("").isEmpty()); - Assert.assertTrue(DatadogUtilities.cstrToList(" , ").isEmpty()); + assertTrue(DatadogUtilities.cstrToList(null).isEmpty()); + assertTrue(DatadogUtilities.cstrToList("").isEmpty()); + assertTrue(DatadogUtilities.cstrToList(" , ").isEmpty()); List items = new ArrayList<>(); items.add("item1"); - Assert.assertTrue(DatadogUtilities.cstrToList("item1").equals(items)); - Assert.assertTrue(DatadogUtilities.cstrToList(" item1 ").equals(items)); - Assert.assertTrue(DatadogUtilities.cstrToList(" , item1 , ").equals(items)); + assertTrue(DatadogUtilities.cstrToList("item1").equals(items)); + assertTrue(DatadogUtilities.cstrToList(" item1 ").equals(items)); + assertTrue(DatadogUtilities.cstrToList(" , item1 , ").equals(items)); items = new ArrayList<>(); items.add("item1"); items.add("item2"); - Assert.assertTrue(DatadogUtilities.cstrToList("item1,item2").equals(items)); - 
Assert.assertTrue(DatadogUtilities.cstrToList(" item1 , item2 ").equals(items)); - Assert.assertTrue(DatadogUtilities.cstrToList(" , item1 , item2 , ").equals(items)); + assertTrue(DatadogUtilities.cstrToList("item1,item2").equals(items)); + assertTrue(DatadogUtilities.cstrToList(" item1 , item2 ").equals(items)); + assertTrue(DatadogUtilities.cstrToList(" , item1 , item2 , ").equals(items)); } @Test public void testLinesToList(){ - Assert.assertTrue(DatadogUtilities.linesToList(null).isEmpty()); - Assert.assertTrue(DatadogUtilities.linesToList("").isEmpty()); + assertTrue(DatadogUtilities.linesToList(null).isEmpty()); + assertTrue(DatadogUtilities.linesToList("").isEmpty()); List items = new ArrayList<>(); items.add("item1"); - Assert.assertTrue(DatadogUtilities.linesToList("item1").equals(items)); - Assert.assertTrue(DatadogUtilities.linesToList(" item1 ").equals(items)); - Assert.assertTrue(DatadogUtilities.linesToList(" \n item1 \n ").equals(items)); + assertTrue(DatadogUtilities.linesToList("item1").equals(items)); + assertTrue(DatadogUtilities.linesToList(" item1 ").equals(items)); + assertTrue(DatadogUtilities.linesToList(" \n item1 \n ").equals(items)); items = new ArrayList<>(); items.add("item1"); items.add("item2"); - Assert.assertTrue(DatadogUtilities.linesToList("item1\nitem2").equals(items)); - Assert.assertTrue(DatadogUtilities.linesToList(" item1 \n item2 ").equals(items)); - Assert.assertTrue(DatadogUtilities.linesToList(" \n item1 \n item2 \n ").equals(items)); + assertTrue(DatadogUtilities.linesToList("item1\nitem2").equals(items)); + assertTrue(DatadogUtilities.linesToList(" item1 \n item2 ").equals(items)); + assertTrue(DatadogUtilities.linesToList(" \n item1 \n item2 \n ").equals(items)); } @Test public void isStageNodeTest() { - Assert.assertFalse(DatadogUtilities.isStageNode(null)); + assertFalse(DatadogUtilities.isStageNode(null)); BlockStartNode node = mock(BlockStartNode.class); - Assert.assertFalse(DatadogUtilities.isStageNode(node)); + 
assertFalse(DatadogUtilities.isStageNode(node)); when(node.getAction(LabelAction.class)).thenReturn(mock(LabelAction.class)); - Assert.assertTrue(DatadogUtilities.isStageNode(node)); + assertTrue(DatadogUtilities.isStageNode(node)); when(node.getAction(ThreadNameAction.class)).thenReturn(mock(ThreadNameAction.class)); - Assert.assertFalse(DatadogUtilities.isStageNode(node)); + assertFalse(DatadogUtilities.isStageNode(node)); } @Test @@ -213,4 +215,28 @@ public void testGetHostname() throws IOException { } } + @Test + public void testIsPrivateIPv4Address() { + assertFalse(DatadogUtilities.isPrivateIPv4Address(null)); + assertFalse(DatadogUtilities.isPrivateIPv4Address("")); + assertFalse(DatadogUtilities.isPrivateIPv4Address("google.com")); + assertFalse(DatadogUtilities.isPrivateIPv4Address("my.subdomain.domain.com")); + assertFalse(DatadogUtilities.isPrivateIPv4Address("123.456.789.012")); + assertFalse(DatadogUtilities.isPrivateIPv4Address("10.0.my-domain.com")); + assertTrue(DatadogUtilities.isPrivateIPv4Address("10.0.0.1")); + assertFalse(DatadogUtilities.isPrivateIPv4Address("10.0.0.1.1")); + assertFalse(DatadogUtilities.isPrivateIPv4Address("10.0.0.1.org")); + assertTrue(DatadogUtilities.isPrivateIPv4Address("10.255.255.255")); + assertFalse(DatadogUtilities.isPrivateIPv4Address("10.255.255.256")); + assertFalse(DatadogUtilities.isPrivateIPv4Address("10.255.256.255")); + assertFalse(DatadogUtilities.isPrivateIPv4Address("10.-1.255.255")); + assertTrue(DatadogUtilities.isPrivateIPv4Address("172.16.0.1")); + assertTrue(DatadogUtilities.isPrivateIPv4Address("172.31.0.1")); + assertFalse(DatadogUtilities.isPrivateIPv4Address("172.15.0.1")); + assertFalse(DatadogUtilities.isPrivateIPv4Address("172.32.0.1")); + assertTrue(DatadogUtilities.isPrivateIPv4Address("192.168.0.1")); + assertTrue(DatadogUtilities.isPrivateIPv4Address("192.168.255.255")); + assertFalse(DatadogUtilities.isPrivateIPv4Address("192.167.255.255")); + } + } diff --git 
a/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java b/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java index bc89baa77..9acc22a2f 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java @@ -46,7 +46,7 @@ of this software and associated documentation files (the "Software"), to deal import org.datadog.jenkins.plugins.datadog.DatadogClient; import org.datadog.jenkins.plugins.datadog.DatadogEvent; import org.datadog.jenkins.plugins.datadog.model.BuildData; -import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; +import org.datadog.jenkins.plugins.datadog.model.PipelineStepData; import org.datadog.jenkins.plugins.datadog.traces.DatadogTraceBuildLogic; import org.datadog.jenkins.plugins.datadog.traces.DatadogTracePipelineLogic; import org.datadog.jenkins.plugins.datadog.traces.DatadogWebhookBuildLogic; @@ -306,16 +306,16 @@ public Payload serialize(BuildData buildData, Run run) { @Nullable @Override - public Payload serialize(BuildPipelineNode node, Run run) throws IOException, InterruptedException { + public Payload serialize(PipelineStepData stepData, Run run) throws IOException, InterruptedException { if (isWebhook) { - JSONObject json = new DatadogWebhookPipelineLogic().toJson(node, run); + JSONObject json = new DatadogWebhookPipelineLogic().toJson(stepData, run); if (json == null) { return null; } webhooks.add(json); return new Payload(json, Track.WEBHOOK); } else { - TraceSpan span = new DatadogTracePipelineLogic().toSpan(node, run); + TraceSpan span = new DatadogTracePipelineLogic().toSpan(stepData, run); if (span == null) { return null; } diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java index eb4f8f88e..f37dc8f20 100644 --- 
a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java @@ -43,7 +43,7 @@ import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.clients.ClientFactory; import org.datadog.jenkins.plugins.datadog.clients.DatadogClientStub; -import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; +import org.datadog.jenkins.plugins.datadog.model.PipelineStepData; import org.datadog.jenkins.plugins.datadog.traces.CITags; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; import org.jetbrains.annotations.NotNull; @@ -154,7 +154,7 @@ public void testTraces() throws Exception { env.put("WORKSPACE", ws.getRemote()); FreeStyleBuild run = project.scheduleBuild2(0).get(); - final String buildPrefix = BuildPipelineNode.NodeType.PIPELINE.getTagName(); + final String buildPrefix = PipelineStepData.StepType.PIPELINE.getTagName(); clientStub.waitForTraces(1); final List spans = clientStub.getSpans(); @@ -164,8 +164,8 @@ public void testTraces() throws Exception { assertGitVariablesOnSpan(buildSpan, "master", toUrl(localGitRepoPath.getRemote())); final Map meta = buildSpan.getMeta(); final Map metrics = buildSpan.getMetrics(); - assertEquals(BuildPipelineNode.NodeType.PIPELINE.getBuildLevel(), meta.get(CITags._DD_CI_BUILD_LEVEL)); - assertEquals(BuildPipelineNode.NodeType.PIPELINE.getBuildLevel(), meta.get(CITags._DD_CI_LEVEL)); + assertEquals(PipelineStepData.StepType.PIPELINE.getBuildLevel(), meta.get(CITags._DD_CI_BUILD_LEVEL)); + assertEquals(PipelineStepData.StepType.PIPELINE.getBuildLevel(), meta.get(CITags._DD_CI_LEVEL)); assertEquals(ORIGIN_CIAPP_PIPELINE, meta.get(CITags._DD_ORIGIN)); assertEquals("jenkins.build", buildSpan.getOperationName()); assertEquals(SAMPLE_SERVICE_NAME, buildSpan.getServiceName()); diff --git 
a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java index 4fdcbe46a..95d0ac7af 100644 --- a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java +++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java @@ -52,7 +52,7 @@ import org.datadog.jenkins.plugins.datadog.DatadogUtilities; import org.datadog.jenkins.plugins.datadog.clients.ClientFactory; import org.datadog.jenkins.plugins.datadog.clients.DatadogClientStub; -import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode; +import org.datadog.jenkins.plugins.datadog.model.PipelineStepData; import org.datadog.jenkins.plugins.datadog.traces.CITags; import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan; import org.jenkinsci.plugins.workflow.actions.LabelAction; @@ -661,18 +661,18 @@ public void testStageNamePropagation() throws Exception{ assertEquals(6, spans.size()); final TraceSpan stage1 = searchSpan(spans, "Stage 1"); - final String stage1Name = stage1.getMeta().get(BuildPipelineNode.NodeType.STAGE.getTagName() + CITags._NAME); + final String stage1Name = stage1.getMeta().get(PipelineStepData.StepType.STAGE.getTagName() + CITags._NAME); assertTrue(stage1Name != null && !stage1Name.isEmpty()); final TraceSpan stepStage1 = searchFirstChild(spans, stage1); - assertEquals(stage1Name, stepStage1.getMeta().get(BuildPipelineNode.NodeType.STAGE.getTagName() + CITags._NAME)); + assertEquals(stage1Name, stepStage1.getMeta().get(PipelineStepData.StepType.STAGE.getTagName() + CITags._NAME)); final TraceSpan stage2 = searchSpan(spans, "Stage 2"); - final String stage2Name = stage2.getMeta().get(BuildPipelineNode.NodeType.STAGE.getTagName() + CITags._NAME); + final String stage2Name = stage2.getMeta().get(PipelineStepData.StepType.STAGE.getTagName() + CITags._NAME); assertTrue(stage2Name != null && 
!stage2Name.isEmpty()); final TraceSpan stepStage2 = searchFirstChild(spans, stage2); - assertEquals(stage2Name, stepStage2.getMeta().get(BuildPipelineNode.NodeType.STAGE.getTagName() + CITags._NAME)); + assertEquals(stage2Name, stepStage2.getMeta().get(PipelineStepData.StepType.STAGE.getTagName() + CITags._NAME)); } @Test @@ -885,9 +885,9 @@ public void testIntegrationNoFailureTag() throws Exception { clientStub.assertMetric("jenkins.job.stage_duration", hostname, tags); clientStub.assertMetric("jenkins.job.stage_pause_duration", 0, hostname, tags); - final String buildPrefix = BuildPipelineNode.NodeType.PIPELINE.getTagName(); - final String stagePrefix = BuildPipelineNode.NodeType.STAGE.getTagName(); - final String stepPrefix = BuildPipelineNode.NodeType.STEP.getTagName(); + final String buildPrefix = PipelineStepData.StepType.PIPELINE.getTagName(); + final String stagePrefix = PipelineStepData.StepType.STAGE.getTagName(); + final String stepPrefix = PipelineStepData.StepType.STEP.getTagName(); clientStub.waitForTraces(3); final List spans = clientStub.getSpans(); @@ -914,8 +914,8 @@ public void testIntegrationNoFailureTag() throws Exception { assertEquals("success", buildSpanMeta.get(CITags.JENKINS_RESULT)); assertEquals("jenkins-pipelineIntegrationSuccess-1", buildSpanMeta.get(CITags.JENKINS_TAG)); assertEquals("false", buildSpanMeta.get(CITags._DD_CI_INTERNAL)); - assertEquals(BuildPipelineNode.NodeType.PIPELINE.getBuildLevel(), buildSpanMeta.get(CITags._DD_CI_BUILD_LEVEL)); - assertEquals(BuildPipelineNode.NodeType.PIPELINE.getBuildLevel(), buildSpanMeta.get(CITags._DD_CI_LEVEL)); + assertEquals(PipelineStepData.StepType.PIPELINE.getBuildLevel(), buildSpanMeta.get(CITags._DD_CI_BUILD_LEVEL)); + assertEquals(PipelineStepData.StepType.PIPELINE.getBuildLevel(), buildSpanMeta.get(CITags._DD_CI_LEVEL)); assertNotNull(buildSpanMeta.get(CITags._DD_CI_STAGES)); assertTrue(buildSpanMeta.get(CITags._DD_CI_STAGES).contains("{\"name\":\"test\",\"duration\"")); @@ -935,10 
+935,10 @@ public void testIntegrationNoFailureTag() throws Exception { checkHostNameTag(stageSpanMeta); assertEquals("false", stageSpanMeta.get(CITags._DD_CI_INTERNAL)); assertEquals("4", stageSpanMeta.get(stagePrefix + CITags._NUMBER)); - assertEquals(BuildPipelineNode.NodeType.STAGE.getBuildLevel(), stageSpanMeta.get(CITags._DD_CI_BUILD_LEVEL)); - assertEquals(BuildPipelineNode.NodeType.STAGE.getBuildLevel(), stageSpanMeta.get(CITags._DD_CI_LEVEL)); - assertEquals("jenkins-pipelineIntegrationSuccess-1", stageSpanMeta.get(BuildPipelineNode.NodeType.PIPELINE.getTagName() + CITags._ID)); - assertEquals("pipelineIntegrationSuccess", stageSpanMeta.get(BuildPipelineNode.NodeType.PIPELINE.getTagName() + CITags._NAME)); + assertEquals(PipelineStepData.StepType.STAGE.getBuildLevel(), stageSpanMeta.get(CITags._DD_CI_BUILD_LEVEL)); + assertEquals(PipelineStepData.StepType.STAGE.getBuildLevel(), stageSpanMeta.get(CITags._DD_CI_LEVEL)); + assertEquals("jenkins-pipelineIntegrationSuccess-1", stageSpanMeta.get(PipelineStepData.StepType.PIPELINE.getTagName() + CITags._ID)); + assertEquals("pipelineIntegrationSuccess", stageSpanMeta.get(PipelineStepData.StepType.PIPELINE.getTagName() + CITags._NAME)); assertNotNull(stageSpan.getMetrics().get(CITags.QUEUE_TIME)); final TraceSpan stepSpan = spans.get(2); @@ -959,11 +959,11 @@ public void testIntegrationNoFailureTag() throws Exception { checkHostNameTag(stepSpanMeta); assertEquals("false", stepSpanMeta.get(CITags._DD_CI_INTERNAL)); assertEquals("5", stepSpanMeta.get(stepPrefix + CITags._NUMBER)); - assertEquals(BuildPipelineNode.NodeType.STEP.getBuildLevel(), stepSpanMeta.get(CITags._DD_CI_BUILD_LEVEL)); - assertEquals(BuildPipelineNode.NodeType.STEP.getBuildLevel(), stepSpanMeta.get(CITags._DD_CI_LEVEL)); - assertEquals("jenkins-pipelineIntegrationSuccess-1", stepSpanMeta.get(BuildPipelineNode.NodeType.PIPELINE.getTagName() + CITags._ID)); - assertEquals("pipelineIntegrationSuccess", 
stepSpanMeta.get(BuildPipelineNode.NodeType.PIPELINE.getTagName() + CITags._NAME)); - assertEquals("test", stepSpanMeta.get(BuildPipelineNode.NodeType.STAGE.getTagName() + CITags._NAME)); + assertEquals(PipelineStepData.StepType.STEP.getBuildLevel(), stepSpanMeta.get(CITags._DD_CI_BUILD_LEVEL)); + assertEquals(PipelineStepData.StepType.STEP.getBuildLevel(), stepSpanMeta.get(CITags._DD_CI_LEVEL)); + assertEquals("jenkins-pipelineIntegrationSuccess-1", stepSpanMeta.get(PipelineStepData.StepType.PIPELINE.getTagName() + CITags._ID)); + assertEquals("pipelineIntegrationSuccess", stepSpanMeta.get(PipelineStepData.StepType.PIPELINE.getTagName() + CITags._NAME)); + assertEquals("test", stepSpanMeta.get(PipelineStepData.StepType.STAGE.getTagName() + CITags._NAME)); assertNotNull(stepSpan.getMetrics().get(CITags.QUEUE_TIME)); assertCleanupActions(run); From 88b5e105b815ef18c9a89875dc143b63fbab2ce6 Mon Sep 17 00:00:00 2001 From: Nikita Tkachenko Date: Mon, 29 Jan 2024 11:26:42 +0100 Subject: [PATCH 16/17] Add max payload size check for webhook requests --- .../jenkins/plugins/datadog/clients/DatadogAgentClient.java | 6 ++++++ .../jenkins/plugins/datadog/clients/DatadogApiClient.java | 6 ++++++ 2 files changed, 12 insertions(+) diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java index 9c591cb57..cf0562aee 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogAgentClient.java @@ -66,6 +66,8 @@ of this software and associated documentation files (the "Software"), to deal */ public class DatadogAgentClient implements DatadogClient { + private static final int PAYLOAD_SIZE_LIMIT = 5 * 1024 * 1024; // 5 MB + private static volatile DatadogAgentClient instance = null; // Used to determine if the instance failed last validation last time, so // we 
do not keep retrying to create the instance and logging the same error @@ -556,6 +558,10 @@ private void sendSpansToWebhook(Collection spans) { } byte[] body = span.getJson().toString().getBytes(StandardCharsets.UTF_8); + if (body.length > PAYLOAD_SIZE_LIMIT) { + logger.severe("Dropping span because payload size (" + body.length + ") exceeds the allowed limit of " + PAYLOAD_SIZE_LIMIT); + continue; + } // webhook intake does not support batch requests logger.fine("Sending webhook"); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogApiClient.java b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogApiClient.java index 2573dbe1f..288137cfd 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogApiClient.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/clients/DatadogApiClient.java @@ -59,6 +59,8 @@ of this software and associated documentation files (the "Software"), to deal */ public class DatadogApiClient implements DatadogClient { + private static final int PAYLOAD_SIZE_LIMIT = 5 * 1024 * 1024; // 5 MB + private static volatile DatadogApiClient instance = null; // Used to determine if the instance failed last validation last time, so // we do not keep retrying to create the instance and logging the same error @@ -515,6 +517,10 @@ private void sendSpans(Collection spans) { } byte[] body = span.getJson().toString().getBytes(StandardCharsets.UTF_8); + if (body.length > PAYLOAD_SIZE_LIMIT) { + logger.severe("Dropping span because payload size (" + body.length + ") exceeds the allowed limit of " + PAYLOAD_SIZE_LIMIT); + continue; + } // webhook intake does not support batch requests logger.fine("Sending webhook"); From d51165600b8b55405ab326011831d594a0257c08 Mon Sep 17 00:00:00 2001 From: Nikita Tkachenko Date: Tue, 30 Jan 2024 18:57:47 +0100 Subject: [PATCH 17/17] Fix SpotBugs warnings --- .../datadog/listeners/DatadogGraphListener.java | 2 ++ .../datadog/listeners/DatadogStepListener.java 
| 2 ++ .../jenkins/plugins/datadog/model/BuildData.java | 13 ++++++++++--- .../plugins/datadog/model/GitCommitAction.java | 3 +++ .../plugins/datadog/model/GitRepositoryAction.java | 3 +++ .../datadog/model/PipelineNodeInfoAction.java | 3 +++ .../datadog/model/PipelineQueueInfoAction.java | 3 +++ .../plugins/datadog/model/node/NodeInfoAction.java | 3 +++ 8 files changed, 29 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java index dee696bce..05f314733 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListener.java @@ -51,6 +51,7 @@ of this software and associated documentation files (the "Software"), to deal import org.datadog.jenkins.plugins.datadog.model.node.StatusAction; import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriter; import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriterFactory; +import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings; import org.datadog.jenkins.plugins.datadog.util.TagsUtil; import org.jenkinsci.plugins.workflow.actions.ThreadNameAction; import org.jenkinsci.plugins.workflow.actions.TimingAction; @@ -73,6 +74,7 @@ public class DatadogGraphListener implements GraphListener { private static final Logger logger = Logger.getLogger(DatadogGraphListener.class.getName()); + @SuppressFBWarnings("REC_CATCH_EXCEPTION") @Override public void onNewHead(FlowNode flowNode) { WorkflowRun run = getRun(flowNode); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java index e87acc6ce..83a168027 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java +++ 
b/src/main/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogStepListener.java @@ -31,6 +31,7 @@ import org.datadog.jenkins.plugins.datadog.model.node.NodeInfoAction; import org.datadog.jenkins.plugins.datadog.traces.BuildSpanAction; import org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils; +import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings; import org.datadog.jenkins.plugins.datadog.util.git.GitUtils; import org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode; import org.jenkinsci.plugins.workflow.flow.StepListener; @@ -133,6 +134,7 @@ private static String getNodeName(StepContext stepContext) { * @param stepContext * @return hostname of the remote node. */ + @SuppressFBWarnings("REC_CATCH_EXCEPTION") private static String getNodeHostname(final StepContext stepContext) { try { EnvVars envVars = stepContext.get(EnvVars.class); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java index b89802ad7..afd930aa2 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java @@ -44,6 +44,7 @@ of this software and associated documentation files (the "Software"), to deal import hudson.model.Job; import hudson.model.ParameterValue; import hudson.model.ParametersAction; +import hudson.model.Result; import hudson.model.Run; import hudson.model.StringParameterValue; import hudson.model.TaskListener; @@ -135,7 +136,7 @@ public class BuildData implements Serializable { private String traceId; private String spanId; - public BuildData(Run run, @Nullable TaskListener listener) throws IOException, InterruptedException { + public BuildData(Run run, @Nullable TaskListener listener) throws IOException, InterruptedException { if (run == null) { return; } @@ -180,8 +181,14 @@ public BuildData(Run run, @Nullable TaskListener listener) throws IOException, I } 
// Set Result and completed status - this.result = run.getResult() == null ? null : run.getResult().toString(); - this.isCompleted = run.getResult() != null && run.getResult().completeBuild; + Result runResult = run.getResult(); + if (runResult != null) { + this.result = runResult.toString(); + this.isCompleted = runResult.completeBuild; + } else { + this.result = null; + this.isCompleted = false; + } // Set Build Number this.buildNumber = String.valueOf(run.getNumber()); diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitCommitAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitCommitAction.java index 5311d2442..1046a6bd9 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitCommitAction.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitCommitAction.java @@ -224,6 +224,9 @@ public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext co case "committerDate": committerDate = (String) context.convertAnother(null, String.class); break; + default: + // unknown tag, could be something serialized by a different version of the plugin + break; } reader.moveUp(); } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryAction.java index d0145c8f5..d9c978936 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryAction.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryAction.java @@ -117,6 +117,9 @@ public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext co case "branch": gitRepositoryAction.setBranch((String) context.convertAnother(null, String.class)); break; + default: + // unknown tag, could be something serialized by a different version of the plugin + break; } reader.moveUp(); } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineNodeInfoAction.java 
b/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineNodeInfoAction.java index 8ec2cd3ad..abafd8697 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineNodeInfoAction.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineNodeInfoAction.java @@ -112,6 +112,9 @@ public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext co case "workspace": workspace = (String) context.convertAnother(null, String.class); break; + default: + // unknown tag, could be something serialized by a different version of the plugin + break; } reader.moveUp(); } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineQueueInfoAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineQueueInfoAction.java index 343dfbf47..c7f79e7f5 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineQueueInfoAction.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineQueueInfoAction.java @@ -96,6 +96,9 @@ public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext co case "propagatedQueueTimeMillis": propagatedQueueTimeMillis = (long) context.convertAnother(null, long.class); break; + default: + // unknown tag, could be something serialized by a different version of the plugin + break; } reader.moveUp(); } diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/NodeInfoAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/NodeInfoAction.java index fcda54a34..9eabab814 100644 --- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/NodeInfoAction.java +++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/NodeInfoAction.java @@ -115,6 +115,9 @@ public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext co case "nodeWorkspace": nodeWorkspace = (String) context.convertAnother(null, String.class); break; + default: + // unknown tag, could be something 
serialized by a different version of the plugin + break; } reader.moveUp(); }