envVars) {
+ BuildSpanAction buildSpanAction = run.getAction(BuildSpanAction.class);
+ if (buildSpanAction == null) {
+ return;
+ }
+ String buildUrl = envVars.get("BUILD_URL");
+ if (buildUrl != null) {
+ buildSpanAction.setBuildUrl(buildUrl);
+ }
+ }
+
+ /**
+ * Examine the step's environment to see if it contains any variables that hold git-related data.
+ * It could be variables that are set manually by the pipeline authors (such variables will have {@code DD_} prefix),
+ * or variables that are automatically set by the Jenkins Git Plugin.
+ *
+ * Whatever data we manage to extract, we save in {@link GitCommitAction} that is associated with the pipeline.
+ * It'll later be used to populate git tags both in the pipeline span, and in the spans that correspond to other pipeline steps.
+ *
+ * The reason we examine step environment, rather than checking the pipeline environment (even though the pipeline has its own copy of {@link EnvVars})
+ * is that the pipeline env is minimal and misses many env vars, including the ones that are set manually,
+ * while the step env contains much more data.
+ */
+ private static void updateGitData(Run<?, ?> run, Map<String, String> envVars) {
+ GitCommitAction commitAction = run.getAction(GitCommitAction.class);
+ if (commitAction != null) {
+ // Git tag can only be set manually by the user.
+ // Otherwise, Jenkins reports it in the branch.
+ final String gitTag = envVars.get(DD_GIT_TAG);
+ if(gitTag != null){
+ commitAction.setTag(gitTag);
+ }
+
+ final String gitCommit = GitUtils.resolveGitCommit(envVars);
+ if(gitCommit != null) {
+ commitAction.setCommit(gitCommit);
+ }
+
+ // Git data supplied by the user has prevalence. We set them first.
+ // Only the data that has not been set will be updated later.
+ final String ddGitMessage = envVars.get(DD_GIT_COMMIT_MESSAGE);
+ if(ddGitMessage != null) {
+ commitAction.setMessage(ddGitMessage);
+ }
+
+ final String ddGitAuthorName = envVars.get(DD_GIT_COMMIT_AUTHOR_NAME);
+ if(ddGitAuthorName != null) {
+ commitAction.setAuthorName(ddGitAuthorName);
+ }
+
+ final String ddGitAuthorEmail = envVars.get(DD_GIT_COMMIT_AUTHOR_EMAIL);
+ if(ddGitAuthorEmail != null) {
+ commitAction.setAuthorEmail(ddGitAuthorEmail);
+ }
+
+ final String ddGitAuthorDate = envVars.get(DD_GIT_COMMIT_AUTHOR_DATE);
+ if(ddGitAuthorDate != null) {
+ commitAction.setAuthorDate(ddGitAuthorDate);
+ }
+
+ final String ddGitCommitterName = envVars.get(DD_GIT_COMMIT_COMMITTER_NAME);
+ if(ddGitCommitterName != null) {
+ commitAction.setCommitterName(ddGitCommitterName);
+ }
+
+ final String ddGitCommitterEmail = envVars.get(DD_GIT_COMMIT_COMMITTER_EMAIL);
+ if(ddGitCommitterEmail != null) {
+ commitAction.setCommitterEmail(ddGitCommitterEmail);
+ }
+
+ final String ddGitCommitterDate = envVars.get(DD_GIT_COMMIT_COMMITTER_DATE);
+ if(ddGitCommitterDate != null) {
+ commitAction.setCommitterDate(ddGitCommitterDate);
+ }
+ }
+
+ GitRepositoryAction repositoryAction = run.getAction(GitRepositoryAction.class);
+ if (repositoryAction != null) {
+ final String gitUrl = GitUtils.resolveGitRepositoryUrl(envVars);
+ if (gitUrl != null && !gitUrl.isEmpty()) {
+ repositoryAction.setRepositoryURL(gitUrl);
+ }
+
+ final String defaultBranch = GitInfoUtils.normalizeBranch(envVars.get(DD_GIT_DEFAULT_BRANCH));
+ if (defaultBranch != null && !defaultBranch.isEmpty()) {
+ repositoryAction.setDefaultBranch(defaultBranch);
+ }
+
+ final String gitBranch = GitUtils.resolveGitBranch(envVars);
+ if(gitBranch != null && !gitBranch.isEmpty()) {
+ repositoryAction.setBranch(gitBranch);
+ }
+ }
+ }
+
+ private void findStartOfPipeline(final Run<?,?> run, final NodeInfoAction nodeInfoAction, final FlowNode firstAllocateNodeStart) {
long start = System.currentTimeMillis();
try {
final Iterator<BlockStartNode> blockStartNodes = firstAllocateNodeStart.iterateEnclosingBlocks().iterator();
if(blockStartNodes.hasNext()) {
final FlowNode candidate = blockStartNodes.next();
if("Start of Pipeline".equals(candidate.getDisplayName())) {
- run.addAction(new PipelineNodeInfoAction(stepData.getNodeName() != null ? stepData.getNodeName() : "master", stepData.getNodeLabels(), stepData.getNodeHostname()));
+ run.addOrReplaceAction(new PipelineNodeInfoAction(nodeInfoAction.getNodeName() != null ? nodeInfoAction.getNodeName() : "master", nodeInfoAction.getNodeLabels(), nodeInfoAction.getNodeHostname(), nodeInfoAction.getNodeWorkspace()));
}
}
} finally {
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/logs/DatadogWriter.java b/src/main/java/org/datadog/jenkins/plugins/datadog/logs/DatadogWriter.java
index 82ae7f6a1..06f5a36ae 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/logs/DatadogWriter.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/logs/DatadogWriter.java
@@ -31,7 +31,7 @@ of this software and associated documentation files (the "Software"), to deal
import org.datadog.jenkins.plugins.datadog.DatadogUtilities;
import org.datadog.jenkins.plugins.datadog.clients.ClientFactory;
import org.datadog.jenkins.plugins.datadog.model.BuildData;
-import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode;
+import org.datadog.jenkins.plugins.datadog.model.PipelineStepData;
import org.datadog.jenkins.plugins.datadog.traces.CITags;
import org.datadog.jenkins.plugins.datadog.util.TagsUtil;
@@ -74,7 +74,7 @@ public void write(String line) {
payload.put("ddsource", "jenkins");
payload.put("service", "jenkins");
payload.put("timestamp", System.currentTimeMillis());
- payload.put(BuildPipelineNode.NodeType.PIPELINE.getTagName() + CITags._NAME, this.buildData.getBaseJobName(""));
+ payload.put(PipelineStepData.StepType.PIPELINE.getTagName() + CITags._NAME, this.buildData.getBaseJobName(""));
// Get Datadog Client Instance
DatadogClient client = ClientFactory.getClient();
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java
index 1f016a157..afd930aa2 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildData.java
@@ -33,11 +33,7 @@ of this software and associated documentation files (the "Software"), to deal
import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_COMMITTER_NAME;
import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_MESSAGE;
import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.GIT_BRANCH;
-import static org.datadog.jenkins.plugins.datadog.util.git.GitUtils.isCommitInfoAlreadyCreated;
-import static org.datadog.jenkins.plugins.datadog.util.git.GitUtils.isRepositoryInfoAlreadyCreated;
import static org.datadog.jenkins.plugins.datadog.util.git.GitUtils.isUserSuppliedGit;
-import static org.datadog.jenkins.plugins.datadog.util.git.GitUtils.isValidCommit;
-import static org.datadog.jenkins.plugins.datadog.util.git.GitUtils.isValidRepositoryURL;
import com.cloudbees.plugins.credentials.CredentialsParameterValue;
import hudson.EnvVars;
@@ -48,6 +44,7 @@ of this software and associated documentation files (the "Software"), to deal
import hudson.model.Job;
import hudson.model.ParameterValue;
import hudson.model.ParametersAction;
+import hudson.model.Result;
import hudson.model.Run;
import hudson.model.StringParameterValue;
import hudson.model.TaskListener;
@@ -66,16 +63,17 @@ of this software and associated documentation files (the "Software"), to deal
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
+import javax.annotation.Nullable;
import net.sf.json.JSONObject;
import org.apache.commons.lang.StringUtils;
+import org.datadog.jenkins.plugins.datadog.DatadogGlobalConfiguration;
import org.datadog.jenkins.plugins.datadog.DatadogUtilities;
import org.datadog.jenkins.plugins.datadog.traces.BuildSpanAction;
import org.datadog.jenkins.plugins.datadog.traces.BuildSpanManager;
import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan;
-import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings;
import org.datadog.jenkins.plugins.datadog.util.TagsUtil;
import org.datadog.jenkins.plugins.datadog.util.git.GitUtils;
-import org.jenkinsci.plugins.gitclient.GitClient;
+import org.jenkinsci.plugins.workflow.cps.EnvActionImpl;
public class BuildData implements Serializable {
@@ -138,68 +136,106 @@ public class BuildData implements Serializable {
private String traceId;
private String spanId;
- @SuppressFBWarnings("NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE")
- public BuildData(Run run, TaskListener listener) throws IOException, InterruptedException {
+ public BuildData(Run<?, ?> run, @Nullable TaskListener listener) throws IOException, InterruptedException {
if (run == null) {
return;
}
- EnvVars envVars;
- if(listener != null){
- envVars = run.getEnvironment(listener);
- }else{
- envVars = run.getEnvironment(new LogTaskListener(LOGGER, Level.INFO));
- }
+ EnvVars envVars = getEnvVars(run, listener);
+
+ this.tags = DatadogUtilities.getBuildTags(run, envVars);
- setTags(DatadogUtilities.getBuildTags(run, envVars));
+ this.buildUrl = envVars.get("BUILD_URL");
+ if (buildUrl == null) {
+ BuildSpanAction buildSpanAction = run.getAction(BuildSpanAction.class);
+ if (buildSpanAction != null) {
+ buildUrl = buildSpanAction.getBuildUrl();
+ }
+ }
// Populate instance using environment variables.
populateEnvVariables(envVars);
// Populate instance using Git info if possible.
// Set all Git commit related variables.
- if(isGit(envVars)){
- populateGitVariables(run, listener, envVars);
- }
+ populateGitVariables(run);
// Populate instance using run instance
// Set StartTime, EndTime and Duration
- long startTimeInMs = run.getStartTimeInMillis();
- setStartTime(startTimeInMs);
+ this.startTime = run.getStartTimeInMillis();
long durationInMs = run.getDuration();
- if (durationInMs == 0 && startTimeInMs != 0) {
- durationInMs = System.currentTimeMillis() - startTimeInMs;
+ if (durationInMs == 0 && run.getStartTimeInMillis() != 0) {
+ durationInMs = System.currentTimeMillis() - run.getStartTimeInMillis();
}
- setDuration(durationInMs);
- if (durationInMs != 0 && startTimeInMs != 0) {
- Long endTimeInMs = startTimeInMs + durationInMs;
- setEndTime(endTimeInMs);
+ this.duration = durationInMs;
+ if (durationInMs != 0 && run.getStartTimeInMillis() != 0) {
+ this.endTime = run.getStartTimeInMillis() + durationInMs;
}
// Set Jenkins Url
- setJenkinsUrl(DatadogUtilities.getJenkinsUrl());
+ this.jenkinsUrl = DatadogUtilities.getJenkinsUrl();
// Set UserId
- setUserId(getUserId(run));
+ this.userId = getUserId(run);
// Set UserEmail
if(StringUtils.isEmpty(getUserEmail(""))){
- setUserEmail(getUserEmailByUserId(getUserId()));
+ this.userEmail = getUserEmailByUserId(getUserId());
}
// Set Result and completed status
- setResult(run.getResult() == null ? null : run.getResult().toString());
- setCompleted(run.getResult() != null && run.getResult().completeBuild);
+ Result runResult = run.getResult();
+ if (runResult != null) {
+ this.result = runResult.toString();
+ this.isCompleted = runResult.completeBuild;
+ } else {
+ this.result = null;
+ this.isCompleted = false;
+ }
// Set Build Number
- setBuildNumber(String.valueOf(run.getNumber()));
- // Set Hostname
- setHostname(DatadogUtilities.getHostname(envVars));
+ this.buildNumber = String.valueOf(run.getNumber());
+
+ final PipelineNodeInfoAction pipelineInfo = run.getAction(PipelineNodeInfoAction.class);
+ if (pipelineInfo != null && pipelineInfo.getNodeName() != null) {
+ this.nodeName = pipelineInfo.getNodeName();
+ } else {
+ this.nodeName = envVars.get("NODE_NAME");
+ }
+
+ if (pipelineInfo != null && pipelineInfo.getNodeHostname() != null) {
+ // using the hostname determined during a pipeline step execution
+ // (this option is only available for pipelines, and not for freestyle builds)
+ this.hostname = pipelineInfo.getNodeHostname();
+ } else if (DatadogUtilities.isMainNode(nodeName)) {
+ // the job is run on the master node, checking plugin config and locally available info.
+ // (nodeName == null) condition is there to preserve existing behavior
+ this.hostname = DatadogUtilities.getHostname(envVars);
+ } else if (envVars.containsKey(DatadogGlobalConfiguration.DD_CI_HOSTNAME)) {
+ // the job is run on an agent node, querying DD_CI_HOSTNAME set explicitly on agent
+ this.hostname = envVars.get(DatadogGlobalConfiguration.DD_CI_HOSTNAME);
+ } else {
+ // the job is run on an agent node, querying HOSTNAME set implicitly on agent
+ this.hostname = envVars.get("HOSTNAME");
+ }
+
+ if (pipelineInfo != null && pipelineInfo.getWorkspace() != null) {
+ this.workspace = pipelineInfo.getWorkspace();
+ } else {
+ this.workspace = envVars.get("WORKSPACE");
+ }
+
+ PipelineQueueInfoAction action = run.getAction(PipelineQueueInfoAction.class);
+ if (action != null) {
+ this.millisInQueue = action.getQueueTimeMillis();
+ this.propagatedMillisInQueue = action.getPropagatedQueueTimeMillis();
+ }
+
// Save charset canonical name
- setCharset(run.getCharset());
+ this.charsetName = run.getCharset().name();
String baseJobName = getBaseJobName(run, envVars);
- setBaseJobName(normalizeJobName(baseJobName));
+ this.baseJobName = normalizeJobName(baseJobName);
String jobNameWithConfiguration = getJobName(run, envVars);
- setJobName(normalizeJobName(jobNameWithConfiguration));
+ this.jobName = normalizeJobName(jobNameWithConfiguration);
// Set Jenkins Url
String jenkinsUrl = DatadogUtilities.getJenkinsUrl();
@@ -207,7 +243,7 @@ public BuildData(Run run, TaskListener listener) throws IOException, Interrupted
&& !envVars.get("JENKINS_URL").isEmpty()) {
jenkinsUrl = envVars.get("JENKINS_URL");
}
- setJenkinsUrl(jenkinsUrl);
+ this.jenkinsUrl = jenkinsUrl;
// Build parameters
populateBuildParameters(run);
@@ -215,14 +251,29 @@ public BuildData(Run run, TaskListener listener) throws IOException, Interrupted
// Set Tracing IDs
final TraceSpan buildSpan = BuildSpanManager.get().get(getBuildTag(""));
if(buildSpan !=null) {
- setTraceId(Long.toUnsignedString(buildSpan.context().getTraceId()));
- setSpanId(Long.toUnsignedString(buildSpan.context().getSpanId()));
+ this.traceId = Long.toUnsignedString(buildSpan.context().getTraceId());
+ this.spanId = Long.toUnsignedString(buildSpan.context().getSpanId());
}
+ }
+
+ private static EnvVars getEnvVars(Run<?, ?> run, TaskListener listener) throws IOException, InterruptedException {
+ EnvVars mergedVars = new EnvVars();
- BuildSpanAction buildSpanAction = run.getAction(BuildSpanAction.class);
- if (buildSpanAction != null) {
- getMissingGitValuesFrom(buildSpanAction.getBuildData());
+ List<EnvActionImpl> envActions = run.getActions(EnvActionImpl.class);
+ for (EnvActionImpl envAction : envActions) {
+ EnvVars environment = envAction.getEnvironment();
+ mergedVars.putAll(environment);
}
+
+ Map<String, String> envVars;
+ if(listener != null){
+ envVars = run.getEnvironment(listener);
+ }else{
+ envVars = run.getEnvironment(new LogTaskListener(LOGGER, Level.INFO));
+ }
+ mergedVars.putAll(envVars);
+
+ return mergedVars;
}
private static String getBaseJobName(Run run, EnvVars envVars) {
@@ -270,45 +321,6 @@ private static String getJobName(Run run, EnvVars envVars) {
return "unknown";
}
- private void getMissingGitValuesFrom(BuildData previousData) {
- if (branch == null) {
- branch = previousData.branch;
- }
- if (gitUrl == null) {
- gitUrl = previousData.gitUrl;
- }
- if (gitCommit == null) {
- gitCommit = previousData.gitCommit;
- }
- if (gitMessage == null) {
- gitMessage = previousData.gitMessage;
- }
- if (gitAuthorName == null) {
- gitAuthorName = previousData.gitAuthorName;
- }
- if (gitAuthorEmail == null) {
- gitAuthorEmail = previousData.gitAuthorEmail;
- }
- if (gitAuthorDate == null) {
- gitAuthorDate = previousData.gitAuthorDate;
- }
- if (gitCommitterName == null) {
- gitCommitterName = previousData.gitCommitterName;
- }
- if (gitCommitterEmail == null) {
- gitCommitterEmail = previousData.gitCommitterEmail;
- }
- if (gitCommitterDate == null) {
- gitCommitterDate = previousData.gitCommitterDate;
- }
- if (gitDefaultBranch == null) {
- gitDefaultBranch = previousData.gitDefaultBranch;
- }
- if (gitTag == null) {
- gitTag = previousData.gitTag;
- }
- }
-
private void populateBuildParameters(Run<?,?> run) {
// Build parameters can be defined via Jenkins UI
// or via Jenkinsfile (https://www.jenkins.io/doc/book/pipeline/syntax/#parameters)
@@ -342,106 +354,75 @@ private void populateEnvVariables(EnvVars envVars){
if (envVars == null) {
return;
}
- setBuildId(envVars.get("BUILD_ID"));
- setBuildUrl(envVars.get("BUILD_URL"));
- setNodeName(envVars.get("NODE_NAME"));
+ this.buildId = envVars.get("BUILD_ID");
String envBuildTag = envVars.get("BUILD_TAG");
if (StringUtils.isNotBlank(envBuildTag)) {
- setBuildTag(envBuildTag);
+ this.buildTag = envBuildTag;
} else {
- setBuildTag("jenkins-" + envVars.get("JOB_NAME") + "-" + envVars.get("BUILD_NUMBER"));
+ this.buildTag = "jenkins-" + envVars.get("JOB_NAME") + "-" + envVars.get("BUILD_NUMBER");
}
- setExecutorNumber(envVars.get("EXECUTOR_NUMBER"));
- setJavaHome(envVars.get("JAVA_HOME"));
- setWorkspace(envVars.get("WORKSPACE"));
+ this.executorNumber = envVars.get("EXECUTOR_NUMBER");
+ this.javaHome = envVars.get("JAVA_HOME");
if (isGit(envVars)) {
- setBranch(GitUtils.resolveGitBranch(envVars, null));
- setGitUrl(GitUtils.resolveGitRepositoryUrl(envVars, null));
- setGitCommit(GitUtils.resolveGitCommit(envVars, null));
- setGitTag(GitUtils.resolveGitTag(envVars, null));
+ this.branch = GitUtils.resolveGitBranch(envVars);
+ this.gitUrl = GitUtils.resolveGitRepositoryUrl(envVars);
+ this.gitCommit = GitUtils.resolveGitCommit(envVars);
+ this.gitTag = GitUtils.resolveGitTag(envVars);
// Git data supplied by the user has prevalence. We set them first.
// Only the data that has not been set will be updated later.
// If any value is not provided, we maintained the original value if any.
- setGitMessage(envVars.get(DD_GIT_COMMIT_MESSAGE, this.gitMessage));
- setGitAuthorName(envVars.get(DD_GIT_COMMIT_AUTHOR_NAME, this.gitAuthorName));
- setGitAuthorEmail(envVars.get(DD_GIT_COMMIT_AUTHOR_EMAIL, this.gitAuthorEmail));
- setGitAuthorDate(envVars.get(DD_GIT_COMMIT_AUTHOR_DATE, this.gitAuthorDate));
- setGitCommitterName(envVars.get(DD_GIT_COMMIT_COMMITTER_NAME, this.gitCommitterName));
- setGitCommitterEmail(envVars.get(DD_GIT_COMMIT_COMMITTER_EMAIL, this.gitCommitterEmail));
- setGitCommitterDate(envVars.get(DD_GIT_COMMIT_COMMITTER_DATE, this.gitCommitterDate));
+ this.gitMessage = envVars.get(DD_GIT_COMMIT_MESSAGE, this.gitMessage);
+ this.gitAuthorName = envVars.get(DD_GIT_COMMIT_AUTHOR_NAME, this.gitAuthorName);
+ this.gitAuthorEmail = envVars.get(DD_GIT_COMMIT_AUTHOR_EMAIL, this.gitAuthorEmail);
+ this.gitAuthorDate = envVars.get(DD_GIT_COMMIT_AUTHOR_DATE, this.gitAuthorDate);
+ this.gitCommitterName = envVars.get(DD_GIT_COMMIT_COMMITTER_NAME, this.gitCommitterName);
+ this.gitCommitterEmail = envVars.get(DD_GIT_COMMIT_COMMITTER_EMAIL, this.gitCommitterEmail);
+ this.gitCommitterDate = envVars.get(DD_GIT_COMMIT_COMMITTER_DATE, this.gitCommitterDate);
} else if (envVars.get("CVS_BRANCH") != null) {
- setBranch(envVars.get("CVS_BRANCH"));
+ this.branch = envVars.get("CVS_BRANCH");
}
- setPromotedUrl(envVars.get("PROMOTED_URL"));
- setPromotedJobName(envVars.get("PROMOTED_JOB_NAME"));
- setPromotedNumber(envVars.get("PROMOTED_NUMBER"));
- setPromotedId(envVars.get("PROMOTED_ID"));
- setPromotedTimestamp(envVars.get("PROMOTED_TIMESTAMP"));
- setPromotedUserName(envVars.get("PROMOTED_USER_NAME"));
- setPromotedUserId(envVars.get("PROMOTED_USER_ID"));
- setPromotedJobFullName(envVars.get("PROMOTED_JOB_FULL_NAME"));
+ this.promotedUrl = envVars.get("PROMOTED_URL");
+ this.promotedJobName = envVars.get("PROMOTED_JOB_NAME");
+ this.promotedNumber = envVars.get("PROMOTED_NUMBER");
+ this.promotedId = envVars.get("PROMOTED_ID");
+ this.promotedTimestamp = envVars.get("PROMOTED_TIMESTAMP");
+ this.promotedUserName = envVars.get("PROMOTED_USER_NAME");
+ this.promotedUserId = envVars.get("PROMOTED_USER_ID");
+ this.promotedJobFullName = envVars.get("PROMOTED_JOB_FULL_NAME");
}
-
/**
* Populate git commit related information in the BuildData instance.
- * @param run
- * @param listener
- * @param envVars
+ * The data is retrieved from {@link GitRepositoryAction} and {@link GitCommitAction} that are associated with the build.
+ * The actions are populated from two main sources:
+ *
+ * - Environment variables of specific pipeline steps:
+ * pipeline object has its own set of env variables, but it is minimal;
+ * the whole set of env variables
+ * (including those that are set by Jenkins Git Plugin or manually by the pipeline authors)
+ * is only available for individual pipeline steps.
+ * That is why in {@link org.datadog.jenkins.plugins.datadog.listeners.DatadogStepListener}
+ * we examine the full set of env vars to see if we can extract any git-related info
+ * - Git repositories that were checked out during pipeline execution:
+ * {@link org.datadog.jenkins.plugins.datadog.listeners.DatadogSCMListener} is notified of every source-code checkout.
+ * If the checked out repo is a git repository, we create a git client and examine repository metadata
+ *
*/
- private void populateGitVariables(Run<?,?> run, TaskListener listener, EnvVars envVars) {
- // First we obtain the actions to check if the Git information was already calculated.
- // If so, we want to use this information to avoid creating a new Git client instance
- // to calculate the same information.
-
- boolean commitInfoAlreadyCreated = isCommitInfoAlreadyCreated(run, this.gitCommit);
- boolean repositoryInfoAlreadyCreated = isRepositoryInfoAlreadyCreated(run, this.gitUrl);
-
+ private void populateGitVariables(Run<?,?> run) {
GitRepositoryAction gitRepositoryAction = run.getAction(GitRepositoryAction.class);
- if(repositoryInfoAlreadyCreated){
- this.gitDefaultBranch = gitRepositoryAction.getDefaultBranch();
- }
-
- final GitCommitAction gitCommitAction = run.getAction(GitCommitAction.class);
- if(commitInfoAlreadyCreated){
- populateCommitInfo(gitCommitAction);
- }
-
- // If all Git info was already calculated, we finish the method here.
- if(repositoryInfoAlreadyCreated && commitInfoAlreadyCreated) {
- return;
- }
-
- // At this point, there is some Git information that we need to calculate.
- // We use the same Git client instance to calculate all git information
- // because creating a Git client is a very expensive operation.
- // Create a new Git client is a very expensive operation.
- // Avoid creating Git clients as much as possible.
- if(!isValidCommit(gitCommit) && !isValidRepositoryURL(this.gitUrl)) {
- return;
- }
+ populateRepositoryInfo(gitRepositoryAction);
- final GitClient gitClient = GitUtils.newGitClient(run, listener, envVars, this.nodeName, this.workspace);
- if(isValidCommit(this.gitCommit)){
- populateCommitInfo(GitUtils.buildGitCommitAction(run, gitClient, this.gitCommit));
- }
-
- if(isValidRepositoryURL(this.gitUrl)){
- gitRepositoryAction = GitUtils.buildGitRepositoryAction(run, gitClient, envVars, this.gitUrl);
- if(gitRepositoryAction != null) {
- this.gitDefaultBranch = gitRepositoryAction.getDefaultBranch();
- }
- }
+ GitCommitAction gitCommitAction = run.getAction(GitCommitAction.class);
+ populateCommitInfo(gitCommitAction);
}
/**
* Populate the information related to the commit (message, author and committer) based on the GitCommitAction
* only if the user has not set the value manually.
- * @param gitCommitAction
*/
private void populateCommitInfo(GitCommitAction gitCommitAction) {
if(gitCommitAction != null) {
@@ -449,32 +430,67 @@ private void populateCommitInfo(GitCommitAction gitCommitAction) {
// the user supplied the value manually
// via environment variables.
+ String existingCommit = getGitCommit("");
+ if (!existingCommit.isEmpty() && !existingCommit.equals(gitCommitAction.getCommit())) {
+ // user-supplied commit is different
+ return;
+ }
+
+ if(existingCommit.isEmpty()){
+ this.gitCommit = gitCommitAction.getCommit();
+ }
+
+ if(getGitTag("").isEmpty()){
+ this.gitTag = gitCommitAction.getTag();
+ }
+
if(getGitMessage("").isEmpty()){
- setGitMessage(gitCommitAction.getMessage());
+ this.gitMessage = gitCommitAction.getMessage();
}
if(getGitAuthorName("").isEmpty()){
- setGitAuthorName(gitCommitAction.getAuthorName());
+ this.gitAuthorName = gitCommitAction.getAuthorName();
}
if(getGitAuthorEmail("").isEmpty()) {
- setGitAuthorEmail(gitCommitAction.getAuthorEmail());
+ this.gitAuthorEmail = gitCommitAction.getAuthorEmail();
}
if(getGitAuthorDate("").isEmpty()){
- setGitAuthorDate(gitCommitAction.getAuthorDate());
+ this.gitAuthorDate = gitCommitAction.getAuthorDate();
}
if(getGitCommitterName("").isEmpty()){
- setGitCommitterName(gitCommitAction.getCommitterName());
+ this.gitCommitterName = gitCommitAction.getCommitterName();
}
if(getGitCommitterEmail("").isEmpty()){
- setGitCommitterEmail(gitCommitAction.getCommitterEmail());
+ this.gitCommitterEmail = gitCommitAction.getCommitterEmail();
}
if(getGitCommitterDate("").isEmpty()){
- setGitCommitterDate(gitCommitAction.getCommitterDate());
+ this.gitCommitterDate = gitCommitAction.getCommitterDate();
+ }
+ }
+ }
+
+ private void populateRepositoryInfo(GitRepositoryAction gitRepositoryAction) {
+ if (gitRepositoryAction != null) {
+ if (gitUrl != null && !gitUrl.isEmpty() && !gitUrl.equals(gitRepositoryAction.getRepositoryURL())) {
+ // user-supplied URL is different
+ return;
+ }
+
+ if (gitUrl == null || gitUrl.isEmpty()) {
+ gitUrl = gitRepositoryAction.getRepositoryURL();
+ }
+
+ if (gitDefaultBranch == null || gitDefaultBranch.isEmpty()) {
+ gitDefaultBranch = gitRepositoryAction.getDefaultBranch();
+ }
+
+ if (branch == null || branch.isEmpty()) {
+ this.branch = gitRepositoryAction.getBranch();
}
}
}
@@ -558,35 +574,18 @@ public String getJobName(String value) {
return defaultIfNull(jobName, value);
}
- public void setJobName(String jobName) {
- this.jobName = jobName;
- }
-
public String getBaseJobName(String value) {
return defaultIfNull(baseJobName, value);
}
- public void setBaseJobName(String baseJobName) {
- this.baseJobName = baseJobName;
- }
-
public String getResult(String value) {
return defaultIfNull(result, value);
}
- public void setResult(String result) {
- this.result = result;
- }
-
public boolean isCompleted() {
return isCompleted;
}
- public void setCompleted(boolean completed) {
- this.isCompleted = completed;
- }
-
-
public String getHostname(String value) {
return defaultIfNull(hostname, value);
}
@@ -599,10 +598,6 @@ public String getBuildUrl(String value) {
return defaultIfNull(buildUrl, value);
}
- public void setBuildUrl(String buildUrl) {
- this.buildUrl = buildUrl;
- }
-
public Charset getCharset() {
if (charsetName != null) {
// Will throw an exception if there is an issue with
@@ -612,12 +607,6 @@ public Charset getCharset() {
return Charset.defaultCharset();
}
- public void setCharset(Charset charset) {
- if (charset != null) {
- this.charsetName = charset.name();
- }
- }
-
public Map<String, String> getBuildParameters() {
return this.buildParameters;
}
@@ -626,82 +615,38 @@ public String getNodeName(String value) {
return defaultIfNull(nodeName, value);
}
- public void setNodeName(String nodeName) {
- this.nodeName = nodeName;
- }
-
public String getBranch(String value) {
return defaultIfNull(branch, value);
}
- public void setBranch(String branch) {
- this.branch = branch;
- }
-
public String getBuildNumber(String value) {
return defaultIfNull(buildNumber, value);
}
- public void setBuildNumber(String buildNumber) {
- this.buildNumber = buildNumber;
- }
-
public Long getDuration(Long value) {
return defaultIfNull(duration, value);
}
- public void setDuration(Long duration) {
- this.duration = duration;
- }
-
public Long getEndTime(Long value) {
return defaultIfNull(endTime, value);
}
- public void setEndTime(Long endTime) {
- this.endTime = endTime;
- }
-
public Long getStartTime(Long value) {
return defaultIfNull(startTime, value);
}
- public void setStartTime(Long startTime) {
- this.startTime = startTime;
- }
-
public Long getMillisInQueue(Long value) {
return defaultIfNull(millisInQueue, value);
}
- public void setMillisInQueue(Long millisInQueue) {
- this.millisInQueue = millisInQueue;
- }
-
public Long getPropagatedMillisInQueue(Long value) {
return defaultIfNull(propagatedMillisInQueue, value);
}
- public void setPropagatedMillisInQueue(Long propagatedMillisInQueue) {
- this.propagatedMillisInQueue = propagatedMillisInQueue;
- }
-
- public String getBuildId(String value) {
- return defaultIfNull(buildId, value);
- }
-
- public void setBuildId(String buildId) {
- this.buildId = buildId;
- }
-
public String getBuildTag(String value) {
return defaultIfNull(buildTag, value);
}
- public void setBuildTag(String buildTag) {
- this.buildTag = buildTag;
- }
-
public String getJenkinsUrl(String value) {
return defaultIfNull(jenkinsUrl, value);
}
@@ -714,186 +659,58 @@ public String getExecutorNumber(String value) {
return defaultIfNull(executorNumber, value);
}
- public void setExecutorNumber(String executorNumber) {
- this.executorNumber = executorNumber;
- }
-
- public String getJavaHome(String value) {
- return defaultIfNull(javaHome, value);
- }
-
- public void setJavaHome(String javaHome) {
- this.javaHome = javaHome;
- }
-
public String getWorkspace(String value) {
return defaultIfNull(workspace, value);
}
- public void setWorkspace(String workspace) {
- this.workspace = workspace;
- }
-
public String getGitUrl(String value) {
return defaultIfNull(gitUrl, value);
}
- public void setGitUrl(String gitUrl) {
- this.gitUrl = gitUrl;
- }
-
public String getGitCommit(String value) {
return defaultIfNull(gitCommit, value);
}
- public void setGitCommit(String gitCommit) {
- this.gitCommit = gitCommit;
- }
-
public String getGitMessage(String value) {
return defaultIfNull(gitMessage, value);
}
- public void setGitMessage(String gitMessage) {
- this.gitMessage = gitMessage;
- }
-
public String getGitAuthorName(final String value) {
return defaultIfNull(gitAuthorName, value);
}
- public void setGitAuthorName(String gitAuthorName) {
- this.gitAuthorName = gitAuthorName;
- }
-
public String getGitAuthorEmail(final String value) {
return defaultIfNull(gitAuthorEmail, value);
}
- public void setGitAuthorEmail(String gitAuthorEmail) {
- this.gitAuthorEmail = gitAuthorEmail;
- }
-
public String getGitCommitterName(final String value) {
return defaultIfNull(gitCommitterName, value);
}
- public void setGitCommitterName(String gitCommitterName) {
- this.gitCommitterName = gitCommitterName;
- }
-
public String getGitCommitterEmail(final String value) {
return defaultIfNull(gitCommitterEmail, value);
}
- public void setGitCommitterEmail(String gitCommitterEmail) {
- this.gitCommitterEmail = gitCommitterEmail;
- }
-
public String getGitAuthorDate(final String value) {
return defaultIfNull(gitAuthorDate, value);
}
- public void setGitAuthorDate(String gitAuthorDate) {
- this.gitAuthorDate = gitAuthorDate;
- }
-
public String getGitCommitterDate(final String value) {
return defaultIfNull(gitCommitterDate, value);
}
- public void setGitCommitterDate(String gitCommitterDate) {
- this.gitCommitterDate = gitCommitterDate;
- }
-
public String getGitDefaultBranch(String value) {
return defaultIfNull(gitDefaultBranch, value);
}
- public void setGitDefaultBranch(String gitDefaultBranch) {
- this.gitDefaultBranch = gitDefaultBranch;
- }
-
public String getGitTag(String value) {
return defaultIfNull(gitTag, value);
}
- public void setGitTag(String gitTag) {
- this.gitTag = gitTag;
- }
-
- public String getPromotedUrl(String value) {
- return defaultIfNull(promotedUrl, value);
- }
-
- public void setPromotedUrl(String promotedUrl) {
- this.promotedUrl = promotedUrl;
- }
-
- public String getPromotedJobName(String value) {
- return defaultIfNull(promotedJobName, value);
- }
-
- public void setPromotedJobName(String promotedJobName) {
- this.promotedJobName = promotedJobName;
- }
-
- public String getPromotedNumber(String value) {
- return defaultIfNull(promotedNumber, value);
- }
-
- public void setPromotedNumber(String promotedNumber) {
- this.promotedNumber = promotedNumber;
- }
-
- public String getPromotedId(String value) {
- return defaultIfNull(promotedId, value);
- }
-
- public void setPromotedId(String promotedId) {
- this.promotedId = promotedId;
- }
-
- public String getPromotedTimestamp(String value) {
- return defaultIfNull(promotedTimestamp, value);
- }
-
- public void setPromotedTimestamp(String promotedTimestamp) {
- this.promotedTimestamp = promotedTimestamp;
- }
-
- public String getPromotedUserName(String value) {
- return defaultIfNull(promotedUserName, value);
- }
-
- public void setPromotedUserName(String promotedUserName) {
- this.promotedUserName = promotedUserName;
- }
-
- public String getPromotedUserId(String value) {
- return defaultIfNull(promotedUserId, value);
- }
-
- public void setPromotedUserId(String promotedUserId) {
- this.promotedUserId = promotedUserId;
- }
-
- public String getPromotedJobFullName(String value) {
- return defaultIfNull(promotedJobFullName, value);
- }
-
- public void setPromotedJobFullName(String promotedJobFullName) {
- this.promotedJobFullName = promotedJobFullName;
- }
-
public String getUserId() {
return userId;
}
- public void setUserId(String userId) {
- this.userId = userId;
- }
-
private String getUserId(Run run) {
if (promotedUserId != null){
return promotedUserId;
@@ -972,18 +789,6 @@ private String getUserEmailByUserId(String userId) {
}
}
- public void setUserEmail(final String userEmail) {
- this.userEmail = userEmail;
- }
-
- public void setTraceId(String traceId) {
- this.traceId = traceId;
- }
-
- public void setSpanId(String spanId) {
- this.spanId = spanId;
- }
-
public JSONObject addLogAttributes(){
JSONObject payload = new JSONObject();
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildPipeline.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildPipeline.java
deleted file mode 100644
index c13f7ea95..000000000
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildPipeline.java
+++ /dev/null
@@ -1,230 +0,0 @@
-package org.datadog.jenkins.plugins.datadog.model;
-
-import static org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode.BuildPipelineNodeKey;
-
-import org.datadog.jenkins.plugins.datadog.traces.CITags;
-import org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode;
-import org.jenkinsci.plugins.workflow.graph.BlockEndNode;
-import org.jenkinsci.plugins.workflow.graph.BlockStartNode;
-import org.jenkinsci.plugins.workflow.graph.FlowNode;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Represents a Jenkins Pipeline.
- * The stages are represented using an n-ary tree.
- */
-public class BuildPipeline {
-
- private final Map, BuildPipelineNode> stagesByPath;
- private BuildPipelineNode root;
-
- public BuildPipeline() {
- this.stagesByPath = new HashMap<>();
- this.root = new BuildPipelineNode("initial", "initial");
- }
-
- public BuildPipelineNode add(final FlowNode node) {
- final BuildPipelineNode buildNode = buildPipelineNode(node);
- if(buildNode == null) {
- return null;
- }
-
- final List buildNodeRelations = new ArrayList<>();
- buildNodeRelations.add(buildNode.getKey());
- for (final BlockStartNode startNode : node.iterateEnclosingBlocks()) {
- buildNodeRelations.add(new BuildPipelineNodeKey(startNode.getId(), startNode.getDisplayName()));
- }
-
- Collections.reverse(buildNodeRelations);
- return stagesByPath.put(buildNodeRelations, buildNode);
- }
-
- private BuildPipelineNode buildPipelineNode(FlowNode node) {
- if(node instanceof BlockEndNode) {
- return new BuildPipelineNode((BlockEndNode) node);
- } else if(node instanceof StepAtomNode) {
- return new BuildPipelineNode((StepAtomNode) node);
- }
- return null;
- }
-
- /**
- * Reconstruct a Jenkins pipeline tree from the info gathered in the {@code DatadogGraphListener}.
- * Example:
- * Starting from the stagesByPath:
- * Key: (Stage1, Stage2) - Value: (Stage2)
- * Key: (Stage1, Stage2, Stage3) - Value: (Stage3)
- * Key: (Stage1) - Value: (Stage1)
- * it will be returned the following tree:
- * root
- * -- stage1
- * -- stage2
- * -- stage3
- * @return the build pipeline tree.
- **/
- public BuildPipelineNode buildTree() {
- for(Map.Entry, BuildPipelineNode> entry : stagesByPath.entrySet()){
- final List pathStages = entry.getKey();
- final BuildPipelineNode stage = entry.getValue();
- buildTree(pathStages, root, stage);
- }
-
- sortSiblingsByStartTime(root.getChildren());
- completeInformation(root.getChildren(), root);
- assignPipelineToRootNode(root);
- return root;
- }
-
- private void assignPipelineToRootNode(BuildPipelineNode root) {
- final List children = root.getChildren();
- if(children.size() == 1) {
- this.root = children.get(0);
- }
- }
-
- private void sortSiblingsByStartTime(List stages) {
- for(BuildPipelineNode stage : stages) {
- sortSiblingsByStartTime(stage.getChildren());
- }
- stages.sort(new BuildPipelineNode.BuildPipelineNodeComparator());
- }
-
- private void completeInformation(final List nodes, final BuildPipelineNode parent) {
- for(int i = 0; i < nodes.size(); i++) {
- final BuildPipelineNode node = nodes.get(i);
- final Long endTime = node.getEndTime();
- if(endTime == -1L) {
- if(i + 1 < nodes.size()) {
- final BuildPipelineNode sibling = nodes.get(i + 1);
- node.setEndTime(sibling.getStartTime());
- } else {
- node.setEndTime(parent.getEndTime());
- }
- }
-
- // Propagate Stage Name to its children
- if(!BuildPipelineNode.NodeType.STAGE.equals(node.getType())) {
- if(BuildPipelineNode.NodeType.STAGE.equals(parent.getType())) {
- node.setStageName(parent.getName());
- node.setStageId(parent.getId());
- } else if(parent.getStageName() != null){
- node.setStageName(parent.getStageName());
- node.setStageId(parent.getStageId());
- }
- }
-
- // Propagate queue time from "Allocate node" child:
- // If the node is the initial (Start of Pipeline) or is a Stage,
- // we need to propagate the queue time stored in its child node ("Allocate node").
- // This is necessary because the stage/pipeline node does not have the queue time itself,
- // but it's stored in the "Allocate node" which is its child.
- if((node.isInitial() || BuildPipelineNode.NodeType.STAGE.equals(node.getType())) && node.getChildren().size() == 1){
- BuildPipelineNode child = node.getChildren().get(0);
- if(child.getName().contains("Allocate node")) {
- node.setPropagatedNanosInQueue(child.getNanosInQueue());
- }
- }
-
-
- // Propagate worker node name from the executable child node
- // (where the worker node info is available) to its stage.
- if(BuildPipelineNode.NodeType.STAGE.equals(node.getType())) {
- final BuildPipelineNode executableChildNode = searchExecutableChildNode(node);
- if(executableChildNode != null) {
- node.setNodeName(executableChildNode.getNodeName());
- node.setNodeLabels(executableChildNode.getNodeLabels());
- node.setNodeHostname(executableChildNode.getNodeHostname());
- }
- }
-
- // Propagate error to all parent stages
- if(node.isError() && !parent.isError()) {
- propagateResultToAllParents(node, CITags.STATUS_ERROR, false);
- } else if(node.isUnstable() && !parent.isUnstable()) {
- propagateResultToAllParents(node, CITags.STATUS_UNSTABLE, false);
- }
-
- // Notice we cannot propagate the worker node info
- // to the root span at this point, because this method is executed
- // after the root span is sent. To propagate worker node info
- // to the root span, we use the PipelineNodeInfoAction that is populated
- // in the DatadogStepListener class.
-
- completeInformation(node.getChildren(), node);
- }
- }
-
- private void propagateResultToAllParents(BuildPipelineNode node, String result, boolean stopAtFirstNonInternalNode) {
- // propagating "error" status is different from propagating "unstable",
- // since error can be caught and suppressed
- if (CITags.STATUS_ERROR.equals(result)) {
- if (node.getCatchErrorResult() != null) {
- // encountered a "catchError" or a "warnError" block;
- // will propagate the updated result to the first visible (non-internal) node, and then stop
- result = node.getCatchErrorResult();
- stopAtFirstNonInternalNode = true;
- } else if (node.getErrorObj() == null && !stopAtFirstNonInternalNode) {
- // most likely a "catch" block in a scripted pipeline
- return;
- }
- }
-
- node.setResult(result);
-
- if (!node.isInternal() && stopAtFirstNonInternalNode) {
- return;
- }
-
- for(BuildPipelineNode parent : node.getParents()) {
- propagateResultToAllParents(parent, result, stopAtFirstNonInternalNode);
- }
- }
-
- private BuildPipelineNode searchExecutableChildNode(BuildPipelineNode node) {
- if(!node.isInternal() && BuildPipelineNode.NodeType.STEP.equals(node.getType())){
- return node;
- }else if ("Stage : Start".equalsIgnoreCase(node.getName())) {
- // If we find a "Stage : Start" as child, we need to stop searching
- // because we're changing the Stage, so the executable child node
- // will not belong to the required stage.
- return null;
- } else {
- for(BuildPipelineNode child : node.getChildren()){
- final BuildPipelineNode found = searchExecutableChildNode(child);
- if(found != null) {
- return found;
- }
- }
- }
- return null;
- }
-
- private void buildTree(List pathStages, BuildPipelineNode parent, BuildPipelineNode stage) {
- if(pathStages.isEmpty()) {
- return;
- }
-
- final BuildPipelineNodeKey buildNodeKey = pathStages.get(0);
- if(pathStages.size() == 1){
- final BuildPipelineNode child = parent.getChild(buildNodeKey);
- if (child == null) {
- parent.addChild(stage);
- } else {
- child.updateData(stage);
- }
-
- } else {
- BuildPipelineNode child = parent.getChild(buildNodeKey);
- if(child == null) {
- child = new BuildPipelineNode(buildNodeKey);
- parent.addChild(child);
- }
- buildTree(pathStages.subList(1, pathStages.size()), child, stage);
- }
- }
-}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildPipelineNode.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildPipelineNode.java
deleted file mode 100644
index e77973bb2..000000000
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/BuildPipelineNode.java
+++ /dev/null
@@ -1,568 +0,0 @@
-package org.datadog.jenkins.plugins.datadog.model;
-
-import hudson.console.AnnotatedLargeText;
-import hudson.model.Run;
-import org.datadog.jenkins.plugins.datadog.DatadogUtilities;
-import org.datadog.jenkins.plugins.datadog.traces.CITags;
-import org.datadog.jenkins.plugins.datadog.traces.StepDataAction;
-import org.datadog.jenkins.plugins.datadog.traces.StepTraceDataAction;
-import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings;
-import org.jenkinsci.plugins.workflow.actions.ArgumentsAction;
-import org.jenkinsci.plugins.workflow.actions.ErrorAction;
-import org.jenkinsci.plugins.workflow.actions.LogAction;
-import org.jenkinsci.plugins.workflow.actions.TimingAction;
-import org.jenkinsci.plugins.workflow.actions.WarningAction;
-import org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode;
-import org.jenkinsci.plugins.workflow.graph.BlockEndNode;
-import org.jenkinsci.plugins.workflow.graph.BlockStartNode;
-import org.jenkinsci.plugins.workflow.graph.FlowNode;
-import org.jenkinsci.plugins.workflow.graph.StepNode;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-import java.util.logging.Logger;
-
-/**
- * Represent a stage of the Jenkins Pipeline.
- */
-public class BuildPipelineNode {
-
- private static final Logger logger = Logger.getLogger(BuildPipelineNode.class.getName());
-
- public enum NodeType {
- PIPELINE("ci.pipeline", "pipeline"),
- STAGE("ci.stage", "stage"),
- STEP("ci.job", "job");
-
- private final String tagName;
- private final String buildLevel;
-
- NodeType(final String tagName, final String buildLevel) {
- this.tagName = tagName;
- this.buildLevel = buildLevel;
- }
-
- public String getTagName() {
- return tagName;
- }
-
- public String getBuildLevel() {
- return buildLevel;
- }
- }
-
- private final BuildPipelineNodeKey key;
- private final List parents;
- private final List children;
- private final String id;
- private final String name;
- private String stageId;
- private String stageName;
-
- private NodeType type;
- private boolean internal;
- private boolean initial;
- private Map args = new HashMap<>();
- private Map envVars = new HashMap<>();
- private String workspace;
- private String nodeName;
- private Set nodeLabels;
- private String nodeHostname;
- private long startTime;
- private long startTimeMicros;
- private long endTime;
- private long endTimeMicros;
- private long nanosInQueue = -1L;
- private long propagatedNanosInQueue = -1L;
- private String result;
-
- // If the node is a `catchError` block, this field will contain the `stageResult` parameter
- private String catchErrorResult;
- // Throwable of the node.
- // Although the error flag was true, this can be null.
- private Throwable errorObj;
- private String unstableMessage;
-
- //Tracing
- private long spanId = -1L;
-
- public BuildPipelineNode(final String id, final String name) {
- this(new BuildPipelineNodeKey(id, name));
- }
-
- public BuildPipelineNode(final BuildPipelineNodeKey key) {
- this.key = key;
- this.parents = new ArrayList<>();
- this.children = new ArrayList<>();
- this.id = key.id;
- this.name = key.name;
- }
-
- public BuildPipelineNode(final BlockEndNode endNode) {
- final BlockStartNode startNode = endNode.getStartNode();
- this.key = new BuildPipelineNodeKey(startNode.getId(), startNode.getDisplayName());
- this.parents = new ArrayList<>();
- this.children = new ArrayList<>();
-
- this.id = startNode.getId();
- this.name = startNode.getDisplayName();
- if(DatadogUtilities.isPipelineNode(endNode)) {
- // The pipeline node must be treated as Step.
- // Only root span must have ci.pipeline.* tags.
- // https://datadoghq.atlassian.net/browse/CIAPP-190
- // The pipeline node is not the root span.
- // In Jenkins, the build span is the root span, and
- // the pipeline node span is a child of the build span.
- this.type = NodeType.STEP;
- this.internal = true;
- this.initial = true;
- } else if(DatadogUtilities.isStageNode(startNode)){
- this.type = NodeType.STAGE;
- this.internal = false;
- } else{
- this.type = NodeType.STEP;
- this.internal = true;
- }
-
- this.catchErrorResult = DatadogUtilities.getCatchErrorResult(startNode);
- this.args = ArgumentsAction.getFilteredArguments(startNode);
-
- if(endNode instanceof StepNode){
- final StepData stepData = getStepData(startNode);
- if(stepData != null) {
- this.envVars = stepData.getEnvVars();
- this.workspace = stepData.getWorkspace();
- this.nodeName = stepData.getNodeName();
- this.nodeHostname = stepData.getNodeHostname();
- this.nodeLabels = stepData.getNodeLabels();
- }
- }
-
- final FlowNodeQueueData queueData = getQueueData(startNode);
- if(queueData != null) {
- this.nanosInQueue = queueData.getNanosInQueue();
- }
-
- this.startTime = getTime(startNode);
- this.startTimeMicros = this.startTime * 1000;
- this.endTime = getTime(endNode);
- this.endTimeMicros = this.endTime * 1000;
- this.result = DatadogUtilities.getResultTag(startNode);
- this.errorObj = getErrorObj(endNode);
- this.unstableMessage = getUnstableMessage(startNode);
- }
-
- public BuildPipelineNode(final StepAtomNode stepNode) {
- this.key = new BuildPipelineNodeKey(stepNode.getId(), stepNode.getDisplayName());
- this.parents = new ArrayList<>();
- this.children = new ArrayList<>();
- this.internal = false;
- this.id = stepNode.getId();
- this.name = stepNode.getDisplayName();
- this.type = NodeType.STEP;
- this.args = ArgumentsAction.getFilteredArguments(stepNode);
-
- final StepData stepData = getStepData(stepNode);
- if(stepData != null) {
- this.envVars = stepData.getEnvVars();
- this.workspace = stepData.getWorkspace();
- this.nodeName = stepData.getNodeName();
- this.nodeHostname = stepData.getNodeHostname();
- this.nodeLabels = stepData.getNodeLabels();
- }
-
- final StepTraceData stepTraceData = getStepTraceData(stepNode);
- if(stepTraceData != null) {
- this.spanId = stepTraceData.getSpanId();
- }
-
- final FlowNodeQueueData queueData = getQueueData(stepNode);
- if(queueData != null) {
- this.nanosInQueue = queueData.getNanosInQueue();
- }
-
- this.startTime = getTime(stepNode);
- this.startTimeMicros = this.startTime * 1000;
- this.endTime = -1L;
- this.endTimeMicros = this.endTime * 1000;
- this.result = DatadogUtilities.getResultTag(stepNode);
- this.errorObj = getErrorObj(stepNode);
- this.unstableMessage = getUnstableMessage(stepNode);
- }
-
-
- public BuildPipelineNodeKey getKey() {
- return key;
- }
-
- public String getId() {
- return id;
- }
-
- public String getName() {
- return name;
- }
-
- public String getStageId() {
- return stageId;
- }
-
- public void setStageId(String stageId) {
- this.stageId = stageId;
- }
-
- public String getStageName() {
- return stageName;
- }
-
- public void setStageName(String stageName) {
- this.stageName = stageName;
- }
-
- public boolean isInternal() {
- return internal;
- }
-
- public boolean isInitial() {
- return initial;
- }
-
- public Map getArgs() {
- return args;
- }
-
- public Map getEnvVars() {
- return envVars;
- }
-
- public String getWorkspace() {
- return workspace;
- }
-
- public String getNodeName() {
- return nodeName;
- }
-
- public Set getNodeLabels() {
- return nodeLabels;
- }
-
- public void setNodeName(String propagatedNodeName) {
- this.nodeName = propagatedNodeName;
- }
-
- public void setNodeLabels(final Set propagatedNodeLabels) {
- this.nodeLabels = propagatedNodeLabels;
- }
-
- public String getNodeHostname() {
- return nodeHostname;
- }
-
- public void setNodeHostname(final String propagatedNodeHostname) {
- this.nodeHostname = propagatedNodeHostname;
- }
-
- public long getStartTime() {
- return startTime;
- }
-
- public long getEndTime() {
- return endTime;
- }
-
- public long getStartTimeMicros() {
- return startTimeMicros;
- }
-
- public long getEndTimeMicros() {
- return endTimeMicros;
- }
-
- public long getNanosInQueue() {
- return nanosInQueue;
- }
-
- public void setEndTime(long endTime) {
- this.endTime = endTime;
- this.endTimeMicros = TimeUnit.MILLISECONDS.toMicros(this.endTime);
- }
-
- public void setNanosInQueue(long nanosInQueue) {
- this.nanosInQueue = nanosInQueue;
- }
-
- public long getPropagatedNanosInQueue() {
- return propagatedNanosInQueue;
- }
-
- public void setPropagatedNanosInQueue(long propagatedNanosInQueue) {
- this.propagatedNanosInQueue = propagatedNanosInQueue;
- }
-
- public String getResult() {
- return result;
- }
-
- public void setResult(final String propagatedResult) {
- this.result = propagatedResult;
- }
-
- public Throwable getErrorObj() {
- return errorObj;
- }
-
- public String getUnstableMessage() {
- return unstableMessage;
- }
-
- public boolean isError() {
- return CITags.STATUS_ERROR.equalsIgnoreCase(this.result);
- }
-
- public boolean isUnstable() {
- return CITags.STATUS_UNSTABLE.equalsIgnoreCase(this.result);
- }
-
- public long getSpanId() {
- return spanId;
- }
-
- public List getParents(){ return parents; }
-
- public List getChildren() {
- return children;
- }
-
- public BuildPipelineNode getChild(final BuildPipelineNodeKey id) {
- if(children.isEmpty()) {
- return null;
- }
-
- for(final BuildPipelineNode child : children) {
- if(id.equals(child.getKey())){
- return child;
- }
- }
-
- return null;
- }
-
- public NodeType getType() {
- return type;
- }
-
- public String getCatchErrorResult() {
- return catchErrorResult;
- }
-
- // Used during the tree is being built in BuildPipeline class.
- public void updateData(final BuildPipelineNode buildNode) {
- this.stageName = buildNode.stageName;
- this.stageId = buildNode.stageId;
- this.type = buildNode.type;
- this.internal = buildNode.internal;
- this.initial = buildNode.initial;
- this.args = buildNode.args;
- this.envVars = buildNode.envVars;
- this.workspace = buildNode.workspace;
- this.nodeName = buildNode.nodeName;
- this.nodeHostname = buildNode.nodeHostname;
- this.nodeLabels = buildNode.nodeLabels;
- this.startTime = buildNode.startTime;
- this.startTimeMicros = buildNode.startTimeMicros;
- this.endTime = buildNode.endTime;
- this.endTimeMicros = buildNode.endTimeMicros;
- this.nanosInQueue = buildNode.nanosInQueue;
- this.result = buildNode.result;
- this.catchErrorResult = buildNode.catchErrorResult;
- this.errorObj = buildNode.errorObj;
- this.unstableMessage = buildNode.unstableMessage;
- this.parents.addAll(buildNode.parents);
- this.spanId = buildNode.spanId;
- }
-
- public void addChild(final BuildPipelineNode child) {
- children.add(child);
- child.parents.add(this);
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- BuildPipelineNode that = (BuildPipelineNode) o;
- return Objects.equals(key, that.key);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(key);
- }
-
-
- /**
- * Returns the startTime of a certain {@code FlowNode}, if it has time information.
- * @param flowNode
- * @return startTime of the flowNode in milliseconds.
- */
- private static long getTime(FlowNode flowNode) {
- TimingAction time = flowNode.getAction(TimingAction.class);
- if(time != null) {
- return time.getStartTime();
- }
- return -1L;
- }
-
- /**
- * Returns the accessor to the logs of a certain {@code FlowNode}, if it has logs.
- * @param flowNode
- * @return accessor to the flowNode logs.
- */
- private static AnnotatedLargeText getLogText(FlowNode flowNode) {
- final LogAction logAction = flowNode.getAction(LogAction.class);
- if(logAction != null) {
- return logAction.getLogText();
- }
- return null;
- }
-
- /**
- * Returns the {@code Throwable} of a certain {@code FlowNode}, if it has errors.
- * @param flowNode
- * @return throwable associated with a certain flowNode.
- */
- private static Throwable getErrorObj(FlowNode flowNode) {
- final ErrorAction errorAction = flowNode.getAction(ErrorAction.class);
- return (errorAction != null) ? errorAction.getError() : null;
- }
-
- /**
- * Returns the error message for unstable pipelines
- * @param flowNode
- * @return error message
- */
- private static String getUnstableMessage(FlowNode flowNode) {
- final WarningAction warningAction = flowNode.getAction(WarningAction.class);
- return (warningAction != null) ? warningAction.getMessage() : null;
- }
-
- @SuppressFBWarnings("NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE")
- private StepData getStepData(final FlowNode flowNode) {
- final Run, ?> run = getRun(flowNode);
- if(run == null) {
- logger.fine("Unable to get StepData from flowNode '"+flowNode.getDisplayName()+"'. Run is null");
- return null;
- }
-
- final StepDataAction stepDataAction = run.getAction(StepDataAction.class);
- if(stepDataAction == null) {
- logger.fine("Unable to get StepData from flowNode '"+flowNode.getDisplayName()+"'. StepDataAction is null");
- return null;
- }
-
- return stepDataAction.get(run, flowNode);
- }
-
- @SuppressFBWarnings("NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE")
- private StepTraceData getStepTraceData(FlowNode flowNode) {
- final Run, ?> run = getRun(flowNode);
- if(run == null) {
- logger.fine("Unable to get StepTraceData from flowNode '"+flowNode.getDisplayName()+"'. Run is null");
- return null;
- }
-
- final StepTraceDataAction stepTraceDataAction = run.getAction(StepTraceDataAction.class);
- if(stepTraceDataAction == null) {
- logger.fine("Unable to get StepTraceData from flowNode '"+flowNode.getDisplayName()+"'. StepTraceDataAction is null");
- return null;
- }
-
- return stepTraceDataAction.get(run, flowNode);
- }
-
- private FlowNodeQueueData getQueueData(FlowNode node) {
- final Run, ?> run = getRun(node);
- if(run == null) {
- logger.fine("Unable to get QueueData from node '"+node.getDisplayName()+"'. Run is null");
- return null;
- }
-
- PipelineQueueInfoAction pipelineQueueInfoAction = run.getAction(PipelineQueueInfoAction.class);
- if (pipelineQueueInfoAction == null) {
- logger.fine("Unable to get QueueInfoAction from node '"+node.getDisplayName()+"'. QueueInfoAction is null");
- return null;
- }
-
- return pipelineQueueInfoAction.get(run, node.getId());
- }
-
- private Run, ?> getRun(final FlowNode node) {
- if(node == null || node.getExecution() == null || node.getExecution().getOwner() == null) {
- return null;
- }
-
- try {
- return (Run, ?>) node.getExecution().getOwner().getExecutable();
- } catch (Exception e){
- return null;
- }
- }
-
- public static class BuildPipelineNodeKey {
- private final String id;
- private final String name;
-
- public BuildPipelineNodeKey(final String stageId, final String stageName) {
- this.id = stageId;
- this.name = stageName;
- }
-
- public String getId() {
- return id;
- }
-
- public String getName() {
- return name;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- BuildPipelineNodeKey that = (BuildPipelineNodeKey) o;
- return Objects.equals(id, that.id) &&
- Objects.equals(name, that.name);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(id, name);
- }
- }
-
-
- static class BuildPipelineNodeComparator implements Comparator, Serializable {
-
- @Override
- public int compare(BuildPipelineNode o1, BuildPipelineNode o2) {
- if(o1.getStartTime() == -1L || o2.getStartTime() == -1L) {
- return 0;
- }
-
- if(o1.getStartTime() < o2.getStartTime()) {
- return -1;
- } else if (o1.getStartTime() > o2.getStartTime()){
- return 1;
- }
- return 0;
- }
- }
-}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/CIGlobalTagsAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/CIGlobalTagsAction.java
deleted file mode 100644
index 986496ec6..000000000
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/CIGlobalTagsAction.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package org.datadog.jenkins.plugins.datadog.model;
-
-import hudson.model.InvisibleAction;
-
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.Map;
-
-public class CIGlobalTagsAction extends InvisibleAction implements Serializable {
-
- private final Map tags;
-
- public CIGlobalTagsAction(final Map tags) {
- this.tags = tags != null ? tags : new HashMap<>();
- }
-
- public Map getTags() {
- return tags;
- }
-
- public void putAll(Map tags) {
- this.tags.putAll(tags);
- }
-}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/DatadogPluginAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/DatadogPluginAction.java
new file mode 100644
index 000000000..88cb56657
--- /dev/null
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/DatadogPluginAction.java
@@ -0,0 +1,10 @@
+package org.datadog.jenkins.plugins.datadog.model;
+
+import hudson.model.InvisibleAction;
+import java.io.Serializable;
+
+/**
+ * Marker base class for all actions that are added by the plugin
+ */
+public abstract class DatadogPluginAction extends InvisibleAction implements Serializable {
+}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/FlowNodeQueueData.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/FlowNodeQueueData.java
deleted file mode 100644
index b9f697030..000000000
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/FlowNodeQueueData.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package org.datadog.jenkins.plugins.datadog.model;
-
-import java.io.Serializable;
-
-/**
- * Keeps the timestamps of a certain FlowNode based on the onEnterBuildable and onLeaveBuildable callbacks.
- */
-public class FlowNodeQueueData implements Serializable {
-
- private static final long serialVersionUID = 1L;
-
- private final String nodeId;
- private long enterBuildableNanos;
- private long leaveBuildableNanos;
- private long queueTimeNanos = -1L;
-
- public FlowNodeQueueData(final String nodeId) {
- this.nodeId = nodeId;
- }
-
- public void setEnterBuildableNanos(long timestampNanos) {
- this.enterBuildableNanos = timestampNanos;
- }
-
- public void setLeaveBuildableNanos(long timestampNanos) {
- this.leaveBuildableNanos = timestampNanos;
- this.queueTimeNanos = this.leaveBuildableNanos - this.enterBuildableNanos;
- }
-
- public long getNanosInQueue() {
- return this.queueTimeNanos;
- }
-
- @Override
- public String toString() {
- final StringBuilder sb = new StringBuilder("FlowNodeQueueData{");
- sb.append("nodeId='").append(nodeId).append('\'');
- sb.append(", enterBuildableNanos=").append(enterBuildableNanos);
- sb.append(", leaveBuildableNanos=").append(leaveBuildableNanos);
- sb.append(", queueTimeNanos=").append(queueTimeNanos);
- sb.append('}');
- return sb.toString();
- }
-}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitCommitAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitCommitAction.java
index b516cf6a0..1046a6bd9 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitCommitAction.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitCommitAction.java
@@ -1,126 +1,237 @@
package org.datadog.jenkins.plugins.datadog.model;
-import hudson.model.InvisibleAction;
-
-import java.io.Serializable;
+import com.thoughtworks.xstream.XStream;
+import com.thoughtworks.xstream.converters.MarshallingContext;
+import com.thoughtworks.xstream.converters.UnmarshallingContext;
+import com.thoughtworks.xstream.io.HierarchicalStreamReader;
+import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
+import java.util.Objects;
+import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter;
/**
* Keeps the Git commit related information.
*/
-public class GitCommitAction extends InvisibleAction implements Serializable {
+public class GitCommitAction extends DatadogPluginAction {
private static final long serialVersionUID = 1L;
- private final String commit;
- private final String message;
- private final String authorName;
- private final String authorEmail;
- private final String authorDate;
- private final String committerName;
- private final String committerEmail;
- private final String committerDate;
-
- private GitCommitAction(Builder builder) {
- this.commit = builder.commit;
- this.message = builder.message;
- this.authorName = builder.authorName;
- this.authorEmail = builder.authorEmail;
- this.authorDate = builder.authorDate;
- this.committerName = builder.committerName;
- this.committerEmail = builder.committerEmail;
- this.committerDate = builder.committerDate;
- }
-
- public static Builder newBuilder() {
- return new Builder();
- }
-
- public static class Builder {
- private String commit;
- private String message;
- private String authorName;
- private String authorEmail;
- private String authorDate;
- private String committerName;
- private String committerEmail;
- private String committerDate;
-
- private Builder(){}
-
- public Builder withCommit(final String commit) {
- this.commit = commit;
- return this;
- }
-
- public Builder withMessage(final String message) {
- this.message = message;
- return this;
- }
-
- public Builder withAuthorName(final String authorName) {
- this.authorName = authorName;
- return this;
- }
-
- public Builder withAuthorEmail(final String authorEmail) {
- this.authorEmail = authorEmail;
- return this;
- }
-
- public Builder withAuthorDate(final String authorDate) {
- this.authorDate = authorDate;
- return this;
- }
-
- public Builder withCommitterName(final String committerName){
- this.committerName = committerName;
- return this;
- }
+ private volatile String tag;
+ private volatile String commit;
+ private volatile String message;
+ private volatile String authorName;
+ private volatile String authorEmail;
+ private volatile String authorDate;
+ private volatile String committerName;
+ private volatile String committerEmail;
+ private volatile String committerDate;
+
+ public GitCommitAction() {
+ }
- public Builder withCommitterEmail(final String committerEmail) {
- this.committerEmail = committerEmail;
- return this;
- }
+ public GitCommitAction(String tag, String commit, String message, String authorName, String authorEmail, String authorDate, String committerName, String committerEmail, String committerDate) {
+ this.tag = tag;
+ this.commit = commit;
+ this.message = message;
+ this.authorName = authorName;
+ this.authorEmail = authorEmail;
+ this.authorDate = authorDate;
+ this.committerName = committerName;
+ this.committerEmail = committerEmail;
+ this.committerDate = committerDate;
+ }
- public Builder withCommitterDate(final String committerDate) {
- this.committerDate = committerDate;
- return this;
- }
+ public String getTag() {
+ return tag;
+ }
- public GitCommitAction build() {
- return new GitCommitAction(this);
- }
+ public void setTag(String tag) {
+ this.tag = tag;
}
public String getCommit() {
return commit;
}
+ public void setCommit(String commit) {
+ this.commit = commit;
+ }
+
public String getMessage() {
return message;
}
+ public void setMessage(String message) {
+ this.message = message;
+ }
+
public String getAuthorName() {
return authorName;
}
+ public void setAuthorName(String authorName) {
+ this.authorName = authorName;
+ }
+
public String getAuthorEmail() {
return authorEmail;
}
+ public void setAuthorEmail(String authorEmail) {
+ this.authorEmail = authorEmail;
+ }
+
public String getAuthorDate() {
return authorDate;
}
+ public void setAuthorDate(String authorDate) {
+ this.authorDate = authorDate;
+ }
+
public String getCommitterName() {
return committerName;
}
+ public void setCommitterName(String committerName) {
+ this.committerName = committerName;
+ }
+
public String getCommitterEmail() {
return committerEmail;
}
+ public void setCommitterEmail(String committerEmail) {
+ this.committerEmail = committerEmail;
+ }
+
public String getCommitterDate() {
return committerDate;
}
+
+ public void setCommitterDate(String committerDate) {
+ this.committerDate = committerDate;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GitCommitAction that = (GitCommitAction) o;
+ return Objects.equals(tag, that.tag) && Objects.equals(commit, that.commit) && Objects.equals(message, that.message) && Objects.equals(authorName, that.authorName) && Objects.equals(authorEmail, that.authorEmail) && Objects.equals(authorDate, that.authorDate) && Objects.equals(committerName, that.committerName) && Objects.equals(committerEmail, that.committerEmail) && Objects.equals(committerDate, that.committerDate);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(tag, commit, message, authorName, authorEmail, authorDate, committerName, committerEmail, committerDate);
+ }
+
+ @Override
+ public String toString() {
+ return "GitCommitAction{" +
+ "tag='" + tag + '\'' +
+ ", commit='" + commit + '\'' +
+ ", message='" + message + '\'' +
+ ", authorName='" + authorName + '\'' +
+ ", authorEmail='" + authorEmail + '\'' +
+ ", authorDate='" + authorDate + '\'' +
+ ", committerName='" + committerName + '\'' +
+ ", committerEmail='" + committerEmail + '\'' +
+ ", committerDate='" + committerDate + '\'' +
+ '}';
+ }
+
+ public static final class ConverterImpl extends DatadogActionConverter {
+ public ConverterImpl(XStream xs) {
+ }
+
+ @Override
+ public boolean canConvert(Class type) {
+ return GitCommitAction.class == type;
+ }
+
+ @Override
+ public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
+ GitCommitAction action = (GitCommitAction) source;
+ if (action.tag != null) {
+ writeField("tag", action.tag, writer, context);
+ }
+ if (action.commit != null) {
+ writeField("commit", action.commit, writer, context);
+ }
+ if (action.message != null) {
+ writeField("message", action.message, writer, context);
+ }
+ if (action.authorName != null) {
+ writeField("authorName", action.authorName, writer, context);
+ }
+ if (action.authorEmail != null) {
+ writeField("authorEmail", action.authorEmail, writer, context);
+ }
+ if (action.authorDate != null) {
+ writeField("authorDate", action.authorDate, writer, context);
+ }
+ if (action.committerName != null) {
+ writeField("committerName", action.committerName, writer, context);
+ }
+ if (action.committerEmail != null) {
+ writeField("committerEmail", action.committerEmail, writer, context);
+ }
+ if (action.committerDate != null) {
+ writeField("committerDate", action.committerDate, writer, context);
+ }
+ }
+
+ @Override
+ public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
+ String tag = null;
+ String commit = null;
+ String message = null;
+ String authorName = null;
+ String authorEmail = null;
+ String authorDate = null;
+ String committerName = null;
+ String committerEmail = null;
+ String committerDate = null;
+
+ while (reader.hasMoreChildren()) {
+ reader.moveDown();
+ String fieldName = reader.getNodeName();
+ switch (fieldName) {
+ case "tag":
+ tag = (String) context.convertAnother(null, String.class);
+ break;
+ case "commit":
+ commit = (String) context.convertAnother(null, String.class);
+ break;
+ case "message":
+ message = (String) context.convertAnother(null, String.class);
+ break;
+ case "authorName":
+ authorName = (String) context.convertAnother(null, String.class);
+ break;
+ case "authorEmail":
+ authorEmail = (String) context.convertAnother(null, String.class);
+ break;
+ case "authorDate":
+ authorDate = (String) context.convertAnother(null, String.class);
+ break;
+ case "committerName":
+ committerName = (String) context.convertAnother(null, String.class);
+ break;
+ case "committerEmail":
+ committerEmail = (String) context.convertAnother(null, String.class);
+ break;
+ case "committerDate":
+ committerDate = (String) context.convertAnother(null, String.class);
+ break;
+ default:
+ // unknown tag, could be something serialized by a different version of the plugin
+ break;
+ }
+ reader.moveUp();
+ }
+
+ return new GitCommitAction(tag, commit, message, authorName, authorEmail, authorDate, committerName, committerEmail, committerDate);
+ }
+ }
}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryAction.java
index 29e586ca3..d9c978936 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryAction.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/GitRepositoryAction.java
@@ -1,54 +1,129 @@
package org.datadog.jenkins.plugins.datadog.model;
-import hudson.model.InvisibleAction;
-
-import java.io.Serializable;
+import com.thoughtworks.xstream.XStream;
+import com.thoughtworks.xstream.converters.MarshallingContext;
+import com.thoughtworks.xstream.converters.UnmarshallingContext;
+import com.thoughtworks.xstream.io.HierarchicalStreamReader;
+import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
+import java.util.Objects;
+import javax.annotation.Nullable;
+import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter;
/**
* Keeps the Git repository related information.
*/
-public class GitRepositoryAction extends InvisibleAction implements Serializable {
+public class GitRepositoryAction extends DatadogPluginAction {
private static final long serialVersionUID = 1L;
- private final String repositoryURL;
- private final String defaultBranch;
+ private volatile String repositoryURL;
+ private volatile String defaultBranch;
+ private volatile String branch;
+
+ public GitRepositoryAction() {
+ }
- private GitRepositoryAction(final Builder builder) {
- this.repositoryURL = builder.repositoryURL;
- this.defaultBranch = builder.defaultBranch;
+ public GitRepositoryAction(String repositoryURL, String defaultBranch, String branch) {
+ this.repositoryURL = repositoryURL;
+ this.defaultBranch = defaultBranch;
+ this.branch = branch;
}
- public String getRepositoryURL(){
+ @Nullable
+ public String getRepositoryURL() {
return repositoryURL;
}
+ public void setRepositoryURL(String repositoryURL) {
+ this.repositoryURL = repositoryURL;
+ }
+
+ @Nullable
public String getDefaultBranch() {
return defaultBranch;
}
- public static Builder newBuilder() {
- return new Builder();
+ public void setDefaultBranch(String defaultBranch) {
+ this.defaultBranch = defaultBranch;
+ }
+
+ @Nullable
+ public String getBranch() {
+ return branch;
}
- public static class Builder {
- private String repositoryURL;
- private String defaultBranch;
+ public void setBranch(String branch) {
+ this.branch = branch;
+ }
- private Builder(){}
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ GitRepositoryAction that = (GitRepositoryAction) o;
+ return Objects.equals(repositoryURL, that.repositoryURL) && Objects.equals(defaultBranch, that.defaultBranch) && Objects.equals(branch, that.branch);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(repositoryURL, defaultBranch, branch);
+ }
+
+ @Override
+ public String toString() {
+ return "GitRepositoryAction{" +
+ "repositoryURL='" + repositoryURL + '\'' +
+ ", defaultBranch='" + defaultBranch + '\'' +
+ ", branch='" + branch + '\'' +
+ '}';
+ }
+
+ public static final class ConverterImpl extends DatadogActionConverter {
+ public ConverterImpl(XStream xs) {
+ }
- public Builder withRepositoryURL(final String repositoryURL) {
- this.repositoryURL = repositoryURL;
- return this;
+ @Override
+ public boolean canConvert(Class type) {
+ return GitRepositoryAction.class == type;
}
- public Builder withDefaultBranch(final String defaultBranch) {
- this.defaultBranch = defaultBranch;
- return this;
+ @Override
+ public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
+ GitRepositoryAction action = (GitRepositoryAction) source;
+ if (action.repositoryURL != null) {
+ writeField("repositoryURL", action.repositoryURL, writer, context);
+ }
+ if (action.defaultBranch != null) {
+ writeField("defaultBranch", action.defaultBranch, writer, context);
+ }
+ if (action.branch != null) {
+ writeField("branch", action.branch, writer, context);
+ }
}
- public GitRepositoryAction build(){
- return new GitRepositoryAction(this);
+ @Override
+ public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
+ GitRepositoryAction gitRepositoryAction = new GitRepositoryAction();
+ while (reader.hasMoreChildren()) {
+ reader.moveDown();
+ String fieldName = reader.getNodeName();
+ switch (fieldName) {
+ case "repositoryURL":
+ gitRepositoryAction.setRepositoryURL((String) context.convertAnother(null, String.class));
+ break;
+ case "defaultBranch":
+ gitRepositoryAction.setDefaultBranch((String) context.convertAnother(null, String.class));
+ break;
+ case "branch":
+ gitRepositoryAction.setBranch((String) context.convertAnother(null, String.class));
+ break;
+ default:
+ // unknown tag, could be something serialized by a different version of the plugin
+ break;
+ }
+ reader.moveUp();
+ }
+ return gitRepositoryAction;
}
}
}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineNodeInfoAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineNodeInfoAction.java
index 052d4edd3..abafd8697 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineNodeInfoAction.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineNodeInfoAction.java
@@ -1,21 +1,27 @@
package org.datadog.jenkins.plugins.datadog.model;
-import hudson.model.InvisibleAction;
-
-import java.io.Serializable;
+import com.thoughtworks.xstream.XStream;
+import com.thoughtworks.xstream.converters.MarshallingContext;
+import com.thoughtworks.xstream.converters.UnmarshallingContext;
+import com.thoughtworks.xstream.io.HierarchicalStreamReader;
+import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
+import java.util.Collections;
+import java.util.Objects;
import java.util.Set;
+import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter;
-public class PipelineNodeInfoAction extends InvisibleAction implements Serializable {
+public class PipelineNodeInfoAction extends DatadogPluginAction {
private final String nodeName;
 private final Set<String> nodeLabels;
-
private final String nodeHostname;
+ private final String workspace;
- public PipelineNodeInfoAction(final String nodeName, final Set nodeLabels, final String nodeHostname) {
+ public PipelineNodeInfoAction(final String nodeName, final Set<String> nodeLabels, final String nodeHostname, String workspace) {
this.nodeName = nodeName;
this.nodeLabels = nodeLabels;
this.nodeHostname = nodeHostname;
+ this.workspace = workspace;
}
public String getNodeName() {
@@ -29,4 +35,91 @@ public Set getNodeLabels() {
public String getNodeHostname() {
return nodeHostname;
}
+
+ public String getWorkspace() {
+ return workspace;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PipelineNodeInfoAction that = (PipelineNodeInfoAction) o;
+ return Objects.equals(nodeName, that.nodeName) && Objects.equals(nodeLabels, that.nodeLabels) && Objects.equals(nodeHostname, that.nodeHostname) && Objects.equals(workspace, that.workspace);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(nodeName, nodeLabels, nodeHostname, workspace);
+ }
+
+ @Override
+ public String toString() {
+ return "PipelineNodeInfoAction{" +
+ "nodeName='" + nodeName + '\'' +
+ ", nodeLabels=" + nodeLabels +
+ ", nodeHostname='" + nodeHostname + '\'' +
+ ", workspace='" + workspace + '\'' +
+ '}';
+ }
+
+ public static final class ConverterImpl extends DatadogActionConverter {
+ public ConverterImpl(XStream xs) {
+ }
+
+ @Override
+ public boolean canConvert(Class type) {
+ return PipelineNodeInfoAction.class == type;
+ }
+
+ @Override
+ public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
+ PipelineNodeInfoAction action = (PipelineNodeInfoAction) source;
+ if (action.nodeName != null) {
+ writeField("nodeName", action.nodeName, writer, context);
+ }
+ if (action.nodeHostname != null) {
+ writeField("nodeHostname", action.nodeHostname, writer, context);
+ }
+ if (action.nodeLabels != null && !action.nodeLabels.isEmpty()) {
+ writeField("nodeLabels", action.nodeLabels, writer, context);
+ }
+ if (action.workspace != null) {
+ writeField("workspace", action.workspace, writer, context);
+ }
+ }
+
+ @Override
+ public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
+ String nodeName = null;
+ String nodeHostname = null;
+ Set<String> nodeLabels = Collections.emptySet();
+ String workspace = null;
+
+ while (reader.hasMoreChildren()) {
+ reader.moveDown();
+ String fieldName = reader.getNodeName();
+ switch (fieldName) {
+ case "nodeName":
+ nodeName = (String) context.convertAnother(null, String.class);
+ break;
+ case "nodeHostname":
+ nodeHostname = (String) context.convertAnother(null, String.class);
+ break;
+ case "nodeLabels":
+ nodeLabels = (Set<String>) context.convertAnother(null, Set.class);
+ break;
+ case "workspace":
+ workspace = (String) context.convertAnother(null, String.class);
+ break;
+ default:
+ // unknown tag, could be something serialized by a different version of the plugin
+ break;
+ }
+ reader.moveUp();
+ }
+
+ return new PipelineNodeInfoAction(nodeName, nodeLabels, nodeHostname, workspace);
+ }
+ }
}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineQueueInfoAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineQueueInfoAction.java
index 5f5599cc1..c7f79e7f5 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineQueueInfoAction.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineQueueInfoAction.java
@@ -1,40 +1,109 @@
package org.datadog.jenkins.plugins.datadog.model;
-import hudson.model.InvisibleAction;
-import hudson.model.Run;
+import com.thoughtworks.xstream.XStream;
+import com.thoughtworks.xstream.converters.MarshallingContext;
+import com.thoughtworks.xstream.converters.UnmarshallingContext;
+import com.thoughtworks.xstream.io.HierarchicalStreamReader;
+import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
+import java.util.Objects;
+import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter;
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-
-/**
- * Keeps the Queue Info related to the FlowNode scheduled to be executed.
- */
-public class PipelineQueueInfoAction extends InvisibleAction implements Serializable {
+public class PipelineQueueInfoAction extends DatadogPluginAction {
private static final long serialVersionUID = 1L;
- private final ConcurrentMap queueDataByFlowNode;
+ private volatile long queueTimeMillis = -1;
+ private volatile long propagatedQueueTimeMillis = -1;
+
+ public PipelineQueueInfoAction() {}
+
+ public PipelineQueueInfoAction(long queueTimeMillis, long propagatedQueueTimeMillis) {
+ this.queueTimeMillis = queueTimeMillis;
+ this.propagatedQueueTimeMillis = propagatedQueueTimeMillis;
+ }
+
+ public long getQueueTimeMillis() {
+ return queueTimeMillis;
+ }
- public PipelineQueueInfoAction() {
- this.queueDataByFlowNode = new ConcurrentHashMap<>();
+ public PipelineQueueInfoAction setQueueTimeMillis(long queueTimeMillis) {
+ this.queueTimeMillis = queueTimeMillis;
+ return this;
}
- public FlowNodeQueueData get(final Run,?> run, String flowNodeId) {
- return this.queueDataByFlowNode.get(flowNodeId);
+ public long getPropagatedQueueTimeMillis() {
+ return propagatedQueueTimeMillis;
}
- public void put(final Run,?> run, String flowNodeId, FlowNodeQueueData data) {
- this.queueDataByFlowNode.put(flowNodeId, data);
+ public PipelineQueueInfoAction setPropagatedQueueTimeMillis(long propagatedQueueTimeMillis) {
+ this.propagatedQueueTimeMillis = propagatedQueueTimeMillis;
+ return this;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PipelineQueueInfoAction that = (PipelineQueueInfoAction) o;
+ return queueTimeMillis == that.queueTimeMillis && propagatedQueueTimeMillis == that.propagatedQueueTimeMillis;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(queueTimeMillis, propagatedQueueTimeMillis);
}
@Override
public String toString() {
- final StringBuilder sb = new StringBuilder("PipelineQueueInfoAction{");
- sb.append("queueDataByFlowNode=").append(queueDataByFlowNode);
- sb.append('}');
- return sb.toString();
+ return "PipelineQueueInfoAction{" +
+ "queueTimeMillis=" + queueTimeMillis +
+ ", propagatedQueueTimeMillis=" + propagatedQueueTimeMillis +
+ '}';
+ }
+
+ public static final class ConverterImpl extends DatadogActionConverter {
+ public ConverterImpl(XStream xs) {
+ }
+
+ @Override
+ public boolean canConvert(Class type) {
+ return PipelineQueueInfoAction.class == type;
+ }
+
+ @Override
+ public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
+ PipelineQueueInfoAction action = (PipelineQueueInfoAction) source;
+ if (action.queueTimeMillis != -1) {
+ writeField("queueTimeMillis", action.queueTimeMillis, writer, context);
+ }
+ if (action.propagatedQueueTimeMillis != -1) {
+ writeField("propagatedQueueTimeMillis", action.propagatedQueueTimeMillis, writer, context);
+ }
+ }
+
+ @Override
+ public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
+ long queueTimeMillis = -1;
+ long propagatedQueueTimeMillis = -1;
+
+ while (reader.hasMoreChildren()) {
+ reader.moveDown();
+ String fieldName = reader.getNodeName();
+ switch (fieldName) {
+ case "queueTimeMillis":
+ queueTimeMillis = (long) context.convertAnother(null, long.class);
+ break;
+ case "propagatedQueueTimeMillis":
+ propagatedQueueTimeMillis = (long) context.convertAnother(null, long.class);
+ break;
+ default:
+ // unknown tag, could be something serialized by a different version of the plugin
+ break;
+ }
+ reader.moveUp();
+ }
+
+ return new PipelineQueueInfoAction(queueTimeMillis, propagatedQueueTimeMillis);
+ }
}
}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineStepData.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineStepData.java
new file mode 100644
index 000000000..e2451eaf7
--- /dev/null
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/PipelineStepData.java
@@ -0,0 +1,297 @@
+package org.datadog.jenkins.plugins.datadog.model;
+
+import hudson.model.Run;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import org.datadog.jenkins.plugins.datadog.DatadogUtilities;
+import org.datadog.jenkins.plugins.datadog.model.node.DequeueAction;
+import org.datadog.jenkins.plugins.datadog.model.node.NodeInfoAction;
+import org.datadog.jenkins.plugins.datadog.model.node.StatusAction;
+import org.datadog.jenkins.plugins.datadog.traces.BuildSpanAction;
+import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan;
+import org.jenkinsci.plugins.workflow.actions.ArgumentsAction;
+import org.jenkinsci.plugins.workflow.actions.WarningAction;
+import org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode;
+import org.jenkinsci.plugins.workflow.graph.BlockEndNode;
+import org.jenkinsci.plugins.workflow.graph.BlockStartNode;
+import org.jenkinsci.plugins.workflow.graph.FlowNode;
+
+/**
+ * Represents a step in a Jenkins Pipeline.
+ */
+public class PipelineStepData {
+
+ public enum StepType {
+ PIPELINE("ci.pipeline", "pipeline"),
+ STAGE("ci.stage", "stage"),
+ STEP("ci.job", "job");
+
+ private final String tagName;
+ private final String buildLevel;
+
+ StepType(final String tagName, final String buildLevel) {
+ this.tagName = tagName;
+ this.buildLevel = buildLevel;
+ }
+
+ public String getTagName() {
+ return tagName;
+ }
+
+ public String getBuildLevel() {
+ return buildLevel;
+ }
+ }
+
+ private String id;
+ private String name;
+ private String stageId;
+ private String stageName;
+
+ private StepType type;
+ private Map<String, Object> args;
+ private String workspace;
+ private String nodeName;
+ private Set<String> nodeLabels;
+ private String nodeHostname;
+ private long startTimeMicros;
+ private long endTimeMicros;
+ private long nanosInQueue;
+ private String jenkinsResult;
+ private Status status;
+
+ // Throwable of the node.
+ // Although the error flag was true, this can be null.
+ private Throwable errorObj;
+ private String unstableMessage;
+
+ //Tracing
+ private long spanId;
+ private long parentSpanId = -1;
+ private long traceId;
+
+ public PipelineStepData(final Run<?, ?> run, final BlockStartNode startNode, final BlockEndNode<?> endNode) {
+ this(run, startNode);
+
+ this.type = StepType.STAGE;
+
+ this.startTimeMicros = TimeUnit.MILLISECONDS.toMicros(DatadogUtilities.getTimeMillis(startNode));
+ if (startTimeMicros < 0) {
+ throw new IllegalStateException("Step " + startNode.getId() + " (" + startNode.getDisplayName() + ") has no start time info");
+ }
+
+ this.endTimeMicros = TimeUnit.MILLISECONDS.toMicros(DatadogUtilities.getTimeMillis(endNode));
+ if (endTimeMicros < 0) {
+ throw new IllegalStateException("Step " + endNode.getId() + " (" + endNode.getDisplayName() + ") has no end time info");
+ }
+
+ this.jenkinsResult = DatadogUtilities.getResultTag(endNode);
+ this.status = getStatus(startNode, jenkinsResult);
+ this.errorObj = DatadogUtilities.getErrorObj(endNode);
+ this.unstableMessage = getUnstableMessage(startNode);
+
+ NodeInfoAction nodeInfoAction = startNode.getAction(NodeInfoAction.class);
+ if (nodeInfoAction != null) {
+ this.nodeName = nodeInfoAction.getNodeName();
+ this.nodeHostname = nodeInfoAction.getNodeHostname();
+ this.nodeLabels = nodeInfoAction.getNodeLabels();
+ this.workspace = nodeInfoAction.getNodeWorkspace();
+ }
+ }
+
+ public PipelineStepData(final Run<?, ?> run, final StepAtomNode stepNode, final FlowNode nextNode) {
+ this(run, stepNode);
+
+ this.type = StepType.STEP;
+
+ this.startTimeMicros = TimeUnit.MILLISECONDS.toMicros(DatadogUtilities.getTimeMillis(stepNode));
+ if (startTimeMicros < 0) {
+ throw new IllegalStateException("Step " + stepNode.getId() + " (" + stepNode.getDisplayName() + ") has no start time info");
+ }
+
+ this.endTimeMicros = TimeUnit.MILLISECONDS.toMicros(DatadogUtilities.getTimeMillis(nextNode));
+ if (endTimeMicros < 0) {
+ throw new IllegalStateException("Step " + nextNode.getId() + " (" + nextNode.getDisplayName() + ") has no time info");
+ }
+
+ this.jenkinsResult = DatadogUtilities.getResultTag(stepNode);
+ this.status = getStatus(stepNode, jenkinsResult);
+ this.errorObj = DatadogUtilities.getErrorObj(stepNode);
+ this.unstableMessage = getUnstableMessage(stepNode);
+
+ BlockStartNode enclosingStage = DatadogUtilities.getEnclosingStageNode(stepNode);
+ if (enclosingStage != null) {
+ NodeInfoAction enclosingStageInfoAction = enclosingStage.getAction(NodeInfoAction.class);
+ if (enclosingStageInfoAction != null) {
+ this.nodeName = enclosingStageInfoAction.getNodeName();
+ this.nodeHostname = enclosingStageInfoAction.getNodeHostname();
+ this.nodeLabels = enclosingStageInfoAction.getNodeLabels();
+ this.workspace = enclosingStageInfoAction.getNodeWorkspace();
+ }
+ }
+ }
+
+ private PipelineStepData(final Run<?, ?> run, FlowNode startNode) {
+ TraceInfoAction traceInfoAction = run.getAction(TraceInfoAction.class);
+ if (traceInfoAction != null) {
+ /*
+ * Use "remove-or-create" semantics:
+ * - if the ID is there in the action, remove it since it is no longer needed (we're about to submit this node and be done with it)
+ * - if the ID is not there, create a new one on the spot without saving it in the action (IDs are initialized lazily, if the node's ID is not there, it means the node had no children that needed to know its ID)
+ */
+ Long spanId = traceInfoAction.removeOrCreate(startNode.getId());
+ if (spanId != null) {
+ this.spanId = spanId;
+ }
+ } else {
+ throw new IllegalStateException("Step " + startNode.getId() + " (" + startNode.getDisplayName() + ") has no span info. " +
+ "It is possible that CI Visibility was enabled while this step was in progress");
+ }
+
+ /*
+ * Find node's parent: iterate over the blocks that contain it, starting with the innermost,
+ * until we find a block that is included in the trace (a block that corresponds to a stage).
+ */
+ BlockStartNode enclosingStage = DatadogUtilities.getEnclosingStageNode(startNode);
+ if (enclosingStage != null) {
+ this.stageId = enclosingStage.getId();
+ this.stageName = enclosingStage.getDisplayName();
+
+ Long parentSpanId = traceInfoAction.getOrCreate(enclosingStage.getId());
+ if (parentSpanId != null) {
+ this.parentSpanId = parentSpanId;
+ }
+ }
+
+ BuildSpanAction buildSpanAction = run.getAction(BuildSpanAction.class);
+ if (buildSpanAction != null) {
+ TraceSpan.TraceSpanContext traceContext = buildSpanAction.getBuildSpanContext();
+ this.traceId = traceContext.getTraceId();
+
+ /*
+ * If we didn't find this node's parent previously,
+ * then it is a top-level stage, so its parent will be the span that correspond to the build as a whole.
+ */
+ if (this.parentSpanId == -1) {
+ this.parentSpanId = traceContext.getSpanId();
+ }
+ } else {
+ throw new IllegalStateException("Step " + startNode.getId() + " (" + startNode.getDisplayName() + ") has no trace info. " +
+ "It is possible that CI Visibility was enabled while this step was in progress");
+ }
+
+ DequeueAction queueInfoAction = startNode.getAction(DequeueAction.class);
+ if (queueInfoAction != null) {
+ this.nanosInQueue = queueInfoAction.getQueueTimeNanos();
+ }
+
+ this.id = startNode.getId();
+ this.name = startNode.getDisplayName();
+ this.args = ArgumentsAction.getFilteredArguments(startNode);
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public String getStageId() {
+ return stageId;
+ }
+
+ public String getStageName() {
+ return stageName;
+ }
+
+ public Map<String, Object> getArgs() {
+ return args;
+ }
+
+ public String getWorkspace() {
+ return workspace;
+ }
+
+ public String getNodeName() {
+ return nodeName;
+ }
+
+ public Set<String> getNodeLabels() {
+ return nodeLabels;
+ }
+
+ public String getNodeHostname() {
+ return nodeHostname;
+ }
+
+ public long getStartTimeMicros() {
+ return startTimeMicros;
+ }
+
+ public long getEndTimeMicros() {
+ return endTimeMicros;
+ }
+
+ public long getNanosInQueue() {
+ return nanosInQueue;
+ }
+
+ public String getJenkinsResult() {
+ return jenkinsResult;
+ }
+
+ public Status getStatus() {
+ return status;
+ }
+
+ public Throwable getErrorObj() {
+ return errorObj;
+ }
+
+ public String getUnstableMessage() {
+ return unstableMessage;
+ }
+
+ public boolean isError() {
+ return status == Status.ERROR;
+ }
+
+ public boolean isUnstable() {
+ return status == Status.UNSTABLE;
+ }
+
+ public long getSpanId() {
+ return spanId;
+ }
+
+ public long getParentSpanId() {
+ return parentSpanId;
+ }
+
+ public long getTraceId() {
+ return traceId;
+ }
+
+ public StepType getType() {
+ return type;
+ }
+
+ private static Status getStatus(FlowNode node, String jenkinsResult) {
+ Status nodeStatus = Status.fromJenkinsResult(jenkinsResult);
+ StatusAction statusAction = node.getAction(StatusAction.class);
+ return statusAction != null ? Status.combine(nodeStatus, statusAction.getStatus()) : nodeStatus;
+ }
+
+ /**
+ * Returns the error message for unstable pipelines
+ *
+ * @return error message
+ */
+ private static String getUnstableMessage(FlowNode flowNode) {
+ final WarningAction warningAction = flowNode.getAction(WarningAction.class);
+ return (warningAction != null) ? warningAction.getMessage() : null;
+ }
+}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/StageBreakdownAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/StageBreakdownAction.java
deleted file mode 100644
index bc80aa1f2..000000000
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/StageBreakdownAction.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package org.datadog.jenkins.plugins.datadog.model;
-
-import hudson.model.InvisibleAction;
-
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Keeps the Stage breakdown related information.
- */
-public class StageBreakdownAction extends InvisibleAction implements Serializable {
-
- private static final long serialVersionUID = 1L;
-
- private final Map stageDataByName;
-
- public StageBreakdownAction() {
- this.stageDataByName = new HashMap<>();
- }
-
- public Map getStageDataByName() {
- return stageDataByName;
- }
-
- public void put(String name, StageData stageData) {
- this.stageDataByName.put(name, stageData);
- }
-}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/Status.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/Status.java
new file mode 100644
index 000000000..7d293d111
--- /dev/null
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/Status.java
@@ -0,0 +1,35 @@
+package org.datadog.jenkins.plugins.datadog.model;
+
+import org.datadog.jenkins.plugins.datadog.DatadogUtilities;
+
+public enum Status {
+ UNKNOWN((byte) 0), SUCCESS((byte) 1), UNSTABLE((byte) 2), ERROR((byte) 3), SKIPPED((byte) 4), CANCELED((byte) 5);
+
+ private final byte weight;
+
+ Status(byte weight) {
+ this.weight = weight;
+ }
+
+ public String toTag() {
+ return toString().toLowerCase(java.util.Locale.ROOT);
+ }
+
+ public static Status fromJenkinsResult(String status) {
+ return valueOf(DatadogUtilities.statusFromResult(status).toUpperCase(java.util.Locale.ROOT));
+ }
+
+ /**
+ * Combines two statuses, returning the worst one
+ * (based on {@link hudson.model.Result#combine(hudson.model.Result, hudson.model.Result)}).
+ */
+ public static Status combine(Status a, Status b) {
+ if (a == null) {
+ return b;
+ }
+ if (b == null) {
+ return a;
+ }
+ return a.weight > b.weight ? a : b;
+ }
+}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/StepData.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/StepData.java
deleted file mode 100644
index 4745aba2e..000000000
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/StepData.java
+++ /dev/null
@@ -1,157 +0,0 @@
-package org.datadog.jenkins.plugins.datadog.model;
-
-import hudson.EnvVars;
-import hudson.FilePath;
-import hudson.model.Computer;
-import org.datadog.jenkins.plugins.datadog.DatadogGlobalConfiguration;
-import org.datadog.jenkins.plugins.datadog.DatadogUtilities;
-import org.datadog.jenkins.plugins.datadog.audit.DatadogAudit;
-import org.jenkinsci.plugins.workflow.steps.StepContext;
-
-import java.io.Serializable;
-import java.util.Collections;
-import java.util.Map;
-import java.util.Set;
-import java.util.logging.Logger;
-import java.util.stream.Collectors;
-
-public class StepData implements Serializable {
-
- private static final long serialVersionUID = 1L;
-
- private static transient final Logger logger = Logger.getLogger(StepData.class.getName());
-
- private final Map envVars;
- private final String nodeName;
- private final String nodeHostname;
- private final String workspace;
- private final Set nodeLabels;
-
- public StepData(final StepContext stepContext){
- long start = System.currentTimeMillis();
- try {
- this.envVars = getEnvVars(stepContext);
- this.nodeName = getNodeName(stepContext);
- this.nodeHostname = getNodeHostname(stepContext, this.envVars);
- this.workspace = getNodeWorkspace(stepContext);
- this.nodeLabels = getNodeLabels(stepContext);
- } finally {
- long end = System.currentTimeMillis();
- DatadogAudit.log("StepData.ctor", start, end);
- }
- }
-
- public Map getEnvVars() {
- return envVars;
- }
-
- public String getNodeName() {
- return nodeName;
- }
-
- public String getNodeHostname() {
- return nodeHostname;
- }
-
- public String getWorkspace() {
- return workspace;
- }
-
- public Set getNodeLabels() {
- return nodeLabels;
- }
-
- /**
- * Returns the workspace filepath of the remote node which is executing a determined {@code Step}
- * @param stepContext
- * @return absolute filepath of the workspace of the remote node.
- */
- private String getNodeWorkspace(final StepContext stepContext) {
- FilePath filePath = null;
- try {
- filePath = stepContext.get(FilePath.class);
- } catch (Exception e){
- logger.fine("Unable to extract FilePath information of the StepContext.");
- }
-
- if(filePath == null) {
- return null;
- }
-
- return filePath.getRemote();
- }
-
- /**
- * Returns the hostname of the remote node which is executing a determined {@code Step}
- * See {@code Computer.getHostName()}
- * @param stepContext
- * @return hostname of the remote node.
- */
- private String getNodeHostname(final StepContext stepContext, Map envVars) {
- String hostname = envVars.get(DatadogGlobalConfiguration.DD_CI_HOSTNAME);
- if (hostname == null) {
- Computer computer;
- try {
- computer = stepContext.get(Computer.class);
- if(computer != null) {
- hostname = computer.getHostName();
- }
- } catch (Exception e){
- logger.fine("Unable to extract hostname from StepContext.");
- }
- }
- return hostname;
- }
-
-
- /**
- * Returns the nodeName of the remote node which is executing a determined {@code Step}
- * @param stepContext
- * @return node name of the remote node.
- */
- private String getNodeName(StepContext stepContext) {
- try {
- Computer computer = stepContext.get(Computer.class);
- return DatadogUtilities.getNodeName(computer);
- } catch (Exception e){
- logger.fine("Unable to extract the node name from StepContext.");
- return null;
- }
- }
-
-
- /**
- * Returns the nodeLabels of the remote node which is executing a determined {@code Step}
- * @param stepContext
- * @return node labels of the remote node.
- */
- private Set getNodeLabels(StepContext stepContext) {
- try {
- Computer computer = stepContext.get(Computer.class);
- return DatadogUtilities.getNodeLabels(computer);
- } catch (Exception e) {
- logger.fine("Unable to extract the node labels from StepContext.");
- return Collections.emptySet();
- }
- }
-
-
- /**
- * Returns {@code Map} with environment variables of a certain {@code StepContext}
- * @param stepContext
- * @return map with environment variables of a stepContext.
- */
- private Map getEnvVars(StepContext stepContext) {
- EnvVars envVarsObj = null;
- try {
- envVarsObj = stepContext.get(EnvVars.class);
- } catch (Exception e){
- logger.fine("Unable to extract environment variables from StepContext.");
- }
-
- if(envVarsObj == null) {
- return Collections.emptyMap();
- }
- return envVarsObj.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
- }
-}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/StepTraceData.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/StepTraceData.java
deleted file mode 100644
index 82f38f194..000000000
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/model/StepTraceData.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package org.datadog.jenkins.plugins.datadog.model;
-
-import java.io.Serializable;
-
-public class StepTraceData implements Serializable {
-
- private final long spanId;
-
- public StepTraceData(final long spanId) {
- this.spanId = spanId;
- }
-
- public long getSpanId() {
- return spanId;
- }
-}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/TraceInfoAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/TraceInfoAction.java
new file mode 100644
index 000000000..989f13803
--- /dev/null
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/TraceInfoAction.java
@@ -0,0 +1,102 @@
+package org.datadog.jenkins.plugins.datadog.model;
+
+import com.thoughtworks.xstream.XStream;
+import com.thoughtworks.xstream.converters.MarshallingContext;
+import com.thoughtworks.xstream.converters.UnmarshallingContext;
+import com.thoughtworks.xstream.io.HierarchicalStreamReader;
+import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Objects;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import org.datadog.jenkins.plugins.datadog.traces.IdGenerator;
+import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter;
+
+/**
+ * This action stores mapping between IDs of {@link org.jenkinsci.plugins.workflow.graph.FlowNode}
+ * that are generated by Jenkins, and span IDs that are generated by the plugin.
+ *
+ * Span ID is submitted with the rest of the tracing data when a pipeline step finishes,
+ * but in certain cases it needs to be known before that:
+ *
+ * - before an atomic step starts, we add its span ID to the step's environment so that the logic inside the step could create custom spans that are linked as children to the step's span
+ * - if a stage step contains children, its children need to know their parent stage's span ID to use as their parent ID
+ *
+ * For reasons above, we generate IDs for flow nodes on demand, and store them here
+ * until execution of the corresponding nodes finishes.
+ * Once the execution finishes, the IDs are no longer needed and can be removed.
+ * It is important to remove IDs, because this action is regularly dumped to disk, so it should contain as little data as possible.
+ *
+ * There is a single trace info action associated with the pipeline,
+ * rather than a separate action with a single ID associated with the node.
+ * This is done for performance reasons, as changes to run actions are batched,
+ * while changes to node actions are written to disk immediately.
+ */
+public class TraceInfoAction extends DatadogPluginAction {
+
+ private final ConcurrentMap spanIdByNodeId;
+
+ public TraceInfoAction() {
+ this(Collections.emptyMap());
+ }
+
+ public TraceInfoAction(Map spanIdByNodeId) {
+ this.spanIdByNodeId = new ConcurrentHashMap<>(spanIdByNodeId);
+ }
+
+ public Long getOrCreate(String flowNodeId) {
+ return spanIdByNodeId.computeIfAbsent(flowNodeId, k -> IdGenerator.generate());
+ }
+
+ public Long removeOrCreate(String flowNodeId) {
+ Long existingId = spanIdByNodeId.remove(flowNodeId);
+ if (existingId != null) {
+ return existingId;
+ } else {
+ return IdGenerator.generate();
+ }
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ TraceInfoAction that = (TraceInfoAction) o;
+ return Objects.equals(spanIdByNodeId, that.spanIdByNodeId);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(spanIdByNodeId);
+ }
+
+ @Override
+ public String toString() {
+ return "TraceInfoAction{" +
+ "infoByFlowNodeId=" + spanIdByNodeId +
+ '}';
+ }
+
+ public static final class ConverterImpl extends DatadogActionConverter {
+ public ConverterImpl(XStream xs) {
+ }
+
+ @Override
+ public boolean canConvert(Class type) {
+ return TraceInfoAction.class == type;
+ }
+
+ @Override
+ public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
+ TraceInfoAction action = (TraceInfoAction) source;
+ writeField("infoByFlowNodeId", action.spanIdByNodeId, writer, context);
+ }
+
+ @Override
+ public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
+ Map infoByFlowNodeId = readField(reader, context, Map.class);
+ return new TraceInfoAction(infoByFlowNodeId);
+ }
+ }
+}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/DequeueAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/DequeueAction.java
new file mode 100644
index 000000000..dbd2aa832
--- /dev/null
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/DequeueAction.java
@@ -0,0 +1,65 @@
+package org.datadog.jenkins.plugins.datadog.model.node;
+
+import com.thoughtworks.xstream.XStream;
+import com.thoughtworks.xstream.converters.MarshallingContext;
+import com.thoughtworks.xstream.converters.UnmarshallingContext;
+import com.thoughtworks.xstream.io.HierarchicalStreamReader;
+import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
+import java.util.Objects;
+import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter;
+
+public class DequeueAction extends QueueInfoAction {
+
+ private static final long serialVersionUID = 1L;
+
+ private final long queueTimeNanos;
+
+ public DequeueAction(long queueTimeNanos) {
+ this.queueTimeNanos = queueTimeNanos;
+ }
+
+ public long getQueueTimeNanos() {
+ return queueTimeNanos;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DequeueAction action = (DequeueAction) o;
+ return queueTimeNanos == action.queueTimeNanos;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(queueTimeNanos);
+ }
+
+ @Override
+ public String toString() {
+ return "DequeueAction{queueTimeNanos=" + queueTimeNanos + '}';
+ }
+
+ public static final class ConverterImpl extends DatadogActionConverter {
+ public ConverterImpl(XStream xs) {
+ }
+
+ @Override
+ public boolean canConvert(Class type) {
+ return DequeueAction.class == type;
+ }
+
+ @Override
+ public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
+ DequeueAction action = (DequeueAction) source;
+ // Write the single field only; the previous extra convertAnother call serialized the value twice and desynced unmarshal.
+ writeField("queueTimeNanos", action.queueTimeNanos, writer, context);
+ }
+
+ @Override
+ public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
+ long queueTimeNanos = readField(reader, context, long.class);
+ return new DequeueAction(queueTimeNanos);
+ }
+ }
+}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/EnqueueAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/EnqueueAction.java
new file mode 100644
index 000000000..433c22c33
--- /dev/null
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/EnqueueAction.java
@@ -0,0 +1,64 @@
+package org.datadog.jenkins.plugins.datadog.model.node;
+
+import com.thoughtworks.xstream.XStream;
+import com.thoughtworks.xstream.converters.MarshallingContext;
+import com.thoughtworks.xstream.converters.UnmarshallingContext;
+import com.thoughtworks.xstream.io.HierarchicalStreamReader;
+import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
+import java.util.Objects;
+import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter;
+
+public class EnqueueAction extends QueueInfoAction {
+
+ private static final long serialVersionUID = 1L;
+
+ private final long timestampNanos;
+
+ public EnqueueAction(long timestampNanos) {
+ this.timestampNanos = timestampNanos;
+ }
+
+ public long getTimestampNanos() {
+ return timestampNanos;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ EnqueueAction that = (EnqueueAction) o;
+ return timestampNanos == that.timestampNanos;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(timestampNanos);
+ }
+
+ @Override
+ public String toString() {
+ return "EnqueueAction{timestampNanos=" + timestampNanos + '}';
+ }
+
+ public static final class ConverterImpl extends DatadogActionConverter {
+ public ConverterImpl(XStream xs) {
+ }
+
+ @Override
+ public boolean canConvert(Class type) {
+ return EnqueueAction.class == type;
+ }
+
+ @Override
+ public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
+ EnqueueAction action = (EnqueueAction) source;
+ writeField("timestampNanos", action.timestampNanos, writer, context);
+ }
+
+ @Override
+ public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
+ long timestampNanos = readField(reader, context, long.class);
+ return new EnqueueAction(timestampNanos);
+ }
+ }
+}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/NodeInfoAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/NodeInfoAction.java
new file mode 100644
index 000000000..9eabab814
--- /dev/null
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/NodeInfoAction.java
@@ -0,0 +1,128 @@
+package org.datadog.jenkins.plugins.datadog.model.node;
+
+import com.thoughtworks.xstream.XStream;
+import com.thoughtworks.xstream.converters.MarshallingContext;
+import com.thoughtworks.xstream.converters.UnmarshallingContext;
+import com.thoughtworks.xstream.io.HierarchicalStreamReader;
+import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
+import java.util.Collections;
+import java.util.Objects;
+import java.util.Set;
+import org.datadog.jenkins.plugins.datadog.model.DatadogPluginAction;
+import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter;
+
+public class NodeInfoAction extends DatadogPluginAction {
+
+ private static final long serialVersionUID = 1L;
+
+ private final String nodeName;
+ private final String nodeHostname;
+ private final Set nodeLabels;
+ private final String nodeWorkspace;
+
+ public NodeInfoAction(String nodeName, String nodeHostname, Set nodeLabels, String nodeWorkspace) {
+ this.nodeName = nodeName;
+ this.nodeHostname = nodeHostname;
+ this.nodeLabels = nodeLabels;
+ this.nodeWorkspace = nodeWorkspace;
+ }
+
+ public String getNodeName() {
+ return nodeName;
+ }
+
+ public String getNodeHostname() {
+ return nodeHostname;
+ }
+
+ public Set getNodeLabels() {
+ return nodeLabels;
+ }
+
+ public String getNodeWorkspace() {
+ return nodeWorkspace;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ NodeInfoAction that = (NodeInfoAction) o;
+ return Objects.equals(nodeName, that.nodeName) && Objects.equals(nodeHostname, that.nodeHostname) && Objects.equals(nodeLabels, that.nodeLabels) && Objects.equals(nodeWorkspace, that.nodeWorkspace);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(nodeName, nodeHostname, nodeLabels, nodeWorkspace);
+ }
+
+ @Override
+ public String toString() {
+ return "NodeInfoAction{" +
+ "nodeName='" + nodeName + '\'' +
+ ", nodeHostname='" + nodeHostname + '\'' +
+ ", nodeLabels=" + nodeLabels +
+ ", nodeWorkspace='" + nodeWorkspace + '\'' +
+ '}';
+ }
+
+ public static final class ConverterImpl extends DatadogActionConverter {
+ public ConverterImpl(XStream xs) {
+ }
+
+ @Override
+ public boolean canConvert(Class type) {
+ return NodeInfoAction.class == type;
+ }
+
+ @Override
+ public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
+ NodeInfoAction action = (NodeInfoAction) source;
+ if (action.nodeName != null) {
+ writeField("nodeName", action.nodeName, writer, context);
+ }
+ if (action.nodeHostname != null) {
+ writeField("nodeHostname", action.nodeHostname, writer, context);
+ }
+ if (action.nodeLabels != null && !action.nodeLabels.isEmpty()) {
+ writeField("nodeLabels", action.nodeLabels, writer, context);
+ }
+ if (action.nodeWorkspace != null && !action.nodeWorkspace.isEmpty()) {
+ writeField("nodeWorkspace", action.nodeWorkspace, writer, context);
+ }
+ }
+
+ @Override
+ public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
+ String nodeName = null;
+ String nodeHostname = null;
+ Set nodeLabels = Collections.emptySet();
+ String nodeWorkspace = null;
+
+ while (reader.hasMoreChildren()) {
+ reader.moveDown();
+ String fieldName = reader.getNodeName();
+ switch (fieldName) {
+ case "nodeName":
+ nodeName = (String) context.convertAnother(null, String.class);
+ break;
+ case "nodeHostname":
+ nodeHostname = (String) context.convertAnother(null, String.class);
+ break;
+ case "nodeLabels":
+ nodeLabels = (Set) context.convertAnother(null, Set.class);
+ break;
+ case "nodeWorkspace":
+ nodeWorkspace = (String) context.convertAnother(null, String.class);
+ break;
+ default:
+ // unknown tag, could be something serialized by a different version of the plugin
+ break;
+ }
+ reader.moveUp();
+ }
+ return new NodeInfoAction(nodeName, nodeHostname, nodeLabels, nodeWorkspace);
+ }
+ }
+
+}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/QueueInfoAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/QueueInfoAction.java
new file mode 100644
index 000000000..9fdbbb533
--- /dev/null
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/QueueInfoAction.java
@@ -0,0 +1,10 @@
+package org.datadog.jenkins.plugins.datadog.model.node;
+
+import org.datadog.jenkins.plugins.datadog.model.DatadogPluginAction;
+
+/**
+ * A marker supertype (class, not interface) for enqueue and dequeue actions.
+ * Allows replacing an enqueue action with a dequeue action in one call, avoiding writing to disk twice.
+ */
+public class QueueInfoAction extends DatadogPluginAction {
+}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/StatusAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/StatusAction.java
new file mode 100644
index 000000000..cad240997
--- /dev/null
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/model/node/StatusAction.java
@@ -0,0 +1,77 @@
+package org.datadog.jenkins.plugins.datadog.model.node;
+
+import com.thoughtworks.xstream.XStream;
+import com.thoughtworks.xstream.converters.MarshallingContext;
+import com.thoughtworks.xstream.converters.UnmarshallingContext;
+import com.thoughtworks.xstream.io.HierarchicalStreamReader;
+import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
+import java.util.Objects;
+import org.datadog.jenkins.plugins.datadog.model.DatadogPluginAction;
+import org.datadog.jenkins.plugins.datadog.model.Status;
+import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter;
+
+public class StatusAction extends DatadogPluginAction {
+
+ private static final long serialVersionUID = 1L;
+
+ private final Status status;
+ private final boolean propagate;
+
+ public StatusAction(Status status, boolean propagate) {
+ this.status = status;
+ this.propagate = propagate;
+ }
+
+ public Status getStatus() {
+ return status;
+ }
+
+ public boolean isPropagate() {
+ return propagate;
+ }
+
+ @Override
+ public String toString() {
+ return "StatusAction{" +
+ "status='" + status + '\'' +
+ ", propagate=" + propagate +
+ '}';
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ StatusAction that = (StatusAction) o;
+ return propagate == that.propagate && status == that.status;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(status, propagate);
+ }
+
+ public static final class ConverterImpl extends DatadogActionConverter {
+ public ConverterImpl(XStream xs) {
+ }
+
+ @Override
+ public boolean canConvert(Class type) {
+ return StatusAction.class == type;
+ }
+
+ @Override
+ public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
+ StatusAction action = (StatusAction) source;
+ writeField("status", action.status, writer, context);
+ writeField("propagate", action.propagate, writer, context);
+ }
+
+ @Override
+ public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
+ Status status = readField(reader, context, Status.class);
+ boolean propagate = readField(reader, context, boolean.class);
+ return new StatusAction(status, propagate);
+ }
+ }
+}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/steps/DatadogOptions.java b/src/main/java/org/datadog/jenkins/plugins/datadog/steps/DatadogOptions.java
index 24f3e2468..e2e056930 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/steps/DatadogOptions.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/steps/DatadogOptions.java
@@ -14,7 +14,9 @@
import jenkins.YesNoMaybe;
import org.datadog.jenkins.plugins.datadog.DatadogUtilities;
import org.datadog.jenkins.plugins.datadog.logs.DatadogTaskListenerDecorator;
+import org.datadog.jenkins.plugins.datadog.model.BuildData;
import org.datadog.jenkins.plugins.datadog.tracer.DatadogTracerJobProperty;
+import org.datadog.jenkins.plugins.datadog.traces.BuildSpanAction;
import org.jenkinsci.plugins.workflow.job.WorkflowRun;
import org.jenkinsci.plugins.workflow.log.TaskListenerDecorator;
import org.jenkinsci.plugins.workflow.steps.BodyExecutionCallback;
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/BuildSpanAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/BuildSpanAction.java
index 05a05da32..fd5a35285 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/BuildSpanAction.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/BuildSpanAction.java
@@ -1,31 +1,101 @@
package org.datadog.jenkins.plugins.datadog.traces;
-import hudson.model.InvisibleAction;
-import org.datadog.jenkins.plugins.datadog.model.BuildData;
+import com.thoughtworks.xstream.XStream;
+import com.thoughtworks.xstream.converters.MarshallingContext;
+import com.thoughtworks.xstream.converters.UnmarshallingContext;
+import com.thoughtworks.xstream.io.HierarchicalStreamReader;
+import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
+import java.util.Objects;
+import org.datadog.jenkins.plugins.datadog.model.DatadogPluginAction;
import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan;
-
-import java.io.Serializable;
+import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter;
/**
* Keeps build span propagation
*/
-public class BuildSpanAction extends InvisibleAction implements Serializable {
+public class BuildSpanAction extends DatadogPluginAction {
private static final long serialVersionUID = 1L;
- private final BuildData buildData;
private final TraceSpan.TraceSpanContext buildSpanContext;
+ private volatile String buildUrl;
- public BuildSpanAction(final BuildData buildData, final TraceSpan.TraceSpanContext buildSpanContext){
- this.buildData = buildData;
+ public BuildSpanAction(final TraceSpan.TraceSpanContext buildSpanContext){
this.buildSpanContext = buildSpanContext;
}
- public BuildData getBuildData() {
- return buildData;
+ public BuildSpanAction(TraceSpan.TraceSpanContext buildSpanContext, String buildUrl) {
+ this.buildSpanContext = buildSpanContext;
+ this.buildUrl = buildUrl;
}
public TraceSpan.TraceSpanContext getBuildSpanContext() {
return buildSpanContext;
}
+
+ public String getBuildUrl() {
+ return buildUrl;
+ }
+
+ public BuildSpanAction setBuildUrl(String buildUrl) {
+ this.buildUrl = buildUrl;
+ return this;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ BuildSpanAction that = (BuildSpanAction) o;
+ return Objects.equals(buildSpanContext, that.buildSpanContext) && Objects.equals(buildUrl, that.buildUrl);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(buildSpanContext, buildUrl);
+ }
+
+ @Override
+ public String toString() {
+ return "BuildSpanAction{" +
+ "buildSpanContext=" + buildSpanContext +
+ ", buildUrl=" + buildUrl +
+ '}';
+ }
+
+ public static final class ConverterImpl extends DatadogActionConverter {
+ public ConverterImpl(XStream xs) {
+ }
+
+ @Override
+ public boolean canConvert(Class type) {
+ return BuildSpanAction.class == type;
+ }
+
+ @Override
+ public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
+ BuildSpanAction action = (BuildSpanAction) source;
+ writeField("spanContext", action.buildSpanContext, writer, context);
+ if (action.buildUrl != null) {
+ writeField("buildUrl", action.buildUrl, writer, context);
+ }
+ }
+
+ @Override
+ public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
+ TraceSpan.TraceSpanContext spanContext = readField(reader, context, TraceSpan.TraceSpanContext.class);
+
+ String buildUrl = null;
+ while (reader.hasMoreChildren()) {
+ reader.moveDown();
+ String fieldName = reader.getNodeName();
+ if ("buildUrl".equals(fieldName)) {
+ buildUrl = (String) context.convertAnother(null, String.class);
+ }
+ reader.moveUp();
+ }
+
+ return new BuildSpanAction(spanContext, buildUrl);
+ }
+ }
}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBaseBuildLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBaseBuildLogic.java
index d5852d3d2..9c0061674 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBaseBuildLogic.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBaseBuildLogic.java
@@ -2,22 +2,28 @@
import hudson.model.Cause;
import hudson.model.Run;
+import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
-import java.util.Map;
+import java.util.Queue;
import java.util.Set;
+import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
import javax.annotation.Nullable;
import net.sf.json.JSONObject;
import org.datadog.jenkins.plugins.datadog.DatadogUtilities;
import org.datadog.jenkins.plugins.datadog.model.BuildData;
import org.datadog.jenkins.plugins.datadog.model.PipelineNodeInfoAction;
-import org.datadog.jenkins.plugins.datadog.model.StageBreakdownAction;
import org.datadog.jenkins.plugins.datadog.model.StageData;
import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings;
import org.datadog.jenkins.plugins.datadog.util.json.JsonUtils;
+import org.jenkinsci.plugins.workflow.flow.FlowExecution;
+import org.jenkinsci.plugins.workflow.graph.BlockEndNode;
+import org.jenkinsci.plugins.workflow.graph.BlockStartNode;
+import org.jenkinsci.plugins.workflow.graph.FlowNode;
+import org.jenkinsci.plugins.workflow.job.WorkflowRun;
/**
* Base class for DatadogTraceBuildLogic and DatadogPipelineBuildLogic
@@ -29,26 +35,7 @@ public abstract class DatadogBaseBuildLogic {
private static final Logger logger = Logger.getLogger(DatadogBaseBuildLogic.class.getName());
@Nullable
- public abstract JSONObject finishBuildTrace(final BuildData buildData, final Run,?> run);
-
- protected String getNodeName(Run, ?> run, BuildData buildData, BuildData updatedBuildData) {
- final PipelineNodeInfoAction pipelineNodeInfoAction = run.getAction(PipelineNodeInfoAction.class);
- if(pipelineNodeInfoAction != null){
- return pipelineNodeInfoAction.getNodeName();
- }
-
- return buildData.getNodeName("").isEmpty() ? updatedBuildData.getNodeName("") : buildData.getNodeName("");
- }
-
- protected String getNodeHostname(Run, ?> run, BuildData updatedBuildData) {
- final PipelineNodeInfoAction pipelineNodeInfoAction = run.getAction(PipelineNodeInfoAction.class);
- if(pipelineNodeInfoAction != null){
- return pipelineNodeInfoAction.getNodeHostname();
- } else if (!updatedBuildData.getHostname("").isEmpty()) {
- return updatedBuildData.getHostname("");
- }
- return null;
- }
+ public abstract JSONObject toJson(final BuildData buildData, final Run,?> run);
@SuppressFBWarnings("NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE")
protected Set getNodeLabels(Run,?> run, final String nodeName) {
@@ -57,6 +44,8 @@ protected Set getNodeLabels(Run,?> run, final String nodeName) {
return Collections.emptySet();
}
+ // First examine PipelineNodeInfoAction associated with the build.
+ // The action is populated in step listener based on environment and executor data available for pipeline steps.
final PipelineNodeInfoAction pipelineNodeInfoAction = run.getAction(PipelineNodeInfoAction.class);
if(pipelineNodeInfoAction != null) {
return pipelineNodeInfoAction.getNodeLabels();
@@ -96,13 +85,22 @@ protected long getMillisInQueue(BuildData buildData) {
}
protected String getStageBreakdown(Run run) {
- final StageBreakdownAction stageBreakdownAction = run.getAction(StageBreakdownAction.class);
- if(stageBreakdownAction == null) {
+ if (!(run instanceof WorkflowRun)) {
+ return null;
+ }
+
+ WorkflowRun workflowRun = (WorkflowRun) run;
+ FlowExecution execution = workflowRun.getExecution();
+ if (execution == null) {
return null;
}
- final Map stageDataByName = stageBreakdownAction.getStageDataByName();
- final List stages = new ArrayList<>(stageDataByName.values());
+ List currentHeads = execution.getCurrentHeads();
+ if (currentHeads == null || currentHeads.isEmpty()) {
+ return null;
+ }
+
+ final List stages = traverseStages(currentHeads);
Collections.sort(stages);
final String stagesJson = JsonUtils.toJson(new ArrayList<>(stages));
@@ -114,6 +112,41 @@ protected String getStageBreakdown(Run run) {
return stagesJson;
}
+ private List<StageData> traverseStages(List<FlowNode> heads) {
+ List<StageData> stages = new ArrayList<>();
+ Queue<FlowNode> nodes = new ArrayDeque<>(heads);
+ while (!nodes.isEmpty()) {
+ FlowNode node = nodes.poll();
+ nodes.addAll(node.getParents());
+
+ if (!(node instanceof BlockEndNode)) {
+ continue;
+ }
+
+ BlockEndNode<?> endNode = (BlockEndNode<?>) node;
+ BlockStartNode startNode = endNode.getStartNode();
+ if (!DatadogUtilities.isStageNode(startNode)) {
+ continue;
+ }
+
+ long startTimeMicros = TimeUnit.MILLISECONDS.toMicros(DatadogUtilities.getTimeMillis(startNode));
+ long endTimeMicros = TimeUnit.MILLISECONDS.toMicros(DatadogUtilities.getTimeMillis(endNode));
+ if (startTimeMicros <= 0 || endTimeMicros <= 0) {
+ logger.fine("Skipping stage " + startNode.getDisplayName() + " because it has no time info " +
+ "(start: " + startTimeMicros + ", end: " + endTimeMicros + ")");
+ continue;
+ }
+
+ StageData stageData = new StageData.Builder()
+ .withName(startNode.getDisplayName())
+ .withStartTimeInMicros(startTimeMicros)
+ .withEndTimeInMicros(endTimeMicros)
+ .build();
+ stages.add(stageData);
+ }
+ return stages;
+ }
+
// Returns true if the run causes contains a Cause.UserIdCause
public boolean isTriggeredManually(Run run) {
final List<Cause> causes = run.getCauses();
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java
index 54526597d..64af28e16 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogBasePipelineLogic.java
@@ -1,28 +1,17 @@
package org.datadog.jenkins.plugins.datadog.traces;
import hudson.model.Run;
-import java.util.Collection;
+import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
import java.util.Set;
-import java.util.concurrent.TimeoutException;
-import java.util.logging.Logger;
-import javax.annotation.Nonnull;
import net.sf.json.JSONObject;
import org.datadog.jenkins.plugins.datadog.DatadogUtilities;
-import org.datadog.jenkins.plugins.datadog.audit.DatadogAudit;
import org.datadog.jenkins.plugins.datadog.model.BuildData;
-import org.datadog.jenkins.plugins.datadog.model.BuildPipeline;
-import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode;
-import org.datadog.jenkins.plugins.datadog.model.CIGlobalTagsAction;
+import org.datadog.jenkins.plugins.datadog.model.PipelineStepData;
import org.datadog.jenkins.plugins.datadog.model.PipelineNodeInfoAction;
import org.datadog.jenkins.plugins.datadog.util.SuppressFBWarnings;
-import org.datadog.jenkins.plugins.datadog.util.TagsUtil;
-import org.jenkinsci.plugins.workflow.graph.FlowEndNode;
-import org.jenkinsci.plugins.workflow.graph.FlowNode;
-import org.jenkinsci.plugins.workflow.graphanalysis.DepthFirstScanner;
+
/**
* Base class with shared code for DatadogTracePipelineLogic and DatadogWebhookPipelineLogic
@@ -31,49 +20,33 @@ public abstract class DatadogBasePipelineLogic {
protected static final String CI_PROVIDER = "jenkins";
protected static final String HOSTNAME_NONE = "none";
- private static final Logger logger = Logger.getLogger(DatadogBasePipelineLogic.class.getName());
-
- @Nonnull
- public abstract Collection<JSONObject> execute(FlowNode flowNode, Run<?, ?> run);
-
- protected BuildPipelineNode buildPipelineTree(FlowEndNode flowEndNode) {
-
- final BuildPipeline pipeline = new BuildPipeline();
- // As this logic is evaluated in the last node of the graph,
- // getCurrentHeads() method returns all nodes as a plain list.
- final List<FlowNode> currentHeads = flowEndNode.getExecution().getCurrentHeads();
-
- // Provided that plain list of nodes, the DepthFirstScanner algorithm
- // is used to visit efficiently every node in form of a DAG.
- final DepthFirstScanner scanner = new DepthFirstScanner();
- scanner.setup(currentHeads);
-
- // Every found flow node of the DAG is added to the BuildPipeline instance.
- scanner.forEach(pipeline::add);
-
- return pipeline.buildTree(); // returns the root node
- }
+ public abstract JSONObject toJson(PipelineStepData current, Run, ?> run) throws IOException, InterruptedException;
@SuppressFBWarnings("NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE")
- protected Set<String> getNodeLabels(Run<?, ?> run, BuildPipelineNode current, String nodeName) {
+ protected Set<String> getNodeLabels(Run<?, ?> run, PipelineStepData current, String nodeName) {
final PipelineNodeInfoAction pipelineNodeInfoAction = run.getAction(PipelineNodeInfoAction.class);
if (current.getNodeLabels() != null && !current.getNodeLabels().isEmpty()) {
+ // First examine if current step has info about the node it was executed on.
return current.getNodeLabels();
+
} else if (pipelineNodeInfoAction != null && !pipelineNodeInfoAction.getNodeLabels().isEmpty()) {
+ // Examine PipelineNodeInfoAction associated with the pipeline.
+ // The action is populated in step listener based on environment and executor data available for pipeline steps.
return pipelineNodeInfoAction.getNodeLabels();
}
- if (run.getExecutor() != null && run.getExecutor().getOwner() != null) {
- Set<String> nodeLabels = DatadogUtilities.getNodeLabels(run.getExecutor().getOwner());
- if (nodeLabels != null && !nodeLabels.isEmpty()) {
- return nodeLabels;
+ if (DatadogUtilities.isMainNode(nodeName)) {
+ // executor owner is the master node even if the pipeline contains an "agent" block
+ if (run.getExecutor() != null) {
+ Set<String> nodeLabels = DatadogUtilities.getNodeLabels(run.getExecutor().getOwner());
+ if (!nodeLabels.isEmpty()) {
+ return nodeLabels;
+ }
}
- }
- // If there is no labels and the node name is master,
- // we force the label "master".
- if (DatadogUtilities.isMainNode(nodeName)) {
+ // If there is no labels and the node name is master,
+ // we force the label "master".
final Set<String> masterLabels = new HashSet<>();
masterLabels.add("master");
return masterLabels;
@@ -82,64 +55,30 @@ protected Set getNodeLabels(Run run, BuildPipelineNode current, String n
return Collections.emptySet();
}
- protected String getNodeName(Run<?, ?> run, BuildPipelineNode current, BuildData buildData) {
- final PipelineNodeInfoAction pipelineNodeInfoAction = run.getAction(PipelineNodeInfoAction.class);
-
- if(current.getNodeName() != null) {
+ protected String getNodeName(PipelineStepData current, BuildData buildData) {
+ if (current.getNodeName() != null) {
return current.getNodeName();
- } else if (pipelineNodeInfoAction != null) {
- return pipelineNodeInfoAction.getNodeName();
}
-
- return buildData.getNodeName("");
+ // It seems like "built-in" node as the default value does not have much practical sense.
+ // It is done to preserve existing behavior (note that this logic is not applied to metrics - also to preserve the plugin's existing behavior).
+ // The mechanism before the changes was the following:
+ // - DatadogBuildListener#onInitialize created a BuildData instance
+ // - that BuildData had its nodeName populated from environment variables obtained from Run
+ // - the instance was persisted in an Action attached to Run, and was used to populate the node name of the pipeline span (always as the last fallback)
+ // For pipelines, the environment variables that Run#getEnvironment returns at the beginning of the run always (!) contain NODE_NAME = "built-in" (when invoked at the end of the run, the env will have a different set of variables).
+ // This is true regardless of whether the pipeline definition has a top-level agent block or not.
+ // For freestyle projects the correct NODE_NAME seems to be available in the run's environment variables at every stage of the build's lifecycle.
+ return buildData.getNodeName("built-in");
}
- protected String getNodeHostname(Run<?, ?> run, BuildPipelineNode current) {
- final PipelineNodeInfoAction pipelineNodeInfoAction = run.getAction(PipelineNodeInfoAction.class);
- if(current.getNodeHostname() != null) {
+ protected String getNodeHostname(PipelineStepData current, BuildData buildData) {
+ if (current.getNodeHostname() != null) {
return current.getNodeHostname();
- } else if (pipelineNodeInfoAction != null) {
- return pipelineNodeInfoAction.getNodeHostname();
- }
- return null;
- }
-
- protected boolean isTraceable(BuildPipelineNode node) {
- if (node.getStartTimeMicros() == -1L) {
- logger.severe("Unable to send trace of node: " + node.getName() + ". Start Time is not set");
- return false;
- }
-
- if(node.getEndTimeMicros() == -1L) {
- logger.severe("Unable to send trace of node: " + node.getName() + ". End Time is not set");
- return false;
- }
-
- if(node.isInternal()){
- logger.fine("Node: " + node.getName() + " is Jenkins internal. We skip it.");
- return false;
- }
-
- return true;
- }
-
- protected void updateCIGlobalTags(Run run) {
- long start = System.currentTimeMillis();
- try {
- final CIGlobalTagsAction ciGlobalTagsAction = run.getAction(CIGlobalTagsAction.class);
- if(ciGlobalTagsAction == null) {
- return;
- }
-
- final Map tags = TagsUtil.convertTagsToMapSingleValues(DatadogUtilities.getTagsFromPipelineAction(run));
- ciGlobalTagsAction.putAll(tags);
- } finally {
- long end = System.currentTimeMillis();
- DatadogAudit.log("DatadogTracePipelineLogic.updateCIGlobalTags", start, end);
}
+ return buildData.getHostname("");
}
- protected String buildOperationName(BuildPipelineNode current) {
- return CI_PROVIDER + "." + current.getType().name().toLowerCase() + ((current.isInternal()) ? ".internal" : "");
+ protected String buildOperationName(PipelineStepData current) {
+ return CI_PROVIDER + "." + current.getType().name().toLowerCase();
}
}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java
index 518abceda..211467c07 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTraceBuildLogic.java
@@ -1,7 +1,6 @@
package org.datadog.jenkins.plugins.datadog.traces;
import static org.datadog.jenkins.plugins.datadog.DatadogUtilities.statusFromResult;
-import static org.datadog.jenkins.plugins.datadog.DatadogUtilities.toJson;
import static org.datadog.jenkins.plugins.datadog.traces.CITags.Values.ORIGIN_CIAPP_PIPELINE;
import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.filterSensitiveInfo;
import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.normalizeBranch;
@@ -10,21 +9,23 @@
import hudson.model.Result;
import hudson.model.Run;
+import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
+import javax.annotation.Nullable;
import net.sf.json.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.datadog.jenkins.plugins.datadog.DatadogUtilities;
import org.datadog.jenkins.plugins.datadog.model.BuildData;
-import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode;
-import org.datadog.jenkins.plugins.datadog.model.CIGlobalTagsAction;
+import org.datadog.jenkins.plugins.datadog.model.PipelineStepData;
import org.datadog.jenkins.plugins.datadog.traces.mapper.JsonTraceSpanMapper;
import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan;
+import org.datadog.jenkins.plugins.datadog.util.TagsUtil;
/**
- * Keeps the logic to send traces related to Jenkins Build.
+ * Keeps the logic to create traces related to Jenkins Build.
* This gets called once per job (datadog level: pipeline)
*/
public class DatadogTraceBuildLogic extends DatadogBaseBuildLogic {
@@ -33,14 +34,16 @@ public class DatadogTraceBuildLogic extends DatadogBaseBuildLogic {
private final JsonTraceSpanMapper jsonTraceSpanMapper = new JsonTraceSpanMapper();
+ @Nullable
@Override
- public JSONObject finishBuildTrace(final BuildData buildData, final Run<?,?> run) {
- TraceSpan span = createSpan(buildData, run);
+ public JSONObject toJson(final BuildData buildData, final Run<?,?> run) {
+ TraceSpan span = toSpan(buildData, run);
return span != null ? jsonTraceSpanMapper.map(span) : null;
}
+ @Nullable
// hook for tests
- public TraceSpan createSpan(final BuildData buildData, final Run<?,?> run) {
+ public TraceSpan toSpan(final BuildData buildData, final Run<?,?> run) {
if (!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) {
return null;
}
@@ -55,14 +58,8 @@ public TraceSpan createSpan(final BuildData buildData, final Run,?> run) {
return null;
}
- // In this point of the execution, the BuildData stored within
- // BuildSpanAction has been updated by the information available
- // inside the Pipeline steps. (Only applicable if the build is
- // based on Jenkins Pipelines).
- final BuildData updatedBuildData = buildSpanAction.getBuildData();
-
- final String prefix = BuildPipelineNode.NodeType.PIPELINE.getTagName();
- final String buildLevel = BuildPipelineNode.NodeType.PIPELINE.getBuildLevel();
+ final String prefix = PipelineStepData.StepType.PIPELINE.getTagName();
+ final String buildLevel = PipelineStepData.StepType.PIPELINE.getBuildLevel();
final long endTimeMicros = buildData.getEndTime(0L) * 1000;
buildSpan.setServiceName(DatadogUtilities.getDatadogGlobalDescriptor().getCiInstanceName());
@@ -80,44 +77,46 @@ public TraceSpan createSpan(final BuildData buildData, final Run,?> run) {
buildSpan.putMeta(prefix + CITags._ID, buildData.getBuildTag(""));
buildSpan.putMeta(prefix + CITags._NUMBER, buildData.getBuildNumber(""));
buildSpan.putMeta(prefix + CITags._URL, buildData.getBuildUrl(""));
- buildSpan.putMetric(CITags.QUEUE_TIME, TimeUnit.MILLISECONDS.toSeconds(getMillisInQueue(updatedBuildData)));
+ buildSpan.putMetric(CITags.QUEUE_TIME, TimeUnit.MILLISECONDS.toSeconds(getMillisInQueue(buildData)));
// Pipeline Parameters
if(!buildData.getBuildParameters().isEmpty()) {
- buildSpan.putMeta(CITags.CI_PARAMETERS, toJson(buildData.getBuildParameters()));
- }
-
- final String workspace = buildData.getWorkspace("").isEmpty() ? updatedBuildData.getWorkspace("") : buildData.getWorkspace("");
- buildSpan.putMeta(CITags.WORKSPACE_PATH, workspace);
-
- final String nodeName = getNodeName(run, buildData, updatedBuildData);
+ buildSpan.putMeta(CITags.CI_PARAMETERS, DatadogUtilities.toJson(buildData.getBuildParameters()));
+ }
+
+ // It seems like "built-in" node as the default value does not have much practical sense.
+ // It is done to preserve existing behavior (note that this logic is not applied to metrics - also to preserve the plugin's existing behavior).
+ // The mechanism before the changes was the following:
+ // - DatadogBuildListener#onInitialize created a BuildData instance
+ // - that BuildData had its nodeName populated from environment variables obtained from Run
+ // - the instance was persisted in an Action attached to Run, and was used to populate the node name of the pipeline span (always as the last fallback)
+ // For pipelines, the environment variables that Run#getEnvironment returns at the beginning of the run always (!) contain NODE_NAME = "built-in" (when invoked at the end of the run, the env will have a different set of variables).
+ // This is true regardless of whether the pipeline definition has a top-level agent block or not.
+ // For freestyle projects the correct NODE_NAME seems to be available in the run's environment variables at every stage of the build's lifecycle.
+ String nodeName = buildData.getNodeName("built-in");
+ buildSpan.putMeta(CITags.WORKSPACE_PATH, buildData.getWorkspace(""));
buildSpan.putMeta(CITags.NODE_NAME, nodeName);
- final String nodeLabelsJson = toJson(getNodeLabels(run, nodeName));
- if(!nodeLabelsJson.isEmpty()){
+ final String nodeLabelsJson = DatadogUtilities.toJson(getNodeLabels(run, buildData.getNodeName("")));
+ if(nodeLabelsJson != null && !nodeLabelsJson.isEmpty()){
buildSpan.putMeta(CITags.NODE_LABELS, nodeLabelsJson);
+ } else {
+ buildSpan.putMeta(CITags.NODE_LABELS, "[]");
}
// If the NodeName == "master", we don't set _dd.hostname. It will be overridden by the Datadog Agent. (Traces are only available using Datadog Agent)
if(!DatadogUtilities.isMainNode(nodeName)) {
- final String workerHostname = getNodeHostname(run, updatedBuildData);
- // If the worker hostname is equals to controller hostname but the node name is not master/built-in then we
- // could not detect the worker hostname properly. Check if it's set in the environment, otherwise set to none.
- if(buildData.getHostname("").equalsIgnoreCase(workerHostname)) {
- String envHostnameOrNone = DatadogUtilities.getHostnameFromWorkerEnv(run).orElse(HOSTNAME_NONE);
- buildSpan.putMeta(CITags._DD_HOSTNAME, envHostnameOrNone);
- } else {
- buildSpan.putMeta(CITags._DD_HOSTNAME, (workerHostname != null) ? workerHostname : HOSTNAME_NONE);
- }
+ final String workerHostname = buildData.getHostname("");
+ buildSpan.putMeta(CITags._DD_HOSTNAME, !workerHostname.isEmpty() ? workerHostname : HOSTNAME_NONE);
}
// Git Info
- final String gitUrl = buildData.getGitUrl("").isEmpty() ? updatedBuildData.getGitUrl("") : buildData.getGitUrl("");
+ final String gitUrl = buildData.getGitUrl("");
if(StringUtils.isNotEmpty(gitUrl)){
buildSpan.putMeta(CITags.GIT_REPOSITORY_URL, filterSensitiveInfo(gitUrl));
}
- final String gitCommit = buildData.getGitCommit("").isEmpty() ? updatedBuildData.getGitCommit("") : buildData.getGitCommit("");
+ final String gitCommit = buildData.getGitCommit("");
if(!isValidCommit(gitCommit)) {
logger.warning("Couldn't find a valid commit for pipelineID '"+buildData.getBuildTag("")+"'. GIT_COMMIT environment variable was not found or has invalid SHA1 string: " + gitCommit);
}
@@ -127,47 +126,47 @@ public TraceSpan createSpan(final BuildData buildData, final Run,?> run) {
buildSpan.putMeta(CITags.GIT_COMMIT_SHA, gitCommit);
}
- final String gitMessage = buildData.getGitMessage("").isEmpty() ? updatedBuildData.getGitMessage("") : buildData.getGitMessage("");
+ final String gitMessage = buildData.getGitMessage("");
if(StringUtils.isNotEmpty(gitMessage)){
buildSpan.putMeta(CITags.GIT_COMMIT_MESSAGE, gitMessage);
}
- final String gitAuthor = buildData.getGitAuthorName("").isEmpty() ? updatedBuildData.getGitAuthorName("") : buildData.getGitAuthorName("");
+ final String gitAuthor = buildData.getGitAuthorName("");
if(StringUtils.isNotEmpty(gitAuthor)){
buildSpan.putMeta(CITags.GIT_COMMIT_AUTHOR_NAME, gitAuthor);
}
- final String gitAuthorEmail = buildData.getGitAuthorEmail("").isEmpty() ? updatedBuildData.getGitAuthorEmail("") : buildData.getGitAuthorEmail("");
+ final String gitAuthorEmail = buildData.getGitAuthorEmail("");
if(StringUtils.isNotEmpty(gitAuthorEmail)){
buildSpan.putMeta(CITags.GIT_COMMIT_AUTHOR_EMAIL, gitAuthorEmail);
}
- final String gitAuthorDate = buildData.getGitAuthorDate("").isEmpty() ? updatedBuildData.getGitAuthorDate("") : buildData.getGitAuthorDate("");
+ final String gitAuthorDate = buildData.getGitAuthorDate("");
if(StringUtils.isNotEmpty(gitAuthorDate)){
buildSpan.putMeta(CITags.GIT_COMMIT_AUTHOR_DATE, gitAuthorDate);
}
- final String gitCommitter = buildData.getGitCommitterName("").isEmpty() ? updatedBuildData.getGitCommitterName("") : buildData.getGitCommitterName("");
+ final String gitCommitter = buildData.getGitCommitterName("");
if(StringUtils.isNotEmpty(gitCommitter)){
buildSpan.putMeta(CITags.GIT_COMMIT_COMMITTER_NAME, gitCommitter);
}
- final String gitCommitterEmail = buildData.getGitCommitterEmail("").isEmpty() ? updatedBuildData.getGitCommitterEmail("") : buildData.getGitCommitterEmail("");
+ final String gitCommitterEmail = buildData.getGitCommitterEmail("");
if(StringUtils.isNotEmpty(gitCommitterEmail)){
buildSpan.putMeta(CITags.GIT_COMMIT_COMMITTER_EMAIL, gitCommitterEmail);
}
- final String gitCommitterDate = buildData.getGitCommitterDate("").isEmpty() ? updatedBuildData.getGitCommitterDate("") : buildData.getGitCommitterDate("");
+ final String gitCommitterDate = buildData.getGitCommitterDate("");
if(StringUtils.isNotEmpty(gitCommitterDate)){
buildSpan.putMeta(CITags.GIT_COMMIT_COMMITTER_DATE, gitCommitterDate);
}
- final String gitDefaultBranch = buildData.getGitDefaultBranch("").isEmpty() ? updatedBuildData.getGitDefaultBranch("") : buildData.getGitDefaultBranch("");
+ final String gitDefaultBranch = buildData.getGitDefaultBranch("");
if(StringUtils.isNotEmpty(gitDefaultBranch)){
buildSpan.putMeta(CITags.GIT_DEFAULT_BRANCH, gitDefaultBranch);
}
- final String rawGitBranch = buildData.getBranch("").isEmpty() ? updatedBuildData.getBranch("") : buildData.getBranch("");
+ final String rawGitBranch = buildData.getBranch("");
final String gitBranch = normalizeBranch(rawGitBranch);
if(StringUtils.isNotEmpty(gitBranch)) {
buildSpan.putMeta(CITags.GIT_BRANCH, gitBranch);
@@ -175,7 +174,7 @@ public TraceSpan createSpan(final BuildData buildData, final Run,?> run) {
// Check if the user set manually the DD_GIT_TAG environment variable.
// Otherwise, Jenkins reports the tag in the Git branch information. (e.g. origin/tags/0.1.0)
- final String gitTag = Optional.of(buildData.getGitTag("").isEmpty() ? updatedBuildData.getGitTag("") : buildData.getGitTag(""))
+ final String gitTag = Optional.of(buildData.getGitTag(""))
.filter(tag -> !tag.isEmpty())
.orElse(normalizeTag(rawGitBranch));
if(StringUtils.isNotEmpty(gitTag)) {
@@ -214,20 +213,18 @@ public TraceSpan createSpan(final BuildData buildData, final Run,?> run) {
buildSpan.setError(true);
}
- // CI Tags propagation
- final CIGlobalTagsAction ciGlobalTagsAction = run.getAction(CIGlobalTagsAction.class);
- if(ciGlobalTagsAction != null) {
- final Map<String, String> tags = ciGlobalTagsAction.getTags();
- for(Map.Entry<String, String> tagEntry : tags.entrySet()) {
- buildSpan.putMeta(tagEntry.getKey(), tagEntry.getValue());
- }
+ Map<String, String> globalTags = new HashMap<>(buildData.getTagsForTraces());
+ globalTags.putAll(TagsUtil.convertTagsToMapSingleValues(DatadogUtilities.getTagsFromPipelineAction(run)));
+
+ for(Map.Entry<String, String> tagEntry : globalTags.entrySet()) {
+ buildSpan.putMeta(tagEntry.getKey(), tagEntry.getValue());
}
// If the build is a Jenkins Pipeline, the queue time is included in the root span duration.
// We need to adjust the endTime of the root span subtracting the queue time reported by its child span.
// The propagated queue time is set DatadogTracePipelineLogic#updateBuildData method.
// The queue time reported by DatadogBuildListener#onStarted method is not included in the root span duration.
- final long propagatedMillisInQueue = Math.max(updatedBuildData.getPropagatedMillisInQueue(-1L), 0);
+ final long propagatedMillisInQueue = Math.max(buildData.getPropagatedMillisInQueue(-1L), 0);
// Although the queue time happens before the span startTime, we cannot remove it from the startTime
// because there is no API to do it at the end of the trace. Additionally, we cannot create the root span
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTracePipelineLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTracePipelineLogic.java
index cc6ae4596..839065bf1 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTracePipelineLogic.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogTracePipelineLogic.java
@@ -1,36 +1,29 @@
package org.datadog.jenkins.plugins.datadog.traces;
-import static org.datadog.jenkins.plugins.datadog.DatadogUtilities.cleanUpTraceActions;
-import static org.datadog.jenkins.plugins.datadog.DatadogUtilities.statusFromResult;
import static org.datadog.jenkins.plugins.datadog.DatadogUtilities.toJson;
-import static org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode.NodeType.PIPELINE;
+import static org.datadog.jenkins.plugins.datadog.model.PipelineStepData.StepType.PIPELINE;
import static org.datadog.jenkins.plugins.datadog.traces.CITags.Values.ORIGIN_CIAPP_PIPELINE;
import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.filterSensitiveInfo;
import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.normalizeBranch;
import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.normalizeTag;
import hudson.model.Run;
+import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
-import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import net.sf.json.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.datadog.jenkins.plugins.datadog.DatadogUtilities;
import org.datadog.jenkins.plugins.datadog.model.BuildData;
-import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode;
-import org.datadog.jenkins.plugins.datadog.model.CIGlobalTagsAction;
+import org.datadog.jenkins.plugins.datadog.model.PipelineStepData;
+import org.datadog.jenkins.plugins.datadog.model.Status;
import org.datadog.jenkins.plugins.datadog.traces.mapper.JsonTraceSpanMapper;
import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan;
-import org.datadog.jenkins.plugins.datadog.util.git.GitUtils;
-import org.jenkinsci.plugins.workflow.graph.FlowEndNode;
-import org.jenkinsci.plugins.workflow.graph.FlowNode;
+import org.datadog.jenkins.plugins.datadog.util.TagsUtil;
/**
@@ -43,53 +36,15 @@ public class DatadogTracePipelineLogic extends DatadogBasePipelineLogic {
@Nonnull
@Override
- public Collection<JSONObject> execute(FlowNode flowNode, Run<?, ?> run) {
- Collection<TraceSpan> traces = collectTraces(flowNode, run);
- return traces.stream().map(jsonTraceSpanMapper::map).collect(Collectors.toList());
+ public JSONObject toJson(PipelineStepData flowNode, Run<?, ?> run) throws IOException, InterruptedException {
+ TraceSpan span = toSpan(flowNode, run);
+ return jsonTraceSpanMapper.map(span);
}
// hook for tests
- public Collection<TraceSpan> collectTraces(FlowNode flowNode, Run run) {
- if (!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) {
- return Collections.emptySet();
- }
-
- final IsPipelineAction isPipelineAction = run.getAction(IsPipelineAction.class);
- if(isPipelineAction == null) {
- run.addAction(new IsPipelineAction());
- }
-
- final BuildSpanAction buildSpanAction = run.getAction(BuildSpanAction.class);
- if(buildSpanAction == null) {
- return Collections.emptySet();
- }
-
- final BuildData buildData = buildSpanAction.getBuildData();
- if(!DatadogUtilities.isLastNode(flowNode)){
- updateCIGlobalTags(run);
- return Collections.emptySet();
- }
-
- final TraceSpan.TraceSpanContext traceSpanContext = buildSpanAction.getBuildSpanContext();
- final BuildPipelineNode root = buildPipelineTree((FlowEndNode) flowNode);
-
- try {
- return collectTraces(run, buildData, root, traceSpanContext);
- } finally {
- // Explicit removal of InvisibleActions used to collect Traces when the Run finishes.
- cleanUpTraceActions(run);
- }
- }
-
- private Collection<TraceSpan> collectTraces(final Run<?, ?> run, final BuildData buildData, final BuildPipelineNode current, final TraceSpan.TraceSpanContext parentSpanContext) {
- if(!isTraceable(current)) {
- Collection<TraceSpan> traces = new ArrayList<>();
- // If the current node is not traceable, we continue with its children
- for(final BuildPipelineNode child : current.getChildren()) {
- traces.addAll(collectTraces(run, buildData, child, parentSpanContext));
- }
- return traces;
- }
+ @Nonnull
+ public TraceSpan toSpan(PipelineStepData current, Run, ?> run) throws IOException, InterruptedException {
+ BuildData buildData = new BuildData(run, DatadogUtilities.getTaskListener(run));
// If the root span has propagated queue time, we need to adjust all startTime and endTime from Jenkins pipelines spans
// because this time will be subtracted in the root span. See DatadogTraceBuildLogic#finishBuildTrace method.
@@ -98,8 +53,8 @@ private Collection collectTraces(final Run, ?> run, final BuildData
final long fixedEndTimeNanos = TimeUnit.MICROSECONDS.toNanos(current.getEndTimeMicros() - TimeUnit.MILLISECONDS.toMicros(propagatedMillisInQueue));
// At this point, the current node is traceable.
- final TraceSpan.TraceSpanContext spanContext = new TraceSpan.TraceSpanContext(parentSpanContext.getTraceId(), parentSpanContext.getSpanId(), current.getSpanId());
- final TraceSpan span = new TraceSpan(buildOperationName(current), fixedStartTimeNanos + DatadogUtilities.getNanosInQueue(current), spanContext);
+ final TraceSpan.TraceSpanContext spanContext = new TraceSpan.TraceSpanContext(current.getTraceId(), current.getParentSpanId(), current.getSpanId());
+ final TraceSpan span = new TraceSpan(buildOperationName(current), fixedStartTimeNanos + current.getNanosInQueue(), spanContext);
span.setServiceName(DatadogUtilities.getDatadogGlobalDescriptor().getCiInstanceName());
span.setResourceName(current.getName());
span.setType("ci");
@@ -126,65 +81,60 @@ private Collection collectTraces(final Run, ?> run, final BuildData
}
}
- Collection<TraceSpan> traces = new ArrayList<>();
- for(final BuildPipelineNode child : current.getChildren()) {
- traces.addAll(collectTraces(run, buildData, child, span.context()));
- }
-
//Logs
//NOTE: Implement sendNodeLogs
span.setEndNano(fixedEndTimeNanos);
-
- traces.add(span);
- return traces;
+ return span;
}
- private Map<String, Double> buildTraceMetrics(BuildPipelineNode current) {
+ private Map<String, Double> buildTraceMetrics(PipelineStepData current) {
final Map<String, Double> metrics = new HashMap<>();
- metrics.put(CITags.QUEUE_TIME, TimeUnit.NANOSECONDS.toSeconds(DatadogUtilities.getNanosInQueue(current)));
+ metrics.put(CITags.QUEUE_TIME, TimeUnit.NANOSECONDS.toSeconds(current.getNanosInQueue()));
return metrics;
}
- private Map<String, Object> buildTraceTags(final Run run, final BuildPipelineNode current, final BuildData buildData) {
+ private Map<String, Object> buildTraceTags(final Run<?, ?> run, final PipelineStepData current, final BuildData buildData) {
final String prefix = current.getType().getTagName();
final String buildLevel = current.getType().getBuildLevel();
- final Map<String, String> envVars = current.getEnvVars();
final Map<String, Object> tags = new HashMap<>();
tags.put(CITags.CI_PROVIDER_NAME, CI_PROVIDER);
tags.put(CITags._DD_ORIGIN, ORIGIN_CIAPP_PIPELINE);
tags.put(prefix + CITags._NAME, current.getName());
tags.put(prefix + CITags._NUMBER, current.getId());
- final String status = statusFromResult(current.getResult());
- tags.put(prefix + CITags._RESULT, status);
- tags.put(CITags.STATUS, status);
+ Status status = current.getStatus();
+ tags.put(prefix + CITags._RESULT, status.toTag());
+ tags.put(CITags.STATUS, status.toTag());
// Pipeline Parameters
if(!buildData.getBuildParameters().isEmpty()) {
- tags.put(CITags.CI_PARAMETERS, toJson(buildData.getBuildParameters()));
+ tags.put(CITags.CI_PARAMETERS, DatadogUtilities.toJson(buildData.getBuildParameters()));
}
- final String url = envVars.get("BUILD_URL") != null ? envVars.get("BUILD_URL") : buildData.getBuildUrl("");
+ String url = buildData.getBuildUrl("");
if(StringUtils.isNotBlank(url)) {
tags.put(prefix + CITags._URL, url + "execution/node/"+current.getId()+"/");
}
- final String workspace = current.getWorkspace() != null ? current.getWorkspace() : buildData.getWorkspace("");
+ final String workspace = firstNonNull(current.getWorkspace(), buildData.getWorkspace(""));
tags.put(CITags.WORKSPACE_PATH, workspace);
- tags.put(CITags._DD_CI_INTERNAL, current.isInternal());
- if(!current.isInternal()) {
- tags.put(CITags._DD_CI_BUILD_LEVEL, buildLevel);
- tags.put(CITags._DD_CI_LEVEL, buildLevel);
+ tags.put(CITags._DD_CI_INTERNAL, false);
+ tags.put(CITags._DD_CI_BUILD_LEVEL, buildLevel);
+ tags.put(CITags._DD_CI_LEVEL, buildLevel);
+
+ String jenkinsResult = current.getJenkinsResult();
+ if (jenkinsResult != null) {
+ tags.put(CITags.JENKINS_RESULT, jenkinsResult.toLowerCase());
}
- tags.put(CITags.JENKINS_RESULT, current.getResult().toLowerCase());
+
tags.put(CITags.ERROR, String.valueOf(current.isError() || current.isUnstable()));
//Git Info
- final String rawGitBranch = GitUtils.resolveGitBranch(envVars, buildData);
- String gitBranch = null;
- String gitTag = null;
+ String rawGitBranch = buildData.getBranch("");
+ String gitBranch;
+ String gitTag;
if(rawGitBranch != null && !rawGitBranch.isEmpty()) {
gitBranch = normalizeBranch(rawGitBranch);
if(gitBranch != null) {
@@ -199,7 +149,7 @@ private Map buildTraceTags(final Run run, final BuildPipelineNod
// If the user set DD_GIT_TAG manually,
// we override the git.tag value.
- gitTag = GitUtils.resolveGitTag(envVars, buildData);
+ gitTag = buildData.getGitTag("");
if(StringUtils.isNotEmpty(gitTag)){
tags.put(CITags.GIT_TAG, gitTag);
}
@@ -208,40 +158,36 @@ private Map buildTraceTags(final Run run, final BuildPipelineNod
// If we could not detect a valid commit, that means that the GIT_COMMIT environment variable
// was overridden by the user at top level, so we set the content what we have (despite it's not valid).
// We will show a logger.warning at the end of the pipeline.
- final String gitCommit = GitUtils.resolveGitCommit(envVars, buildData);
+ String gitCommit = buildData.getGitCommit("");
if(gitCommit != null && !gitCommit.isEmpty()) {
tags.put(CITags.GIT_COMMIT__SHA, gitCommit); //Maintain retrocompatibility
tags.put(CITags.GIT_COMMIT_SHA, gitCommit);
}
- final String gitRepoUrl = GitUtils.resolveGitRepositoryUrl(envVars, buildData);
+ String gitRepoUrl = buildData.getGitUrl("");
if (gitRepoUrl != null && !gitRepoUrl.isEmpty()) {
tags.put(CITags.GIT_REPOSITORY_URL, filterSensitiveInfo(gitRepoUrl));
}
// User info
- final String user = envVars.get("USER") != null ? envVars.get("USER") : buildData.getUserId();
+ String user = buildData.getUserId();
tags.put(CITags.USER_NAME, user);
// Node info
- final String nodeName = getNodeName(run, current, buildData);
+ final String nodeName = getNodeName(current, buildData);
tags.put(CITags.NODE_NAME, nodeName);
- final String nodeLabels = toJson(getNodeLabels(run, current, nodeName));
- if(!nodeLabels.isEmpty()){
+ final String nodeLabels = DatadogUtilities.toJson(getNodeLabels(run, current, nodeName));
+ if(nodeLabels != null && !nodeLabels.isEmpty()){
tags.put(CITags.NODE_LABELS, nodeLabels);
+ } else {
+ tags.put(CITags.NODE_LABELS, "[]");
}
// If the NodeName == "master", we don't set _dd.hostname. It will be overridden by the Datadog Agent. (Traces are only available using Datadog Agent)
if(!DatadogUtilities.isMainNode(nodeName)) {
- final String workerHostname = getNodeHostname(run, current);
- // If the worker hostname is equals to controller hostname but the node name is not "master"
- // then we could not detect the worker hostname properly. We set _dd.hostname to 'none' explicitly.
- if(buildData.getHostname("").equalsIgnoreCase(workerHostname)) {
- tags.put(CITags._DD_HOSTNAME, HOSTNAME_NONE);
- } else {
- tags.put(CITags._DD_HOSTNAME, (workerHostname != null) ? workerHostname : HOSTNAME_NONE);
- }
+ final String workerHostname = getNodeHostname(current, buildData);
+ tags.put(CITags._DD_HOSTNAME, !workerHostname.isEmpty() ? workerHostname : HOSTNAME_NONE);
}
// Arguments
@@ -273,20 +219,20 @@ private Map buildTraceTags(final Run run, final BuildPipelineNod
tags.put(PIPELINE.getTagName() + CITags._ID, buildData.getBuildTag(""));
// Propagate Stage Name
- if(!BuildPipelineNode.NodeType.STAGE.equals(current.getType()) && current.getStageName() != null) {
- tags.put(BuildPipelineNode.NodeType.STAGE.getTagName() + CITags._NAME, current.getStageName());
+ if(!PipelineStepData.StepType.STAGE.equals(current.getType()) && current.getStageName() != null) {
+ tags.put(PipelineStepData.StepType.STAGE.getTagName() + CITags._NAME, current.getStageName());
}
// CI Tags propagation
- final CIGlobalTagsAction ciGlobalTagsAction = run.getAction(CIGlobalTagsAction.class);
- if(ciGlobalTagsAction != null) {
- final Map<String, String> globalTags = ciGlobalTagsAction.getTags();
- for(Map.Entry<String, String> globalTagEntry : globalTags.entrySet()) {
- tags.put(globalTagEntry.getKey(), globalTagEntry.getValue());
- }
- }
+ Map<String, String> globalTags = new HashMap<>(buildData.getTagsForTraces());
+ globalTags.putAll(TagsUtil.convertTagsToMapSingleValues(DatadogUtilities.getTagsFromPipelineAction(run)));
+ tags.putAll(globalTags);
return tags;
}
+ private <T> T firstNonNull(T first, T second) {
+ return first != null ? first : second;
+ }
+
}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java
index 6e0cc9313..6990cd30c 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookBuildLogic.java
@@ -8,18 +8,20 @@
import hudson.model.Run;
import java.util.Date;
+import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.logging.Logger;
+import javax.annotation.Nullable;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.datadog.jenkins.plugins.datadog.DatadogUtilities;
import org.datadog.jenkins.plugins.datadog.model.BuildData;
-import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode;
-import org.datadog.jenkins.plugins.datadog.model.CIGlobalTagsAction;
+import org.datadog.jenkins.plugins.datadog.model.PipelineStepData;
import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan;
+import org.datadog.jenkins.plugins.datadog.util.TagsUtil;
/**
* Keeps the logic to send webhooks related to Jenkins Build.
@@ -29,8 +31,9 @@ public class DatadogWebhookBuildLogic extends DatadogBaseBuildLogic {
private static final Logger logger = Logger.getLogger(DatadogWebhookBuildLogic.class.getName());
+ @Nullable
@Override
- public JSONObject finishBuildTrace(final BuildData buildData, final Run<?,?> run) {
+ public JSONObject toJson(final BuildData buildData, final Run<?,?> run) {
if (!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) {
return null;
}
@@ -45,18 +48,12 @@ public JSONObject finishBuildTrace(final BuildData buildData, final Run,?> run
return null;
}
- // In this point of the execution, the BuildData stored within
- // BuildSpanAction has been updated by the information available
- // inside the Pipeline steps by DatadogWebhookPipelineLogic.
- // (Only applicable if the build is based on Jenkins Pipelines).
- final BuildData updatedBuildData = buildSpanAction.getBuildData();
-
final long startTimeMillis = buildData.getStartTime(0L);
// If the build is a Jenkins Pipeline, the queue time is included in the root duration.
// We need to adjust the endTime of the root subtracting the queue time reported by its children.
// The propagated queue time is set DatadogTracePipelineLogic#updateBuildData method.
// The queue time reported by DatadogBuildListener#onStarted method is not included in the root duration.
- final long propagatedMillisInQueue = Math.max(updatedBuildData.getPropagatedMillisInQueue(-1L), 0);
+ final long propagatedMillisInQueue = Math.max(buildData.getPropagatedMillisInQueue(-1L), 0);
// Although the queue time happens before the startTime, we cannot remove it from the startTime
// because there is no API to do it at the end of the trace. Additionally, we cannot create the root
// at the end of the build, because we would lose the logs correlation.
@@ -65,22 +62,22 @@ public JSONObject finishBuildTrace(final BuildData buildData, final Run,?> run
final long endTimeMillis = buildData.getEndTime(0L) - propagatedMillisInQueue;
final String jenkinsResult = buildData.getResult("");
final String status = statusFromResult(jenkinsResult);
- final String prefix = BuildPipelineNode.NodeType.PIPELINE.getTagName();
- final String rawGitBranch = buildData.getBranch("").isEmpty() ? updatedBuildData.getBranch("") : buildData.getBranch("");
+ final String prefix = PipelineStepData.StepType.PIPELINE.getTagName();
+ final String rawGitBranch = buildData.getBranch("");
final String gitBranch = normalizeBranch(rawGitBranch);
// Check if the user set manually the DD_GIT_TAG environment variable.
// Otherwise, Jenkins reports the tag in the Git branch information. (e.g. origin/tags/0.1.0)
- final String gitTag = Optional.of(buildData.getGitTag("").isEmpty() ? updatedBuildData.getGitTag("") : buildData.getGitTag(""))
+ final String gitTag = Optional.of(buildData.getGitTag(""))
.filter(tag -> !tag.isEmpty())
.orElse(normalizeTag(rawGitBranch));
JSONObject payload = new JSONObject();
- payload.put("level", BuildPipelineNode.NodeType.PIPELINE.getBuildLevel());
+ payload.put("level", PipelineStepData.StepType.PIPELINE.getBuildLevel());
payload.put("url", buildData.getBuildUrl(""));
payload.put("start", DatadogUtilities.toISO8601(new Date(startTimeMillis)));
payload.put("end", DatadogUtilities.toISO8601(new Date(endTimeMillis)));
payload.put("partial_retry", false);
- payload.put("queue_time", getMillisInQueue(updatedBuildData));
+ payload.put("queue_time", getMillisInQueue(buildData));
payload.put("status", status);
payload.put("is_manual", isTriggeredManually(run));
@@ -115,12 +112,11 @@ public JSONObject finishBuildTrace(final BuildData buildData, final Run,?> run
{
JSONArray tagsPayload = new JSONArray();
- final CIGlobalTagsAction ciGlobalTagsAction = run.getAction(CIGlobalTagsAction.class);
- if(ciGlobalTagsAction != null) {
- final Map<String, String> tags = ciGlobalTagsAction.getTags();
- for(Map.Entry<String, String> tagEntry : tags.entrySet()) {
- tagsPayload.add(tagEntry.getKey() + ":" + tagEntry.getValue());
- }
+ Map<String, String> globalTags = new HashMap<>(buildData.getTagsForTraces());
+ globalTags.putAll(TagsUtil.convertTagsToMapSingleValues(DatadogUtilities.getTagsFromPipelineAction(run)));
+
+ for(Map.Entry<String, String> tagEntry : globalTags.entrySet()) {
+ tagsPayload.add(tagEntry.getKey() + ":" + tagEntry.getValue());
}
// Jenkins specific
@@ -158,25 +154,25 @@ public JSONObject finishBuildTrace(final BuildData buildData, final Run,?> run
{
JSONObject nodePayload = new JSONObject();
- final String nodeName = getNodeName(run, buildData, updatedBuildData);
+ // It seems like "built-in" node as the default value does not have much practical sense.
+ // It is done to preserve existing behavior (note that this logic is not applied to metrics - also to preserve the plugin's existing behavior).
+ // The mechanism before the changes was the following:
+ // - DatadogBuildListener#onInitialize created a BuildData instance
+ // - that BuildData had its nodeName populated from environment variables obtained from Run
+ // - the instance was persisted in an Action attached to Run, and was used to populate the node name of the pipeline span (always as the last fallback)
+ // For pipelines, the environment variables that Run#getEnvironment returns at the beginning of the run always (!) contain NODE_NAME = "built-in" (when invoked at the end of the run, the env will have a different set of variables).
+ // This is true regardless of whether the pipeline definition has a top-level agent block or not.
+ // For freestyle projects the correct NODE_NAME seems to be available in the run's environment variables at every stage of the build's lifecycle.
+ final String nodeName = buildData.getNodeName("built-in");
nodePayload.put("name", nodeName);
if(!DatadogUtilities.isMainNode(nodeName)) {
-
- final String workerHostname = getNodeHostname(run, updatedBuildData);
-
- // If the worker hostname is equals to controller hostname but the node name is not master/built-in then we
- // could not detect the worker hostname properly. Check if it's set in the environment, otherwise set to none.
- if(buildData.getHostname("").equalsIgnoreCase(workerHostname)) {
- String envHostnameOrNone = DatadogUtilities.getHostnameFromWorkerEnv(run).orElse(HOSTNAME_NONE);
- nodePayload.put("hostname", envHostnameOrNone);
- } else {
- nodePayload.put("hostname", (workerHostname != null) ? workerHostname : HOSTNAME_NONE);
- }
+ final String workerHostname = buildData.getHostname("");
+ nodePayload.put("hostname", !workerHostname.isEmpty() ? workerHostname : HOSTNAME_NONE);
} else {
nodePayload.put("hostname", DatadogUtilities.getHostname(null));
}
- final String workspace = buildData.getWorkspace("").isEmpty() ? updatedBuildData.getWorkspace("") : buildData.getWorkspace("");
+ final String workspace = buildData.getWorkspace("");
nodePayload.put("workspace", workspace);
final Set<String> nodeLabels = getNodeLabels(run, nodeName);
@@ -185,7 +181,6 @@ public JSONObject finishBuildTrace(final BuildData buildData, final Run,?> run
payload.put("node", nodePayload);
}
-
// Git info
{
JSONObject gitPayload = new JSONObject();
@@ -198,54 +193,54 @@ public JSONObject finishBuildTrace(final BuildData buildData, final Run,?> run
gitPayload.put("tag", gitTag);
}
- final String gitCommit = buildData.getGitCommit("").isEmpty() ? updatedBuildData.getGitCommit("") : buildData.getGitCommit("");
+ final String gitCommit = buildData.getGitCommit("");
if(!isValidCommit(gitCommit)) {
logger.warning("Couldn't find a valid commit for pipelineID '"+buildData.getBuildTag("")+"'. GIT_COMMIT environment variable was not found or has invalid SHA1 string: " + gitCommit);
} else {
gitPayload.put("sha", gitCommit);
}
- final String gitRepoUrl = buildData.getGitUrl("").isEmpty() ? updatedBuildData.getGitUrl("") : buildData.getGitUrl("");
+ final String gitRepoUrl = buildData.getGitUrl("");
if (gitRepoUrl != null && !gitRepoUrl.isEmpty()) {
gitPayload.put("repository_url", filterSensitiveInfo(gitRepoUrl));
}
- final String gitMessage = buildData.getGitMessage("").isEmpty() ? updatedBuildData.getGitMessage("") : buildData.getGitMessage("");
+ final String gitMessage = buildData.getGitMessage("");
if (gitMessage != null && !gitMessage.isEmpty()) {
gitPayload.put("message", gitMessage);
}
- final String gitAuthorDate = buildData.getGitAuthorDate("").isEmpty() ? updatedBuildData.getGitAuthorDate("") : buildData.getGitAuthorDate("");
+ final String gitAuthorDate = buildData.getGitAuthorDate("");
if (gitAuthorDate != null && !gitAuthorDate.isEmpty()) {
gitPayload.put("author_time", gitAuthorDate);
}
- final String gitCommitDate = buildData.getGitCommitterDate("").isEmpty() ? updatedBuildData.getGitCommitterDate("") : buildData.getGitCommitterDate("");
+ final String gitCommitDate = buildData.getGitCommitterDate("");
if (gitCommitDate != null && !gitCommitDate.isEmpty()) {
gitPayload.put("commit_time", gitCommitDate);
}
- final String gitCommitterName = buildData.getGitCommitterName("").isEmpty() ? updatedBuildData.getGitCommitterName("") : buildData.getGitCommitterName("");
+ final String gitCommitterName = buildData.getGitCommitterName("");
if (gitCommitterName != null && !gitCommitterName.isEmpty()) {
gitPayload.put("committer_name", gitCommitterName);
}
- final String gitCommitterEmail = buildData.getGitCommitterEmail("").isEmpty() ? updatedBuildData.getGitCommitterEmail("") : buildData.getGitCommitterEmail("");
+ final String gitCommitterEmail = buildData.getGitCommitterEmail("");
if (gitCommitterEmail != null && !gitCommitterEmail.isEmpty()) {
gitPayload.put("committer_email", gitCommitterEmail);
}
- final String gitAuthorName = buildData.getGitAuthorName("").isEmpty() ? updatedBuildData.getGitAuthorName("") : buildData.getGitAuthorName("");
+ final String gitAuthorName = buildData.getGitAuthorName("");
if (gitAuthorName != null && !gitAuthorName.isEmpty()) {
gitPayload.put("author_name", gitAuthorName);
}
- final String gitAuthorEmail = buildData.getGitAuthorEmail("").isEmpty() ? updatedBuildData.getGitAuthorEmail("") : buildData.getGitAuthorEmail("");
+ final String gitAuthorEmail = buildData.getGitAuthorEmail("");
if (gitAuthorEmail != null && !gitAuthorEmail.isEmpty()) {
gitPayload.put("author_email", gitAuthorEmail);
}
- final String gitDefaultBranch = buildData.getGitDefaultBranch("").isEmpty() ? updatedBuildData.getGitDefaultBranch("") : buildData.getGitDefaultBranch("");
+ final String gitDefaultBranch = buildData.getGitDefaultBranch("");
if (gitDefaultBranch != null && !gitDefaultBranch.isEmpty()) {
gitPayload.put("default_branch", gitDefaultBranch);
}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookPipelineLogic.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookPipelineLogic.java
index 9d5e641e2..fb017274b 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookPipelineLogic.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/DatadogWebhookPipelineLogic.java
@@ -1,18 +1,15 @@
package org.datadog.jenkins.plugins.datadog.traces;
-import static org.datadog.jenkins.plugins.datadog.DatadogUtilities.cleanUpTraceActions;
-import static org.datadog.jenkins.plugins.datadog.DatadogUtilities.statusFromResult;
import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.filterSensitiveInfo;
import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.normalizeBranch;
import static org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils.normalizeTag;
import hudson.model.Run;
+import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
import java.util.Date;
+import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
@@ -22,12 +19,9 @@
import org.apache.commons.lang.StringUtils;
import org.datadog.jenkins.plugins.datadog.DatadogUtilities;
import org.datadog.jenkins.plugins.datadog.model.BuildData;
-import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode;
-import org.datadog.jenkins.plugins.datadog.model.CIGlobalTagsAction;
-import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan;
-import org.datadog.jenkins.plugins.datadog.util.git.GitUtils;
-import org.jenkinsci.plugins.workflow.graph.FlowEndNode;
-import org.jenkinsci.plugins.workflow.graph.FlowNode;
+import org.datadog.jenkins.plugins.datadog.model.PipelineStepData;
+import org.datadog.jenkins.plugins.datadog.model.Status;
+import org.datadog.jenkins.plugins.datadog.util.TagsUtil;
/**
* Keeps the logic to send webhooks related to inner jobs of Jenkins Pipelines (datadog levels: stage and job).
@@ -37,88 +31,55 @@ public class DatadogWebhookPipelineLogic extends DatadogBasePipelineLogic {
@Nonnull
@Override
- public Collection<JSONObject> execute(FlowNode flowNode, Run run) {
+ public JSONObject toJson(PipelineStepData current, Run<?, ?> run) throws IOException, InterruptedException {
+ BuildData buildData = new BuildData(run, DatadogUtilities.getTaskListener(run));
- if (!DatadogUtilities.getDatadogGlobalDescriptor().getEnableCiVisibility()) {
- return Collections.emptySet();
- }
+ JSONObject payload = new JSONObject();
+ payload.put("partial_retry", false);
- final IsPipelineAction isPipelineAction = run.getAction(IsPipelineAction.class);
- if(isPipelineAction == null) {
- run.addAction(new IsPipelineAction());
- }
+ long traceId = current.getTraceId();
+ payload.put("trace_id", traceId);
- final BuildSpanAction buildSpanAction = run.getAction(BuildSpanAction.class);
- if(buildSpanAction == null) {
- return Collections.emptySet();
- }
+ long parentSpanId = current.getParentSpanId();
+ payload.put("parent_span_id", parentSpanId);
- final BuildData buildData = buildSpanAction.getBuildData();
- if(!DatadogUtilities.isLastNode(flowNode)){
- updateCIGlobalTags(run);
- return Collections.emptySet();
- }
+ long spanId = current.getSpanId();
+ payload.put("span_id", spanId);
- final TraceSpan.TraceSpanContext traceSpanContext = buildSpanAction.getBuildSpanContext();
- final BuildPipelineNode root = buildPipelineTree((FlowEndNode) flowNode);
- try {
- return collectTraces(run, buildData, root, null, traceSpanContext);
- } finally {
- // Explicit removal of InvisibleActions used to collect Traces when the Run finishes.
- cleanUpTraceActions(run);
- }
- }
+ payload.put("id", current.getId());
+ payload.put("name", current.getName());
- private Collection<JSONObject> collectTraces(final Run run, final BuildData buildData, final BuildPipelineNode current, final BuildPipelineNode parent, final TraceSpan.TraceSpanContext parentSpanContext) {
+ final String buildLevel = current.getType().getBuildLevel();
+ payload.put("level", buildLevel);
- if(!isTraceable(current)) {
- Collection<JSONObject> traces = new ArrayList<>();
- // If the current node is not traceable, we continue with its children
- for(final BuildPipelineNode child : current.getChildren()) {
- traces.addAll(collectTraces(run, buildData, child, parent, parentSpanContext));
- }
- return traces;
- }
// If the root has propagated queue time, we need to adjust all startTime and endTime from Jenkins pipelines
// because this time will be subtracted in the root. See DatadogTraceBuildLogic#finishBuildTrace method.
final long propagatedMillisInQueue = Math.max(buildData.getPropagatedMillisInQueue(-1L), 0);
- final long fixedStartTimeMillis = TimeUnit.MICROSECONDS.toMillis(current.getStartTimeMicros() - TimeUnit.MILLISECONDS.toMicros(propagatedMillisInQueue));
- final long fixedEndTimeMillis = TimeUnit.MICROSECONDS.toMillis(current.getEndTimeMicros() - TimeUnit.MILLISECONDS.toMicros(propagatedMillisInQueue));
- final String jenkinsResult = current.getResult();
- final String status = statusFromResult(jenkinsResult);
- final String prefix = current.getType().getTagName();
- final String buildLevel = current.getType().getBuildLevel();
-
- final TraceSpan.TraceSpanContext spanContext = new TraceSpan.TraceSpanContext(parentSpanContext.getTraceId(), parentSpanContext.getSpanId(), current.getSpanId());
- final TraceSpan span = new TraceSpan(buildOperationName(current), TimeUnit.MILLISECONDS.toNanos(fixedStartTimeMillis + propagatedMillisInQueue), spanContext);
- final Map<String, String> envVars = current.getEnvVars();
-
- JSONObject payload = new JSONObject();
- payload.put("level", buildLevel);
- final String url = envVars.get("BUILD_URL") != null ? envVars.get("BUILD_URL") : buildData.getBuildUrl("");
- if(StringUtils.isNotBlank(url)) {
- payload.put("url", url + "execution/node/"+current.getId()+"/");
- }
+ final long fixedStartTimeMillis = TimeUnit.MICROSECONDS.toMillis(current.getStartTimeMicros() - TimeUnit.MILLISECONDS.toMicros(propagatedMillisInQueue));
payload.put("start", DatadogUtilities.toISO8601(new Date(fixedStartTimeMillis)));
+
+ final long fixedEndTimeMillis = TimeUnit.MICROSECONDS.toMillis(current.getEndTimeMicros() - TimeUnit.MILLISECONDS.toMicros(propagatedMillisInQueue));
payload.put("end", DatadogUtilities.toISO8601(new Date(fixedEndTimeMillis)));
- payload.put("partial_retry", false);
- payload.put("queue_time", TimeUnit.NANOSECONDS.toMillis(DatadogUtilities.getNanosInQueue(current)));
- payload.put("status", status);
- payload.put("trace_id", spanContext.getTraceId());
- payload.put("span_id", spanContext.getSpanId());
- payload.put("parent_span_id", spanContext.getParentId());
+ payload.put("queue_time", TimeUnit.NANOSECONDS.toMillis(current.getNanosInQueue()));
- payload.put("id", current.getId());
- payload.put("name", current.getName());
+ Status status = current.getStatus();
+ payload.put("status", status.toTag());
payload.put("pipeline_unique_id", buildData.getBuildTag(""));
payload.put("pipeline_name", buildData.getBaseJobName(""));
+
+ String url = buildData.getBuildUrl("");
+ if (StringUtils.isNotBlank(url)) {
+ payload.put("url", url + "execution/node/" + current.getId() + "/");
+ }
+
if (buildLevel.equals("stage")) {
- if (parent != null && parent.getType().getBuildLevel() == "stage") {
+ String parentStageId = current.getStageId();
+ if (parentStageId != null) {
// Stage is a child of another stage
- payload.put("parent_stage_id", parent.getStageId());
+ payload.put("parent_stage_id", parentStageId);
}
} else if (buildLevel.equals("job")) {
payload.put("stage_id", current.getStageId());
@@ -126,10 +87,8 @@ private Collection collectTraces(final Run run, final BuildData buil
}
// Errors
- if(current.isError() && current.getErrorObj() != null) {
-
+ if (current.isError() && current.getErrorObj() != null) {
JSONObject errPayload = new JSONObject();
-
final Throwable error = current.getErrorObj();
errPayload.put("message", error.getMessage());
errPayload.put("type", error.getClass().getName());
@@ -139,7 +98,7 @@ private Collection collectTraces(final Run run, final BuildData buil
errPayload.put("stack", errorString.toString());
payload.put("error", errPayload);
- } else if(current.isUnstable() && current.getUnstableMessage() != null){
+ } else if (current.isUnstable() && current.getUnstableMessage() != null) {
JSONObject errPayload = new JSONObject();
errPayload.put("message", current.getUnstableMessage());
errPayload.put("type", "unstable");
@@ -151,18 +110,12 @@ private Collection collectTraces(final Run run, final BuildData buil
{
JSONObject nodePayload = new JSONObject();
- final String nodeName = getNodeName(run, current, buildData);
+ final String nodeName = getNodeName(current, buildData);
nodePayload.put("name", nodeName);
- if(!DatadogUtilities.isMainNode(nodeName)) {
- final String workerHostname = getNodeHostname(run, current);
- // If the worker hostname is equals to controller hostname but the node name is not "master"
- // then we could not detect the worker hostname properly. We set _dd.hostname to 'none' explicitly.
- if(buildData.getHostname("").equalsIgnoreCase(workerHostname)) {
- nodePayload.put("hostname", HOSTNAME_NONE);
- } else {
- nodePayload.put("hostname", (workerHostname != null) ? workerHostname : HOSTNAME_NONE);
- }
+ if (!DatadogUtilities.isMainNode(nodeName)) {
+ final String workerHostname = getNodeHostname(current, buildData);
+ nodePayload.put("hostname", (workerHostname != null) ? workerHostname : HOSTNAME_NONE);
} else {
nodePayload.put("hostname", DatadogUtilities.getHostname(null));
}
@@ -180,24 +133,24 @@ private Collection collectTraces(final Run run, final BuildData buil
{
JSONObject gitPayload = new JSONObject();
- final String rawGitBranch = GitUtils.resolveGitBranch(envVars, buildData);
- String gitBranch = null;
- String gitTag = null;
- if(rawGitBranch != null && !rawGitBranch.isEmpty()) {
+ String rawGitBranch = buildData.getBranch("");
+ String gitBranch;
+ String gitTag;
+ if (rawGitBranch != null && !rawGitBranch.isEmpty()) {
gitBranch = normalizeBranch(rawGitBranch);
- if(gitBranch != null) {
+ if (gitBranch != null) {
gitPayload.put("branch", gitBranch);
}
gitTag = normalizeTag(rawGitBranch);
- if(gitTag != null) {
+ if (gitTag != null) {
gitPayload.put("tag", gitTag);
}
}
// If the user set DD_GIT_TAG manually,
// we override the git.tag value.
- gitTag = GitUtils.resolveGitTag(envVars, buildData);
- if(StringUtils.isNotEmpty(gitTag)){
+ gitTag = buildData.getGitTag("");
+ if (StringUtils.isNotEmpty(gitTag)) {
gitPayload.put("tag", gitTag);
}
@@ -205,12 +158,12 @@ private Collection collectTraces(final Run run, final BuildData buil
// If we could not detect a valid commit, that means that the GIT_COMMIT environment variable
// was overridden by the user at top level, so we set the content what we have (despite it's not valid).
// We will show a logger.warning at the end of the pipeline.
- final String gitCommit = GitUtils.resolveGitCommit(envVars, buildData);
- if(gitCommit != null && !gitCommit.isEmpty()) {
+ String gitCommit = buildData.getGitCommit("");
+ if (gitCommit != null && !gitCommit.isEmpty()) {
gitPayload.put("sha", gitCommit);
}
- final String gitRepoUrl = GitUtils.resolveGitRepositoryUrl(envVars, buildData);
+ String gitRepoUrl = buildData.getGitUrl("");
if (gitRepoUrl != null && !gitRepoUrl.isEmpty()) {
gitPayload.put("repository_url", filterSensitiveInfo(gitRepoUrl));
}
@@ -260,9 +213,8 @@ private Collection collectTraces(final Run run, final BuildData buil
// User
{
- // User
JSONObject userPayload = new JSONObject();
- final String user = envVars.get("USER") != null ? envVars.get("USER") : buildData.getUserId();
+ String user = buildData.getUserId();
userPayload.put("name", user);
if (StringUtils.isNotEmpty(buildData.getUserEmail(""))) {
userPayload.put("email", buildData.getUserEmail(""));
@@ -284,42 +236,39 @@ private Collection collectTraces(final Run run, final BuildData buil
{
JSONArray tagsPayload = new JSONArray();
- final CIGlobalTagsAction ciGlobalTagsAction = run.getAction(CIGlobalTagsAction.class);
- if(ciGlobalTagsAction != null) {
- final Map<String, String> globalTags = ciGlobalTagsAction.getTags();
- for(Map.Entry<String, String> globalTagEntry : globalTags.entrySet()) {
- tagsPayload.add(globalTagEntry.getKey() + ":" + globalTagEntry.getValue());
- }
+ Map<String, String> globalTags = new HashMap<>(buildData.getTagsForTraces());
+ globalTags.putAll(TagsUtil.convertTagsToMapSingleValues(DatadogUtilities.getTagsFromPipelineAction(run)));
+
+ for (Map.Entry<String, String> globalTagEntry : globalTags.entrySet()) {
+ tagsPayload.add(globalTagEntry.getKey() + ":" + globalTagEntry.getValue());
}
// Jenkins specific
- tagsPayload.add(CITags._DD_CI_INTERNAL + ":" + current.isInternal());
+ tagsPayload.add(CITags._DD_CI_INTERNAL + ":false");
+
+ String jenkinsResult = current.getJenkinsResult();
if (StringUtils.isNotEmpty(jenkinsResult)) {
tagsPayload.add(CITags.JENKINS_RESULT + ":" + jenkinsResult.toLowerCase());
}
+ final String prefix = current.getType().getTagName();
+
// For backwards compat
- tagsPayload.add(prefix + CITags._RESULT + ":" + status);
+ tagsPayload.add(prefix + CITags._RESULT + ":" + status.toTag());
// Arguments
final String nodePrefix = current.getType().name().toLowerCase();
- for(Map.Entry<String, Object> entry : current.getArgs().entrySet()) {
- tagsPayload.add(CI_PROVIDER + "." + nodePrefix + ".args."+entry.getKey() + ":" + String.valueOf(entry.getValue()));
- if("script".equals(entry.getKey())){
- tagsPayload.add(prefix + ".script" + ":" + String.valueOf(entry.getValue()));
+ for (Map.Entry<String, Object> entry : current.getArgs().entrySet()) {
+ tagsPayload.add(CI_PROVIDER + "." + nodePrefix + ".args." + entry.getKey() + ":" + entry.getValue());
+ if ("script".equals(entry.getKey())) {
+ tagsPayload.add(prefix + ".script" + ":" + entry.getValue());
}
}
payload.put("tags", tagsPayload);
}
- Collection<JSONObject> traces = new ArrayList<>();
- for(final BuildPipelineNode child : current.getChildren()) {
- traces.addAll(collectTraces(run, buildData, child, current, span.context()));
- }
-
- traces.add(payload);
- return traces;
+ return payload;
}
}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/IsPipelineAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/IsPipelineAction.java
deleted file mode 100644
index 5c7438886..000000000
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/IsPipelineAction.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package org.datadog.jenkins.plugins.datadog.traces;
-
-import hudson.model.InvisibleAction;
-
-import java.io.Serializable;
-
-public class IsPipelineAction extends InvisibleAction implements Serializable {
- private static final long serialVersionUID = 1L;
-}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/StepDataAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/StepDataAction.java
deleted file mode 100644
index 750f990b2..000000000
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/StepDataAction.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package org.datadog.jenkins.plugins.datadog.traces;
-
-import hudson.model.InvisibleAction;
-import hudson.model.Run;
-import org.datadog.jenkins.plugins.datadog.model.StepData;
-import org.jenkinsci.plugins.workflow.graph.FlowNode;
-
-import java.io.Serializable;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-
-/**
- * Keeps the Step data during a certain Run.
- */
-public class StepDataAction extends InvisibleAction implements Serializable {
-
- private static final long serialVersionUID = 1L;
-
- private final ConcurrentMap<String, StepData> stepDataByDescriptor = new ConcurrentHashMap<>();
-
- public StepData put(final Run<?,?> run, final FlowNode flowNode, final StepData stepData) {
- return stepDataByDescriptor.put(flowNode.getId(), stepData);
- }
-
- public StepData get(final Run<?,?> run, final FlowNode flowNode) {
- return stepDataByDescriptor.get(flowNode.getId());
- }
-
-}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/StepTraceDataAction.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/StepTraceDataAction.java
deleted file mode 100644
index 8734ee52a..000000000
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/StepTraceDataAction.java
+++ /dev/null
@@ -1,26 +0,0 @@
-package org.datadog.jenkins.plugins.datadog.traces;
-
-import hudson.model.InvisibleAction;
-import hudson.model.Run;
-import org.datadog.jenkins.plugins.datadog.model.StepTraceData;
-import org.jenkinsci.plugins.workflow.graph.FlowNode;
-
-import java.io.Serializable;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-
-public class StepTraceDataAction extends InvisibleAction implements Serializable {
-
- private static final long serialVersionUID = 1L;
-
- private final ConcurrentMap stepTraceDataByDescriptor = new ConcurrentHashMap<>();
-
- public StepTraceData put(final Run,?> run, final FlowNode flowNode, final StepTraceData stepTraceData) {
- return stepTraceDataByDescriptor.put(flowNode.getId(), stepTraceData);
- }
-
- public StepTraceData get(final Run,?> run, final FlowNode flowNode) {
- return stepTraceDataByDescriptor.get(flowNode.getId());
- }
-
-}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/TraceStepEnvironmentContributor.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/TraceStepEnvironmentContributor.java
index fb7c71bfa..70f8121fd 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/TraceStepEnvironmentContributor.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/TraceStepEnvironmentContributor.java
@@ -7,16 +7,15 @@
import hudson.Extension;
import hudson.model.Run;
import hudson.model.TaskListener;
+import java.io.IOException;
+import java.util.logging.Logger;
import org.datadog.jenkins.plugins.datadog.DatadogUtilities;
-import org.datadog.jenkins.plugins.datadog.model.StepTraceData;
+import org.datadog.jenkins.plugins.datadog.model.TraceInfoAction;
import org.jenkinsci.plugins.workflow.cps.nodes.StepAtomNode;
import org.jenkinsci.plugins.workflow.graph.FlowNode;
import org.jenkinsci.plugins.workflow.steps.StepContext;
import org.jenkinsci.plugins.workflow.steps.StepEnvironmentContributor;
-import java.io.IOException;
-import java.util.logging.Logger;
-
@Extension
public class TraceStepEnvironmentContributor extends StepEnvironmentContributor {
@@ -41,12 +40,6 @@ public void buildEnvironmentFor(StepContext stepContext, EnvVars envs, TaskListe
return;
}
- final StepTraceDataAction stepTraceDataAction = run.getAction(StepTraceDataAction.class);
- if(stepTraceDataAction == null) {
- logger.fine("Unable to set trace ids as environment variables. in Run '"+run.getFullDisplayName()+"'. StepTraceDataAction is null");
- return;
- }
-
final FlowNode flowNode = stepContext.get(FlowNode.class);
if(flowNode == null) {
logger.fine("Unable to set trace ids as environment variables. in Run '"+run.getFullDisplayName()+"'. FlowNode is null");
@@ -57,22 +50,20 @@ public void buildEnvironmentFor(StepContext stepContext, EnvVars envs, TaskListe
return;
}
- StepTraceData stepTraceData = stepTraceDataAction.get(run, flowNode);
- if(stepTraceData == null){
- stepTraceData = new StepTraceData(IdGenerator.generate());
- stepTraceDataAction.put(run, flowNode, stepTraceData);
- }
-
if(envs.get(TRACE_ID_ENVVAR_KEY) == null) {
final String traceIdStr = Long.toUnsignedString(buildSpanAction.getBuildSpanContext().getTraceId());
envs.put(TRACE_ID_ENVVAR_KEY, traceIdStr);
logger.fine("Set DD_CUSTOM_TRACE_ID="+traceIdStr+" for FlowNode: "+flowNode);
}
- if(envs.get(SPAN_ID_ENVVAR_KEY) == null) {
- final String spanIdStr = Long.toUnsignedString(stepTraceData.getSpanId());
- envs.put(SPAN_ID_ENVVAR_KEY, spanIdStr);
- logger.fine("Set DD_CUSTOM_PARENT_ID="+spanIdStr+" for FlowNode: "+flowNode);
+ TraceInfoAction traceInfoAction = run.getAction(TraceInfoAction.class);
+ if (traceInfoAction != null) {
+ Long spanId = traceInfoAction.getOrCreate(flowNode.getId());
+ if(envs.get(SPAN_ID_ENVVAR_KEY) == null) {
+ final String spanIdStr = Long.toUnsignedString(spanId);
+ envs.put(SPAN_ID_ENVVAR_KEY, spanIdStr);
+ logger.fine("Set DD_CUSTOM_PARENT_ID="+spanIdStr+" for FlowNode: "+flowNode);
+ }
}
} catch (Exception ex) {
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/message/TraceSpan.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/message/TraceSpan.java
index 062b6a188..b35bc04d0 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/message/TraceSpan.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/message/TraceSpan.java
@@ -1,9 +1,16 @@
package org.datadog.jenkins.plugins.datadog.traces.message;
+import com.thoughtworks.xstream.XStream;
+import com.thoughtworks.xstream.converters.MarshallingContext;
+import com.thoughtworks.xstream.converters.UnmarshallingContext;
+import com.thoughtworks.xstream.io.HierarchicalStreamReader;
+import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
+import java.util.Objects;
import org.datadog.jenkins.plugins.datadog.traces.IdGenerator;
+import org.datadog.jenkins.plugins.datadog.util.DatadogActionConverter;
public class TraceSpan {
@@ -111,6 +118,15 @@ public boolean isError() {
return error;
}
+ @Override
+ public String toString() {
+ return "TraceSpan{" +
+ "operationName='" + operationName + '\'' +
+ ", serviceName='" + serviceName + '\'' +
+ ", resourceName='" + resourceName + '\'' +
+ '}';
+ }
+
public static class TraceSpanContext implements Serializable {
private static final long serialVersionUID = 1L;
@@ -142,5 +158,53 @@ public long getParentId() {
public long getSpanId() {
return spanId;
}
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ TraceSpanContext that = (TraceSpanContext) o;
+ return traceId == that.traceId && parentId == that.parentId && spanId == that.spanId;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(traceId, parentId, spanId);
+ }
+
+ @Override
+ public String toString() {
+ return "TraceSpanContext{" +
+ "traceId=" + traceId +
+ ", parentId=" + parentId +
+ ", spanId=" + spanId +
+ '}';
+ }
+
+ public static final class ConverterImpl extends DatadogActionConverter {
+ public ConverterImpl(XStream xs) {
+ }
+
+ @Override
+ public boolean canConvert(Class type) {
+ return TraceSpanContext.class == type;
+ }
+
+ @Override
+ public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
+ TraceSpanContext traceSpanContext = (TraceSpanContext) source;
+ writeField("traceId", traceSpanContext.traceId, writer, context);
+ writeField("parentId", traceSpanContext.parentId, writer, context);
+ writeField("spanId", traceSpanContext.spanId, writer, context);
+ }
+
+ @Override
+ public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
+ long traceId = readField(reader, context, long.class);
+ long parentId = readField(reader, context, long.class);
+ long spanId = readField(reader, context, long.class);
+ return new TraceSpanContext(traceId, parentId, spanId);
+ }
+ }
}
}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java
index 04d875f54..6afa67467 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/AgentTraceWriteStrategy.java
@@ -1,6 +1,7 @@
package org.datadog.jenkins.plugins.datadog.traces.write;
import hudson.model.Run;
+import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
@@ -8,10 +9,10 @@
import java.util.function.Supplier;
import java.util.logging.Logger;
import java.util.stream.Collectors;
-import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
import org.datadog.jenkins.plugins.datadog.clients.DatadogAgentClient;
import org.datadog.jenkins.plugins.datadog.model.BuildData;
-import org.jenkinsci.plugins.workflow.graph.FlowNode;
+import org.datadog.jenkins.plugins.datadog.model.PipelineStepData;
/**
* Trace write strategy that can dynamically switch from using APM track to using EVP Proxy.
@@ -46,15 +47,16 @@ public AgentTraceWriteStrategy(TraceWriteStrategy evpProxyStrategy, TraceWriteSt
this.checkEvpProxySupport = checkEvpProxySupport;
}
+ @Nullable
@Override
public Payload serialize(BuildData buildData, Run, ?> run) {
return getCurrentStrategy().serialize(buildData, run);
}
- @Nonnull
+ @Nullable
@Override
- public Collection serialize(FlowNode flowNode, Run, ?> run) {
- return getCurrentStrategy().serialize(flowNode, run);
+ public Payload serialize(PipelineStepData stepData, Run, ?> run) throws IOException, InterruptedException {
+ return getCurrentStrategy().serialize(stepData, run);
}
@Override
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java
index ce35751fd..68323ae86 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategy.java
@@ -1,18 +1,18 @@
package org.datadog.jenkins.plugins.datadog.traces.write;
import hudson.model.Run;
+import java.io.IOException;
import java.util.Collection;
-import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.datadog.jenkins.plugins.datadog.model.BuildData;
-import org.jenkinsci.plugins.workflow.graph.FlowNode;
+import org.datadog.jenkins.plugins.datadog.model.PipelineStepData;
public interface TraceWriteStrategy {
@Nullable
Payload serialize(BuildData buildData, Run, ?> run);
- @Nonnull
- Collection serialize(FlowNode flowNode, Run, ?> run);
+ @Nullable
+ Payload serialize(PipelineStepData stepData, Run, ?> run) throws IOException, InterruptedException;
void send(Collection spans);
}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java
index 39cbd4a96..57c0f3ae2 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriteStrategyImpl.java
@@ -1,14 +1,15 @@
package org.datadog.jenkins.plugins.datadog.traces.write;
import hudson.model.Run;
+import java.io.IOException;
import java.util.Collection;
import java.util.function.Consumer;
import java.util.logging.Logger;
-import java.util.stream.Collectors;
-import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
import net.sf.json.JSONObject;
import org.datadog.jenkins.plugins.datadog.DatadogUtilities;
import org.datadog.jenkins.plugins.datadog.model.BuildData;
+import org.datadog.jenkins.plugins.datadog.model.PipelineStepData;
import org.datadog.jenkins.plugins.datadog.traces.DatadogBaseBuildLogic;
import org.datadog.jenkins.plugins.datadog.traces.DatadogBasePipelineLogic;
import org.datadog.jenkins.plugins.datadog.traces.DatadogTraceBuildLogic;
@@ -16,7 +17,6 @@
import org.datadog.jenkins.plugins.datadog.traces.DatadogWebhookBuildLogic;
import org.datadog.jenkins.plugins.datadog.traces.DatadogWebhookPipelineLogic;
import org.datadog.jenkins.plugins.datadog.util.CircuitBreaker;
-import org.jenkinsci.plugins.workflow.graph.FlowNode;
public class TraceWriteStrategyImpl implements TraceWriteStrategy {
@@ -45,17 +45,18 @@ public TraceWriteStrategyImpl(Track track, Consumer> spansSe
);
}
+ @Nullable
@Override
public Payload serialize(final BuildData buildData, final Run, ?> run) {
- JSONObject buildSpan = buildLogic.finishBuildTrace(buildData, run);
+ JSONObject buildSpan = buildLogic.toJson(buildData, run);
return buildSpan != null ? new Payload(buildSpan, track) : null;
}
- @Nonnull
+ @Nullable
@Override
- public Collection serialize(FlowNode flowNode, Run, ?> run) {
- Collection stepSpans = pipelineLogic.execute(flowNode, run);
- return stepSpans.stream().map(payload -> new Payload(payload, track)).collect(Collectors.toList());
+ public Payload serialize(PipelineStepData stepData, Run, ?> run) throws IOException, InterruptedException {
+ JSONObject stepSpan = pipelineLogic.toJson(stepData, run);
+ return stepSpan != null ? new Payload(stepSpan, track) : null;
}
@Override
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java
index 7d911a635..c82bbceb4 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/traces/write/TraceWriter.java
@@ -1,8 +1,8 @@
package org.datadog.jenkins.plugins.datadog.traces.write;
import hudson.model.Run;
+import java.io.IOException;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
@@ -13,7 +13,7 @@
import org.datadog.jenkins.plugins.datadog.DatadogClient;
import org.datadog.jenkins.plugins.datadog.DatadogUtilities;
import org.datadog.jenkins.plugins.datadog.model.BuildData;
-import org.jenkinsci.plugins.workflow.graph.FlowNode;
+import org.datadog.jenkins.plugins.datadog.model.PipelineStepData;
public final class TraceWriter {
@@ -58,11 +58,9 @@ public void submitBuild(final BuildData buildData, final Run,?> run) throws In
submit(span);
}
- public void submitPipelineStep(FlowNode flowNode, Run, ?> run) throws InterruptedException, TimeoutException {
- Collection spans = traceWriteStrategy.serialize(flowNode, run);
- for (Payload span : spans) {
- submit(span);
- }
+ public void submitPipelineStep(PipelineStepData stepData, Run, ?> run) throws InterruptedException, TimeoutException, IOException {
+ Payload span = traceWriteStrategy.serialize(stepData, run);
+ submit(span);
}
private void submit(@Nullable Payload span) throws InterruptedException, TimeoutException {
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/util/DatadogActionConverter.java b/src/main/java/org/datadog/jenkins/plugins/datadog/util/DatadogActionConverter.java
new file mode 100644
index 000000000..fb5951794
--- /dev/null
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/util/DatadogActionConverter.java
@@ -0,0 +1,22 @@
+package org.datadog.jenkins.plugins.datadog.util;
+
+import com.thoughtworks.xstream.converters.Converter;
+import com.thoughtworks.xstream.converters.MarshallingContext;
+import com.thoughtworks.xstream.converters.UnmarshallingContext;
+import com.thoughtworks.xstream.io.HierarchicalStreamReader;
+import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
+
+public abstract class DatadogActionConverter implements Converter {
+ protected void writeField(String name, Object value, HierarchicalStreamWriter writer, MarshallingContext context) {
+ writer.startNode(name);
+ context.convertAnother(value);
+ writer.endNode();
+ }
+
+ protected T readField(HierarchicalStreamReader reader, UnmarshallingContext context, Class type) {
+ reader.moveDown();
+ T value = (T) context.convertAnother(null, type);
+ reader.moveUp();
+ return value;
+ }
+}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/util/TagsUtil.java b/src/main/java/org/datadog/jenkins/plugins/datadog/util/TagsUtil.java
index 539e40d0c..6a2036f8f 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/util/TagsUtil.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/util/TagsUtil.java
@@ -25,10 +25,16 @@ of this software and associated documentation files (the "Software"), to deal
package org.datadog.jenkins.plugins.datadog.util;
-import net.sf.json.JSONArray;
-
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
import java.util.logging.Logger;
+import net.sf.json.JSONArray;
public class TagsUtil {
@@ -38,21 +44,15 @@ public static Map> merge(Map> dest, Map<
if (dest == null) {
dest = new HashMap<>();
}
- if (orig == null) {
- orig = new HashMap<>();
- }
- for (final Iterator>> iter = orig.entrySet().iterator(); iter.hasNext();){
- Map.Entry> entry = iter.next();
- final String oName = entry.getKey();
- Set dValues = dest.containsKey(oName) ? dest.get(oName) : new HashSet();
- if (dValues == null) {
- dValues = new HashSet<>();
- }
- Set oValues = entry.getValue();
- if (oValues != null) {
- dValues.addAll(oValues);
+ if (orig != null) {
+ for (Map.Entry> entry : orig.entrySet()) {
+ final String oName = entry.getKey();
+ Set oValues = entry.getValue();
+ Set dValues = dest.computeIfAbsent(oName, k -> new HashSet<>());
+ if (oValues != null) {
+ dValues.addAll(oValues);
+ }
}
- dest.put(oName, dValues);
}
return dest;
}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/GitUtils.java b/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/GitUtils.java
index fb6d50f94..5a1c8cab1 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/GitUtils.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/GitUtils.java
@@ -2,7 +2,6 @@
import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_BRANCH;
import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_COMMIT_SHA;
-import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_DEFAULT_BRANCH;
import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_REPOSITORY_URL;
import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.DD_GIT_TAG;
import static org.datadog.jenkins.plugins.datadog.util.git.GitConstants.GIT_BRANCH;
@@ -13,30 +12,21 @@
import hudson.EnvVars;
import hudson.FilePath;
-import hudson.model.Executor;
import hudson.model.Run;
import hudson.model.TaskListener;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Map;
+import java.util.logging.Logger;
+import java.util.regex.Pattern;
+import javax.annotation.Nullable;
import org.apache.commons.lang.StringUtils;
-import org.datadog.jenkins.plugins.datadog.DatadogUtilities;
import org.datadog.jenkins.plugins.datadog.audit.DatadogAudit;
-import org.datadog.jenkins.plugins.datadog.model.BuildData;
import org.datadog.jenkins.plugins.datadog.model.GitCommitAction;
import org.datadog.jenkins.plugins.datadog.model.GitRepositoryAction;
-import org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils;
-import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.revwalk.RevCommit;
import org.jenkinsci.plugins.gitclient.Git;
import org.jenkinsci.plugins.gitclient.GitClient;
-import org.jenkinsci.plugins.workflow.FilePathUtils;
-
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Map;
-import java.util.logging.Logger;
-import java.util.regex.Pattern;
public final class GitUtils {
@@ -44,63 +34,20 @@ public final class GitUtils {
private static transient final Pattern SHA1_PATTERN = Pattern.compile("\\b[a-f0-9]{40}\\b");
private static transient final Pattern SCP_REPO_URI_REGEX = Pattern.compile("^([\\w.~-]+@)?(?[\\w.-]+):(?[\\w./-]+)(?:\\?|$)(.*)$");
- private GitUtils(){}
-
- /**
- * Return the FilePath based on the Node name and the Workspace.
- * @param nodeName the node name to check
- * @param workspace the workspace to build the path
- * @return filePath for (nodeName, workspace)
- */
- public static FilePath buildFilePath(final String nodeName, final String workspace) {
- if(nodeName == null || workspace == null){
- LOGGER.fine("Unable to build FilePath. Either NodeName or Workspace is null");
- return null;
- }
-
- try {
- return DatadogUtilities.isMainNode(nodeName) ? new FilePath(FilePath.localChannel, workspace): FilePathUtils.find(nodeName, workspace);
- } catch (Exception e) {
- LOGGER.fine("Unable to build FilePath. Error: " + e);
- return null;
- }
- }
-
- /**
- * Return the FilePath associated with the run instance
- * @param run a particular execution of a Jenkins build
- * @return filePath for the run.
- */
- public static FilePath buildFilePath(final Run, ?> run){
- try {
- if(run == null) {
- LOGGER.fine("Unable to build FilePath. Run is null");
- return null;
- }
-
- final Executor executor = run.getExecutor();
- if(executor == null) {
- LOGGER.fine("Unable to build FilePath. Run executor is null");
- return null;
- }
-
- return executor.getCurrentWorkspace();
- } catch (Exception e) {
- LOGGER.fine("Unable to build FilePath. Error: " + e);
- return null;
- }
+ private GitUtils() {
}
/**
* Return the RevCommit for a certain commit based on the information
* stored in a certain workspace of a certain node.
+ *
* @param gitCommit the Git commit SHA to search info.
* @param gitClient the Git client used.
* @return revCommit
*/
- public static RevCommit searchRevCommit(final GitClient gitClient, final String gitCommit) {
+ public static RevCommit searchRevCommit(@Nullable final GitClient gitClient, final String gitCommit) {
try {
- if(gitClient == null) {
+ if (gitClient == null) {
LOGGER.fine("Unable to search RevCommit. GitClient is null");
return null;
}
@@ -114,23 +61,13 @@ public static RevCommit searchRevCommit(final GitClient gitClient, final String
/**
* Return the {@code RepositoryInfo} for a certain Git repository.
+ *
* @param gitClient The Git client to use to obtain the repository information
- * @param envVars the env vars available.
* @return repositoryInfo
*/
- public static RepositoryInfo searchRepositoryInfo(final GitClient gitClient, EnvVars envVars) {
+ public static RepositoryInfo searchRepositoryInfo(@Nullable final GitClient gitClient) {
try {
- // Check if the default branch has been configured using an environment variable by the user.
- // This is needed because the automatic detection of the default branch using
- // the Git client is not always possible cause it depends on how Jenkins checkouts
- // the repository. Not always there is a symbolic reference to the default branch.
- final String defaultBranch = GitInfoUtils.normalizeBranch(envVars.get(DD_GIT_DEFAULT_BRANCH, null));
- LOGGER.fine("Detected default branch from environment variables: " + defaultBranch);
- if(defaultBranch != null && !defaultBranch.isEmpty()) {
- return new RepositoryInfo(defaultBranch);
- }
-
- if(gitClient == null){
+ if (gitClient == null) {
LOGGER.fine("Unable to search RevCommit. GitClient is null");
return null;
}
@@ -142,149 +79,24 @@ public static RepositoryInfo searchRepositoryInfo(final GitClient gitClient, Env
}
}
- /**
- * Returns the GitCommitAction of the Run instance.
- * If the Run instance does not have GitCommitAction or
- * the current commit hash is different from the commit hash
- * stored in the GitCommitAction, then a new GitCommitAction
- * is built and stored in the Run instance.
- *
- * The GitCommit information is stored in an action because
- * it's fairly expensive to calculate. To avoid calculating
- * every time, it's store in the Run instance as an action.
- * @param run a particular execution of a Jenkins build
- * @param gitClient the Git client
- * @param gitCommit the git commit SHA to use
- * @return the GitCommitAction with the information about Git Commit.
- */
- public static GitCommitAction buildGitCommitAction(final Run,?> run, final GitClient gitClient, String gitCommit) {
- long start = System.currentTimeMillis();
- try {
- GitCommitAction commitAction = run.getAction(GitCommitAction.class);
- if(commitAction == null || !gitCommit.equals(commitAction.getCommit())) {
- try {
- if(gitClient == null){
- LOGGER.fine("Unable to build GitCommitAction. GitClient is null");
- return null;
- }
-
- final RevCommit revCommit = GitUtils.searchRevCommit(gitClient, gitCommit);
- if(revCommit == null) {
- LOGGER.fine("Unable to build GitCommitAction. RevCommit is null. [gitCommit: "+gitCommit+"]");
- return null;
- }
-
- final GitCommitAction.Builder builder = GitCommitAction.newBuilder();
- builder.withCommit(gitCommit);
- String message;
- try {
- message = StringUtils.abbreviate(revCommit.getFullMessage(), 1500);
- } catch (Exception e) {
- LOGGER.fine("Unable to obtain git commit full message. Selecting short message. Error: " + e);
- message = revCommit.getShortMessage();
- }
- builder.withMessage(message);
-
- final PersonIdent authorIdent = revCommit.getAuthorIdent();
- if(authorIdent != null){
- builder.withAuthorName(authorIdent.getName())
- .withAuthorEmail(authorIdent.getEmailAddress())
- .withAuthorDate(DatadogUtilities.toISO8601(authorIdent.getWhen()));
- }
-
- final PersonIdent committerIdent = revCommit.getCommitterIdent();
- if(committerIdent != null) {
- builder.withCommitterName(committerIdent.getName())
- .withCommitterEmail(committerIdent.getEmailAddress())
- .withCommitterDate(DatadogUtilities.toISO8601(committerIdent.getWhen()));
- }
-
- commitAction = builder.build();
- run.addOrReplaceAction(commitAction);
- } catch (Exception e) {
- LOGGER.fine("Unable to build GitCommitAction. Error: " + e);
- }
- }
- return commitAction;
- } finally {
- long end = System.currentTimeMillis();
- DatadogAudit.log("GitUtils.buildGitCommitAction", start, end);
- }
- }
-
- /**
- * Returns the GitRepositoryAction of the Run instance.
- * If the Run instance does not have GitRepositoryAction or
- * some infor is not populated in the GitRepositoryAction,
- * then a new GitCommitAction is built and stored in the Run instance.
- *
- * The GitRepository information is stored in an action because
- * it's fairly expensive to calculate. To avoid calculating
- * every time, it's store in the Run instance as an action.
- * @param run a particular execution of a Jenkins build
- * @param gitClient the Git client
- * @param envVars the env vars available
- * @param gitRepositoryURL the git repository URL to use
- * @return the GitRepositoryAction with the information about Git repository.
- */
- public static GitRepositoryAction buildGitRepositoryAction(Run, ?> run, GitClient gitClient, final EnvVars envVars, final String gitRepositoryURL) {
- long start = System.currentTimeMillis();
- try {
- GitRepositoryAction repoAction = run.getAction(GitRepositoryAction.class);
- if(repoAction == null || !gitRepositoryURL.equals(repoAction.getRepositoryURL())) {
- try {
- if(gitClient == null){
- LOGGER.fine("Unable to build GitRepositoryAction. GitClient is null");
- return null;
- }
-
- final RepositoryInfo repositoryInfo = GitUtils.searchRepositoryInfo(gitClient, envVars);
- if(repositoryInfo == null) {
- LOGGER.fine("Unable to build GitRepositoryAction. RepositoryInfo is null");
- return null;
- }
-
- final GitRepositoryAction.Builder builder = GitRepositoryAction.newBuilder();
- builder.withRepositoryURL(gitRepositoryURL);
- builder.withDefaultBranch(repositoryInfo.getDefaultBranch());
-
- repoAction = builder.build();
- run.addOrReplaceAction(repoAction);
- } catch (Exception e) {
- LOGGER.fine("Unable to build GitRepositoryAction. Error: " + e);
- }
- }
- return repoAction;
- } finally {
- long end = System.currentTimeMillis();
- DatadogAudit.log("GitUtils.buildGitRepositoryAction", start, end);
- }
- }
-
/**
* Creates a new instance of a {@code GitClient}.
- * @param run a particular execution of a Jenkins build
- * @param listener the task listener
- * @param envVars the env vars available
- * @param nodeName the node name to use to build the Git client
+ *
+ * @param listener the task listener
+ * @param envVars the env vars available
* @param workspace the workspace to use to build the Git client
* @return gitClient
*/
- public static GitClient newGitClient(final Run,?> run, final TaskListener listener, final EnvVars envVars, final String nodeName, final String workspace) {
+ @Nullable
+ public static GitClient newGitClient(final TaskListener listener, final EnvVars envVars, final FilePath workspace) {
long start = System.currentTimeMillis();
-
try {
try {
- FilePath ws = GitUtils.buildFilePath(run);
- if(ws == null){
- ws = GitUtils.buildFilePath(nodeName, workspace);
- }
-
- if(ws == null) {
+ if (workspace == null) {
return null;
}
- final Git git = Git.with(listener, envVars).in(ws);
+ final Git git = Git.with(listener, envVars).in(workspace);
return git.getClient();
} catch (Exception e) {
LOGGER.fine("Unable to create GitClient. Error: " + e);
@@ -298,15 +110,16 @@ public static GitClient newGitClient(final Run,?> run, final TaskListener list
/**
* Check if the git commit is a valid commit.
+ *
* @param gitCommit the git commit to evaluate
* @return true if the git commit is a valid SHA 40 (HEX)
*/
public static boolean isValidCommit(String gitCommit) {
- if(gitCommit == null || gitCommit.isEmpty()) {
+ if (gitCommit == null || gitCommit.isEmpty()) {
return false;
}
- if(gitCommit.length() != 40) {
+ if (gitCommit.length() != 40) {
return false;
}
@@ -315,11 +128,12 @@ public static boolean isValidCommit(String gitCommit) {
/**
* Check if the git repository URL is a valid repository
+ *
* @param gitRepositoryURL the current git repository
* @return true if the git repository url is a valid repository in either http or scp form.
*/
public static boolean isValidRepositoryURL(String gitRepositoryURL) {
- if(gitRepositoryURL == null || gitRepositoryURL.isEmpty()) {
+ if (gitRepositoryURL == null || gitRepositoryURL.isEmpty()) {
return false;
}
@@ -334,7 +148,8 @@ public static boolean isValidRepositoryURL(String gitRepositoryURL) {
/**
* Check if the GitRepositoryAction has been already created and populated.
* Typically this method is used to avoid calculating the action multiple times.
- * @param run the current run
+ *
+ * @param run the current run
* @param gitRepositoryUrl the current git respository
* @return true if the action has been created and populated.
*/
@@ -346,7 +161,8 @@ public static boolean isRepositoryInfoAlreadyCreated(Run, ?> run, final String
/**
* Check if the GitCommitAction has been already created and populated.
* Typically this method is used to avoid calculating the action multiple times.
- * @param run the current run
+ *
+ * @param run the current run
* @param gitCommit the git commit to check for
* @return true if the action has been created and populated.
*/
@@ -360,17 +176,15 @@ public static boolean isCommitInfoAlreadyCreated(Run, ?> run, final String git
* 1: Check user supplied env var
* 2: Check Jenkins env var
* 3: Check BuildData already calculated
+ *
* @param envVars the user supplied env vars
- * @param buildData the build data
* @return the branch value.
*/
- public static String resolveGitBranch(Map envVars, BuildData buildData) {
- if(StringUtils.isNotEmpty(envVars.get(DD_GIT_BRANCH))){
+ public static String resolveGitBranch(Map envVars) {
+ if (StringUtils.isNotEmpty(envVars.get(DD_GIT_BRANCH))) {
return envVars.get(DD_GIT_BRANCH);
- } else if (StringUtils.isNotEmpty(envVars.get(GIT_BRANCH))){
+ } else if (StringUtils.isNotEmpty(envVars.get(GIT_BRANCH))) {
return envVars.get(GIT_BRANCH);
- } else if(buildData != null){
- return buildData.getBranch("");
} else {
return null;
}
@@ -381,17 +195,15 @@ public static String resolveGitBranch(Map envVars, BuildData bui
* 1: Check user supplied env var
* 2: Check Jenkins env var
* 3: Check BuildData already calculated
+ *
* @param envVars the user supplied env vars
- * @param buildData the build data
* @return the commit sha value.
*/
- public static String resolveGitCommit(Map envVars, BuildData buildData) {
- if(isValidCommit(envVars.get(DD_GIT_COMMIT_SHA))){
+ public static String resolveGitCommit(Map envVars) {
+ if (isValidCommit(envVars.get(DD_GIT_COMMIT_SHA))) {
return envVars.get(DD_GIT_COMMIT_SHA);
- } else if(isValidCommit(envVars.get(GIT_COMMIT))){
+ } else if (isValidCommit(envVars.get(GIT_COMMIT))) {
return envVars.get(GIT_COMMIT);
- } else if(buildData != null){
- return buildData.getGitCommit("");
} else {
return null;
}
@@ -402,19 +214,17 @@ public static String resolveGitCommit(Map<String, String> envVars, BuildData bui
* 1: Check user supplied env var
* 2: Check Jenkins env var
* 3: Check BuildData already calculated
+ *
* @param envVars the user supplied env vars
- * @param buildData the build data
* @return the git repository url value.
*/
- public static String resolveGitRepositoryUrl(Map<String, String> envVars, BuildData buildData) {
- if(StringUtils.isNotEmpty(envVars.get(DD_GIT_REPOSITORY_URL))){
+ public static String resolveGitRepositoryUrl(Map<String, String> envVars) {
+ if (StringUtils.isNotEmpty(envVars.get(DD_GIT_REPOSITORY_URL))) {
return envVars.get(DD_GIT_REPOSITORY_URL);
- } else if(StringUtils.isNotEmpty(envVars.get(GIT_REPOSITORY_URL))) {
+ } else if (StringUtils.isNotEmpty(envVars.get(GIT_REPOSITORY_URL))) {
return envVars.get(GIT_REPOSITORY_URL);
- } else if(StringUtils.isNotEmpty(envVars.get(GIT_REPOSITORY_URL_ALT))){
+ } else if (StringUtils.isNotEmpty(envVars.get(GIT_REPOSITORY_URL_ALT))) {
return envVars.get(GIT_REPOSITORY_URL_ALT);
- } else if(buildData != null){
- return buildData.getGitUrl("");
} else {
return null;
}
@@ -424,15 +234,13 @@ public static String resolveGitRepositoryUrl(Map<String, String> envVars, BuildD
* Resolve the value for the git tag based
* 1: Check user supplied env var
* 3: Check BuildData already calculated
+ *
* @param envVars the user supplied environment variables
- * @param buildData the build data
* @return the git tag value.
*/
- public static String resolveGitTag(Map<String, String> envVars, BuildData buildData) {
- if(StringUtils.isNotEmpty(envVars.get(DD_GIT_TAG))){
+ public static String resolveGitTag(Map<String, String> envVars) {
+ if (StringUtils.isNotEmpty(envVars.get(DD_GIT_TAG))) {
return envVars.get(DD_GIT_TAG);
- } else if(buildData != null){
- return buildData.getGitTag("");
} else {
return null;
}
@@ -440,11 +248,12 @@ public static String resolveGitTag(Map<String, String> envVars, BuildData buildD
/**
* Check if the env vars map contains any environment variable with Git information supplied by the user manually.
+ *
* @param envVars the environment variables
* @return true if any of the env vars is not empty.
*/
public static boolean isUserSuppliedGit(Map<String, String> envVars) {
- if(envVars == null) {
+ if (envVars == null) {
return false;
}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfo.java b/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfo.java
index 91f74b215..15a2ed0bb 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfo.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfo.java
@@ -1,20 +1,34 @@
package org.datadog.jenkins.plugins.datadog.util.git;
import java.io.Serializable;
+import javax.annotation.Nullable;
public class RepositoryInfo implements Serializable {
- public static final RepositoryInfo EMPTY_REPOSITORY_INFO = new RepositoryInfo("");
-
private static final long serialVersionUID = 1L;
+ private final String repoUrl;
private final String defaultBranch;
+ private final String branch;
- public RepositoryInfo(String defaultBranch) {
+ public RepositoryInfo(String repoUrl, String defaultBranch, String branch) {
+ this.repoUrl = repoUrl;
this.defaultBranch = defaultBranch;
+ this.branch = branch;
+ }
+
+ @Nullable
+ public String getRepoUrl() {
+ return repoUrl;
}
+ @Nullable
public String getDefaultBranch() {
return defaultBranch;
}
+
+ @Nullable
+ public String getBranch() {
+ return branch;
+ }
}
diff --git a/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfoCallback.java b/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfoCallback.java
index d9f37b06c..079b1840d 100644
--- a/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfoCallback.java
+++ b/src/main/java/org/datadog/jenkins/plugins/datadog/util/git/RepositoryInfoCallback.java
@@ -1,18 +1,20 @@
package org.datadog.jenkins.plugins.datadog.util.git;
import hudson.remoting.VirtualChannel;
+import java.io.IOException;
+import java.util.Set;
+import java.util.logging.Logger;
import org.datadog.jenkins.plugins.datadog.traces.GitInfoUtils;
+import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
+import org.eclipse.jgit.lib.StoredConfig;
import org.jenkinsci.plugins.gitclient.RepositoryCallback;
-import java.io.IOException;
-import java.util.logging.Logger;
-
/**
* Returns the RepositoryInfo instance for a certain repository
* using the JGit.
- *
+ *
* This must be called using gitClient.withRepository(...) method.
* See GitUtils.
*/
@@ -24,25 +26,52 @@ public final class RepositoryInfoCallback implements RepositoryCallback remoteNames = repository.getRemoteNames();
+ if (!remoteNames.isEmpty()) {
+ return remoteNames.iterator().next();
+ }
+ return Constants.DEFAULT_REMOTE_NAME;
+ }
+
+ private String getDefaultBranch(Repository repository, String remoteName) throws Exception {
+ Ref remoteHead = repository.findRef("refs/remotes/" + remoteName + "/HEAD");
+ if (remoteHead != null && remoteHead.isSymbolic()) {
+ return GitInfoUtils.normalizeBranch(remoteHead.getTarget().getName());
+ }
+ if (repository.findRef("master") != null) {
+ return "master";
+ }
+ if (repository.findRef("main") != null) {
+ return "main";
+ }
+ return null;
+ }
}
diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/DatadogGlobalConfigurationTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/DatadogGlobalConfigurationTest.java
index f44233418..4709bf93c 100644
--- a/src/test/java/org/datadog/jenkins/plugins/datadog/DatadogGlobalConfigurationTest.java
+++ b/src/test/java/org/datadog/jenkins/plugins/datadog/DatadogGlobalConfigurationTest.java
@@ -25,7 +25,12 @@
public class DatadogGlobalConfigurationTest {
@ClassRule
- public static JenkinsRule jenkinsRule = new JenkinsRule();
+ public static JenkinsRule jenkinsRule;
+
+ static {
+ jenkinsRule = new JenkinsRule();
+ jenkinsRule.timeout = 300; // default value of 180 is too small for all the test cases in this class
+ }
@Rule public JenkinsConfiguredWithCodeRule r = new JenkinsConfiguredWithCodeRule();
diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/DatadogUtilitiesTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/DatadogUtilitiesTest.java
index 4da146b94..b5305b06a 100644
--- a/src/test/java/org/datadog/jenkins/plugins/datadog/DatadogUtilitiesTest.java
+++ b/src/test/java/org/datadog/jenkins/plugins/datadog/DatadogUtilitiesTest.java
@@ -25,6 +25,8 @@ of this software and associated documentation files (the "Software"), to deal
package org.datadog.jenkins.plugins.datadog;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.when;
@@ -67,55 +69,55 @@ public void setUpMocks() {
@Test
public void testCstrToList(){
- Assert.assertTrue(DatadogUtilities.cstrToList(null).isEmpty());
- Assert.assertTrue(DatadogUtilities.cstrToList("").isEmpty());
- Assert.assertTrue(DatadogUtilities.cstrToList(" , ").isEmpty());
+ assertTrue(DatadogUtilities.cstrToList(null).isEmpty());
+ assertTrue(DatadogUtilities.cstrToList("").isEmpty());
+ assertTrue(DatadogUtilities.cstrToList(" , ").isEmpty());
List<String> items = new ArrayList<>();
items.add("item1");
- Assert.assertTrue(DatadogUtilities.cstrToList("item1").equals(items));
- Assert.assertTrue(DatadogUtilities.cstrToList(" item1 ").equals(items));
- Assert.assertTrue(DatadogUtilities.cstrToList(" , item1 , ").equals(items));
+ assertTrue(DatadogUtilities.cstrToList("item1").equals(items));
+ assertTrue(DatadogUtilities.cstrToList(" item1 ").equals(items));
+ assertTrue(DatadogUtilities.cstrToList(" , item1 , ").equals(items));
items = new ArrayList<>();
items.add("item1");
items.add("item2");
- Assert.assertTrue(DatadogUtilities.cstrToList("item1,item2").equals(items));
- Assert.assertTrue(DatadogUtilities.cstrToList(" item1 , item2 ").equals(items));
- Assert.assertTrue(DatadogUtilities.cstrToList(" , item1 , item2 , ").equals(items));
+ assertTrue(DatadogUtilities.cstrToList("item1,item2").equals(items));
+ assertTrue(DatadogUtilities.cstrToList(" item1 , item2 ").equals(items));
+ assertTrue(DatadogUtilities.cstrToList(" , item1 , item2 , ").equals(items));
}
@Test
public void testLinesToList(){
- Assert.assertTrue(DatadogUtilities.linesToList(null).isEmpty());
- Assert.assertTrue(DatadogUtilities.linesToList("").isEmpty());
+ assertTrue(DatadogUtilities.linesToList(null).isEmpty());
+ assertTrue(DatadogUtilities.linesToList("").isEmpty());
List<String> items = new ArrayList<>();
items.add("item1");
- Assert.assertTrue(DatadogUtilities.linesToList("item1").equals(items));
- Assert.assertTrue(DatadogUtilities.linesToList(" item1 ").equals(items));
- Assert.assertTrue(DatadogUtilities.linesToList(" \n item1 \n ").equals(items));
+ assertTrue(DatadogUtilities.linesToList("item1").equals(items));
+ assertTrue(DatadogUtilities.linesToList(" item1 ").equals(items));
+ assertTrue(DatadogUtilities.linesToList(" \n item1 \n ").equals(items));
items = new ArrayList<>();
items.add("item1");
items.add("item2");
- Assert.assertTrue(DatadogUtilities.linesToList("item1\nitem2").equals(items));
- Assert.assertTrue(DatadogUtilities.linesToList(" item1 \n item2 ").equals(items));
- Assert.assertTrue(DatadogUtilities.linesToList(" \n item1 \n item2 \n ").equals(items));
+ assertTrue(DatadogUtilities.linesToList("item1\nitem2").equals(items));
+ assertTrue(DatadogUtilities.linesToList(" item1 \n item2 ").equals(items));
+ assertTrue(DatadogUtilities.linesToList(" \n item1 \n item2 \n ").equals(items));
}
@Test
public void isStageNodeTest() {
- Assert.assertFalse(DatadogUtilities.isStageNode(null));
+ assertFalse(DatadogUtilities.isStageNode(null));
BlockStartNode node = mock(BlockStartNode.class);
- Assert.assertFalse(DatadogUtilities.isStageNode(node));
+ assertFalse(DatadogUtilities.isStageNode(node));
when(node.getAction(LabelAction.class)).thenReturn(mock(LabelAction.class));
- Assert.assertTrue(DatadogUtilities.isStageNode(node));
+ assertTrue(DatadogUtilities.isStageNode(node));
when(node.getAction(ThreadNameAction.class)).thenReturn(mock(ThreadNameAction.class));
- Assert.assertFalse(DatadogUtilities.isStageNode(node));
+ assertFalse(DatadogUtilities.isStageNode(node));
}
@Test
@@ -213,4 +215,28 @@ public void testGetHostname() throws IOException {
}
}
+ @Test
+ public void testIsPrivateIPv4Address() {
+ assertFalse(DatadogUtilities.isPrivateIPv4Address(null));
+ assertFalse(DatadogUtilities.isPrivateIPv4Address(""));
+ assertFalse(DatadogUtilities.isPrivateIPv4Address("google.com"));
+ assertFalse(DatadogUtilities.isPrivateIPv4Address("my.subdomain.domain.com"));
+ assertFalse(DatadogUtilities.isPrivateIPv4Address("123.456.789.012"));
+ assertFalse(DatadogUtilities.isPrivateIPv4Address("10.0.my-domain.com"));
+ assertTrue(DatadogUtilities.isPrivateIPv4Address("10.0.0.1"));
+ assertFalse(DatadogUtilities.isPrivateIPv4Address("10.0.0.1.1"));
+ assertFalse(DatadogUtilities.isPrivateIPv4Address("10.0.0.1.org"));
+ assertTrue(DatadogUtilities.isPrivateIPv4Address("10.255.255.255"));
+ assertFalse(DatadogUtilities.isPrivateIPv4Address("10.255.255.256"));
+ assertFalse(DatadogUtilities.isPrivateIPv4Address("10.255.256.255"));
+ assertFalse(DatadogUtilities.isPrivateIPv4Address("10.-1.255.255"));
+ assertTrue(DatadogUtilities.isPrivateIPv4Address("172.16.0.1"));
+ assertTrue(DatadogUtilities.isPrivateIPv4Address("172.31.0.1"));
+ assertFalse(DatadogUtilities.isPrivateIPv4Address("172.15.0.1"));
+ assertFalse(DatadogUtilities.isPrivateIPv4Address("172.32.0.1"));
+ assertTrue(DatadogUtilities.isPrivateIPv4Address("192.168.0.1"));
+ assertTrue(DatadogUtilities.isPrivateIPv4Address("192.168.255.255"));
+ assertFalse(DatadogUtilities.isPrivateIPv4Address("192.167.255.255"));
+ }
+
}
diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java b/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java
index 0fd5e589e..9acc22a2f 100644
--- a/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java
+++ b/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientStub.java
@@ -25,7 +25,9 @@ of this software and associated documentation files (the "Software"), to deal
package org.datadog.jenkins.plugins.datadog.clients;
+import com.google.common.base.Objects;
import hudson.model.Run;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -35,14 +37,16 @@ of this software and associated documentation files (the "Software"), to deal
import java.util.Map;
import java.util.Optional;
import java.util.Set;
+import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
-import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
import net.sf.json.JSONObject;
import org.datadog.jenkins.plugins.datadog.DatadogClient;
import org.datadog.jenkins.plugins.datadog.DatadogEvent;
import org.datadog.jenkins.plugins.datadog.model.BuildData;
+import org.datadog.jenkins.plugins.datadog.model.PipelineStepData;
import org.datadog.jenkins.plugins.datadog.traces.DatadogTraceBuildLogic;
import org.datadog.jenkins.plugins.datadog.traces.DatadogTracePipelineLogic;
import org.datadog.jenkins.plugins.datadog.traces.DatadogWebhookBuildLogic;
@@ -52,7 +56,6 @@ of this software and associated documentation files (the "Software"), to deal
import org.datadog.jenkins.plugins.datadog.traces.write.Payload;
import org.datadog.jenkins.plugins.datadog.traces.write.TraceWriteStrategy;
import org.datadog.jenkins.plugins.datadog.traces.write.Track;
-import org.jenkinsci.plugins.workflow.graph.FlowNode;
import org.junit.Assert;
public class DatadogClientStub implements DatadogClient {
@@ -63,10 +66,10 @@ public class DatadogClientStub implements DatadogClient {
public List logLines;
public DatadogClientStub() {
- this.metrics = new ArrayList<>();
- this.serviceChecks = new ArrayList<>();
- this.events = new ArrayList<>();
- this.logLines = new ArrayList<>();
+ this.metrics = new CopyOnWriteArrayList<>();
+ this.serviceChecks = new CopyOnWriteArrayList<>();
+ this.events = new CopyOnWriteArrayList<>();
+ this.logLines = new CopyOnWriteArrayList<>();
}
@Override
@@ -78,7 +81,7 @@ public boolean event(DatadogEvent event) {
@Override
public boolean incrementCounter(String name, String hostname, Map<String, Set<String>> tags) {
for (DatadogMetric m : this.metrics) {
- if(m.same(new DatadogMetric(name, 0, hostname, convertTagMapToList(tags)))) {
+ if (m.same(new DatadogMetric(name, 0, hostname, convertTagMapToList(tags)))) {
double value = m.getValue() + 1;
this.metrics.remove(m);
this.metrics.add(new DatadogMetric(name, value, hostname, convertTagMapToList(tags)));
@@ -134,7 +137,7 @@ public boolean assertMetric(String name, double value, String hostname, String[]
"metrics: {" + this.metrics.toString() + " }");
return false;
}
-
+
/*
* Returns the value of the asserted metric if it exists.
*/
@@ -157,10 +160,10 @@ public double assertMetricGetValue(String name, String hostname, String[] tags)
*/
public boolean assertMetricValues(String name, double value, String hostname, int count) {
DatadogMetric m = new DatadogMetric(name, value, hostname, new ArrayList<>());
-
+
// compare without tags so metrics of the same value are considered the same.
long timesSeen = this.metrics.stream().filter(x -> x.sameNoTags(m)).count();
- if (timesSeen == count){
+ if (timesSeen == count) {
return true;
}
Assert.fail("metric { " + m.toString() + " found " + timesSeen + " times, not " + count);
@@ -175,7 +178,7 @@ public boolean assertMetricValuesMin(String name, double value, String hostname,
// compare without tags so metrics of the same value are considered the same.
long timesSeen = this.metrics.stream().filter(x -> x.sameNoTags(m)).count();
- if (timesSeen >= min){
+ if (timesSeen >= min) {
return true;
}
Assert.fail("metric { " + m.toString() + " found " + timesSeen + " times, not more than" + min);
@@ -186,12 +189,25 @@ public boolean assertMetric(String name, String hostname, String[] tags) {
// Assert that a metric with the same name and tags has already been submitted without checking the value.
DatadogMetric m = new DatadogMetric(name, 0, hostname, Arrays.asList(tags));
Optional<DatadogMetric> match = this.metrics.stream().filter(t -> t.same(m)).findFirst();
- if(match.isPresent()){
+ if (match.isPresent()) {
this.metrics.remove(match.get());
return true;
}
- Assert.fail("metric { " + m.toString() + " does not exist (ignoring value). " +
- "metrics: {" + this.metrics.toString() + " }");
+
+ List<DatadogMetric> sameMetricsNoTags = metrics.stream().filter(t -> t.sameNoTags(m)).collect(Collectors.toList());
+ if (!sameMetricsNoTags.isEmpty()) {
+ Assert.fail("metric { " + m + " does not exist (ignoring value).\n" +
+ "Same metrics ignoring tags: {" + sameMetricsNoTags + " }");
+ }
+
+ List<DatadogMetric> metricsWithSameName = metrics.stream().filter(t -> Objects.equal(t.getName(), m.getName())).collect(Collectors.toList());
+ if (!metricsWithSameName.isEmpty()) {
+ Assert.fail("metric { " + m + " does not exist (ignoring value).\n" +
+ "Metrics with same name: {" + metricsWithSameName + " }");
+ }
+
+ Assert.fail("metric { " + m + " does not exist (ignoring value).\n" +
+ "Metrics: {" + this.metrics.toString() + " }");
return false;
}
@@ -241,11 +257,11 @@ public boolean assertedAllEvents() {
return false;
}
- public static List convertTagMapToList(Map<String, Set<String>> tags){
+ public static List<String> convertTagMapToList(Map<String, Set<String>> tags) {
List<String> result = new ArrayList<>();
for (String name : tags.keySet()) {
Set<String> values = tags.get(name);
- for (String value : values){
+ for (String value : values) {
result.add(String.format("%s:%s", name, value));
}
}
@@ -253,7 +269,7 @@ public static List convertTagMapToList(Map> tags){
}
- public static Map<String, Set<String>> addTagToMap(Map<String, Set<String>> tags, String name, String value){
+ public static Map<String, Set<String>> addTagToMap(Map<String, Set<String>> tags, String name, String value) {
Set<String> v = tags.containsKey(name) ? tags.get(name) : new HashSet<String>();
v.add(value);
tags.put(name, v);
@@ -267,17 +283,18 @@ private static final class StubTraceWriteStrategy implements TraceWriteStrategy
private final Collection<TraceSpan> traces = new LinkedBlockingQueue<>();
private final Collection<JSONObject> webhooks = new LinkedBlockingQueue<>();
+ @Nullable
@Override
public Payload serialize(BuildData buildData, Run<?, ?> run) {
if (isWebhook) {
- JSONObject json = new DatadogWebhookBuildLogic().finishBuildTrace(buildData, run);
+ JSONObject json = new DatadogWebhookBuildLogic().toJson(buildData, run);
if (json == null) {
return null;
}
webhooks.add(json);
return new Payload(json, Track.WEBHOOK);
} else {
- TraceSpan span = new DatadogTraceBuildLogic().createSpan(buildData, run);
+ TraceSpan span = new DatadogTraceBuildLogic().toSpan(buildData, run);
if (span == null) {
return null;
}
@@ -287,19 +304,24 @@ public Payload serialize(BuildData buildData, Run<?, ?> run) {
}
}
- @Nonnull
+ @Nullable
@Override
- public Collection<Payload> serialize(FlowNode flowNode, Run<?, ?> run) {
+ public Payload serialize(PipelineStepData stepData, Run<?, ?> run) throws IOException, InterruptedException {
if (isWebhook) {
- Collection<JSONObject> jsons = new DatadogWebhookPipelineLogic().execute(flowNode, run);
- webhooks.addAll(jsons);
- return jsons.stream().map(payload -> new Payload(payload, Track.WEBHOOK)).collect(Collectors.toList());
+ JSONObject json = new DatadogWebhookPipelineLogic().toJson(stepData, run);
+ if (json == null) {
+ return null;
+ }
+ webhooks.add(json);
+ return new Payload(json, Track.WEBHOOK);
} else {
- Collection<TraceSpan> traceSpans = new DatadogTracePipelineLogic().collectTraces(flowNode, run);
- traces.addAll(traceSpans);
- JsonTraceSpanMapper mapper = new JsonTraceSpanMapper();
- List<JSONObject> jsons = traceSpans.stream().map(mapper::map).collect(Collectors.toList());
- return jsons.stream().map(payload -> new Payload(payload, Track.APM)).collect(Collectors.toList());
+ TraceSpan span = new DatadogTracePipelineLogic().toSpan(stepData, run);
+ if (span == null) {
+ return null;
+ }
+ traces.add(span);
+ JSONObject json = new JsonTraceSpanMapper().map(span);
+ return new Payload(json, Track.APM);
}
}
@@ -359,7 +381,7 @@ public List<JSONObject> getWebhooks() {
public List<TraceSpan> getSpans() {
ArrayList<TraceSpan> spans = new ArrayList<>(traceWriteStrategy.traces);
Collections.sort(spans, (span1, span2) -> {
- if(span1.getStartNano() < span2.getStartNano()){
+ if (span1.getStartNano() < span2.getStartNano()) {
return -1;
} else if (span1.getStartNano() > span2.getStartNano()) {
return 1;
diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientTest.java
index f121a2aaa..79e194405 100644
--- a/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientTest.java
+++ b/src/test/java/org/datadog/jenkins/plugins/datadog/clients/DatadogClientTest.java
@@ -110,8 +110,8 @@ public void testDogstatsDClientGetInstanceEnableValidations() {
public void testEvpProxyEnabled() {
DatadogGlobalConfiguration cfg = DatadogUtilities.getDatadogGlobalDescriptor();
cfg.setEnableCiVisibility(true);
- DatadogAgentClient client = Mockito.spy(new DatadogAgentClient("test",1234, 1235, 1236));
- Mockito.doReturn(new HashSet(Arrays.asList("/evp_proxy/v3/"))).when(client).fetchAgentSupportedEndpoints();
+ DatadogAgentClient client = Mockito.spy(new DatadogAgentClient("test",1234, 1235, 1236, 1_000));
+ Mockito.doReturn(new HashSet<>(Arrays.asList("/evp_proxy/v3/"))).when(client).fetchAgentSupportedEndpoints();
Assert.assertTrue(client.isEvpProxySupported());
}
@@ -119,7 +119,7 @@ public void testEvpProxyEnabled() {
public void testEvpProxyDisabled() {
DatadogGlobalConfiguration cfg = DatadogUtilities.getDatadogGlobalDescriptor();
cfg.setEnableCiVisibility(true);
- DatadogAgentClient client = Mockito.spy(new DatadogAgentClient("test",1234, 1235, 1236));
+ DatadogAgentClient client = Mockito.spy(new DatadogAgentClient("test",1234, 1235, 1236, 1_000));
Mockito.doReturn(new HashSet()).when(client).fetchAgentSupportedEndpoints();
Assert.assertFalse(client.isEvpProxySupported());
}
@@ -128,7 +128,7 @@ public void testEvpProxyDisabled() {
public void testEmptyAgentSupportedEndpointsWithNoAgent() {
DatadogGlobalConfiguration cfg = DatadogUtilities.getDatadogGlobalDescriptor();
cfg.setEnableCiVisibility(true);
- DatadogAgentClient client = new DatadogAgentClient("test", 1234, 1235, 1236);
+ DatadogAgentClient client = new DatadogAgentClient("test", 1234, 1235, 1236, 1_000);
Assert.assertTrue(client.fetchAgentSupportedEndpoints().isEmpty());
}
diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java
index f552a7b71..f37dc8f20 100644
--- a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java
+++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerIT.java
@@ -25,11 +25,14 @@
import hudson.model.FreeStyleBuild;
import hudson.model.FreeStyleProject;
import hudson.model.Label;
+import hudson.plugins.git.BranchSpec;
+import hudson.plugins.git.GitSCM;
+import hudson.plugins.git.extensions.impl.LocalBranch;
import hudson.slaves.DumbSlave;
import hudson.slaves.EnvironmentVariablesNodeProperty;
import java.io.IOException;
import java.io.InputStream;
-import java.net.URL;
+import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
@@ -40,13 +43,15 @@
import org.datadog.jenkins.plugins.datadog.DatadogUtilities;
import org.datadog.jenkins.plugins.datadog.clients.ClientFactory;
import org.datadog.jenkins.plugins.datadog.clients.DatadogClientStub;
-import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode;
+import org.datadog.jenkins.plugins.datadog.model.PipelineStepData;
import org.datadog.jenkins.plugins.datadog.traces.CITags;
import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan;
+import org.jetbrains.annotations.NotNull;
+import org.junit.AfterClass;
import org.junit.Before;
+import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
-import org.jvnet.hudson.test.ExtractResourceSCM;
import org.jvnet.hudson.test.JenkinsRule;
public class DatadogBuildListenerIT extends DatadogTraceAbstractTest {
@@ -54,9 +59,33 @@ public class DatadogBuildListenerIT extends DatadogTraceAbstractTest {
private static final String SAMPLE_SERVICE_NAME = "sampleServiceName";
@ClassRule
- public static JenkinsRule jenkinsRule = new JenkinsRule();
+ public static final JenkinsRule jenkinsRule = new JenkinsRule();
+
+ private static FilePath localGitRepoPath;
+
private DatadogClientStub clientStub;
+ @BeforeClass
+ public static void setUp() {
+ // to allow checkout from local git repositories - needed for some tests
+ GitSCM.ALLOW_LOCAL_CHECKOUT = true;
+
+ localGitRepoPath = jenkinsRule.jenkins.getRootPath().child("tmp").child("git-repo");
+ try (InputStream gitZip = DatadogBuildListenerIT.class.getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip")) {
+ localGitRepoPath.deleteRecursive();
+ localGitRepoPath.mkdirs();
+ localGitRepoPath.unzipFrom(gitZip);
+
+ } catch (Exception e) {
+ throw new RuntimeException("Could not create local git repo at " + localGitRepoPath.getRemote(), e);
+ }
+ }
+
+ @AfterClass
+ public static void tearDown() throws IOException, InterruptedException {
+ localGitRepoPath.deleteRecursive();
+ }
+
@Before
public void beforeEach() throws IOException {
DatadogGlobalConfiguration cfg = DatadogUtilities.getDatadogGlobalDescriptor();
@@ -112,30 +141,31 @@ public void testTraces() throws Exception {
EnvVars env = prop.getEnvVars();
env.put("GIT_BRANCH", "master");
env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f");
- env.put("GIT_URL", "https://github.com/johndoe/foobar.git");
+ env.put("GIT_URL", toUrl(localGitRepoPath.getRemote()));
jenkins.getGlobalNodeProperties().add(prop);
final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccess");
+
+ GitSCM git = new GitSCM(GitSCM.createRepoList(toUrl(localGitRepoPath.getRemote()), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master")));
+ project.setScm(git);
+
final FilePath ws = jenkins.getWorkspaceFor(project);
env.put("NODE_NAME", "master");
env.put("WORKSPACE", ws.getRemote());
- InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip");
- if(gitZip != null) {
- ws.unzipFrom(gitZip);
- }
+
FreeStyleBuild run = project.scheduleBuild2(0).get();
- final String buildPrefix = BuildPipelineNode.NodeType.PIPELINE.getTagName();
+ final String buildPrefix = PipelineStepData.StepType.PIPELINE.getTagName();
clientStub.waitForTraces(1);
final List spans = clientStub.getSpans();
assertEquals(1, spans.size());
final TraceSpan buildSpan = spans.get(0);
- assertGitVariablesOnSpan(buildSpan, "master");
+ assertGitVariablesOnSpan(buildSpan, "master", toUrl(localGitRepoPath.getRemote()));
final Map meta = buildSpan.getMeta();
final Map metrics = buildSpan.getMetrics();
- assertEquals(BuildPipelineNode.NodeType.PIPELINE.getBuildLevel(), meta.get(CITags._DD_CI_BUILD_LEVEL));
- assertEquals(BuildPipelineNode.NodeType.PIPELINE.getBuildLevel(), meta.get(CITags._DD_CI_LEVEL));
+ assertEquals(PipelineStepData.StepType.PIPELINE.getBuildLevel(), meta.get(CITags._DD_CI_BUILD_LEVEL));
+ assertEquals(PipelineStepData.StepType.PIPELINE.getBuildLevel(), meta.get(CITags._DD_CI_LEVEL));
assertEquals(ORIGIN_CIAPP_PIPELINE, meta.get(CITags._DD_ORIGIN));
assertEquals("jenkins.build", buildSpan.getOperationName());
assertEquals(SAMPLE_SERVICE_NAME, buildSpan.getServiceName());
@@ -156,12 +186,24 @@ public void testTraces() throws Exception {
checkHostNameTag(meta);
assertEquals("success", meta.get(CITags.JENKINS_RESULT));
assertEquals("jenkins-buildIntegrationSuccess-1", meta.get(CITags.JENKINS_TAG));
- assertNotNull(meta.get(CITags._DD_CI_STAGES));
- assertEquals("[]", meta.get(CITags._DD_CI_STAGES));
+ assertNull(meta.get(CITags._DD_CI_STAGES)); // this is a freestyle project which has no stages
assertCleanupActions(run);
}
+ @NotNull
+ private static String toUrl(String path) {
+ if (isRunningOnWindows()) {
+ return "file:///" + path.replace('\\', '/');
+ } else {
+ return "file://" + path;
+ }
+ }
+
+ private static boolean isRunningOnWindows() {
+ return System.getProperty("os.name").toLowerCase().contains("win");
+ }
+
@Test
public void testGitDefaultBranch() throws Exception {
Jenkins jenkins = jenkinsRule.jenkins;
@@ -169,16 +211,16 @@ public void testGitDefaultBranch() throws Exception {
EnvVars env = prop.getEnvVars();
env.put("GIT_BRANCH", "master");
env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f");
- env.put("GIT_URL", "https://github.com/johndoe/foobar.git");
+ env.put("GIT_URL", toUrl(localGitRepoPath.getRemote()));
final String defaultBranch = "refs/heads/hardcoded-master";
env.put("DD_GIT_DEFAULT_BRANCH", defaultBranch);
jenkins.getGlobalNodeProperties().add(prop);
final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessDefaultBranch");
- final URL gitZip = getClass().getClassLoader().getResource("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip");
- if(gitZip != null) {
- project.setScm(new ExtractResourceSCM(gitZip));
- }
+
+ GitSCM git = new GitSCM(GitSCM.createRepoList(toUrl(localGitRepoPath.getRemote()), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master")));
+ project.setScm(git);
+
project.scheduleBuild2(0).get();
clientStub.waitForTraces(1);
@@ -186,7 +228,7 @@ public void testGitDefaultBranch() throws Exception {
assertEquals(1, spans.size());
final TraceSpan buildSpan = spans.get(0);
- assertGitVariablesOnSpan(buildSpan, "hardcoded-master");
+ assertGitVariablesOnSpan(buildSpan, "hardcoded-master", toUrl(localGitRepoPath.getRemote()));
}
@Test
@@ -198,7 +240,7 @@ public void testUserSuppliedGitWithoutCommitInfo() throws Exception {
env.put(GIT_BRANCH, "not-valid-branch");
env.put(GIT_COMMIT, "not-valid-commit");
- env.put(DD_GIT_REPOSITORY_URL, "https://github.com/johndoe/foobar.git");
+ env.put(DD_GIT_REPOSITORY_URL, toUrl(localGitRepoPath.getRemote()));
env.put(DD_GIT_BRANCH, "master");
env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f");
final String defaultBranch = "refs/heads/hardcoded-master";
@@ -206,10 +248,10 @@ public void testUserSuppliedGitWithoutCommitInfo() throws Exception {
jenkins.getGlobalNodeProperties().add(prop);
final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessUserSuppliedGitWithoutCommitInfo");
- final URL gitZip = getClass().getClassLoader().getResource("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip");
- if(gitZip != null) {
- project.setScm(new ExtractResourceSCM(gitZip));
- }
+
+ GitSCM git = new GitSCM(GitSCM.createRepoList(toUrl(localGitRepoPath.getRemote()), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master")));
+ project.setScm(git);
+
project.scheduleBuild2(0).get();
clientStub.waitForTraces(1);
@@ -217,7 +259,7 @@ public void testUserSuppliedGitWithoutCommitInfo() throws Exception {
assertEquals(1, spans.size());
final TraceSpan buildSpan = spans.get(0);
- assertGitVariablesOnSpan(buildSpan, "hardcoded-master");
+ assertGitVariablesOnSpan(buildSpan, "hardcoded-master", toUrl(localGitRepoPath.getRemote()));
}
@Test
@@ -228,7 +270,7 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception {
env.put(GIT_REPOSITORY_URL, "not-valid-repo");
env.put(GIT_BRANCH, "not-valid-branch");
env.put(GIT_COMMIT, "not-valid-commit");
- env.put(DD_GIT_REPOSITORY_URL, "https://github.com/johndoe/foobar.git");
+ env.put(DD_GIT_REPOSITORY_URL, toUrl(localGitRepoPath.getRemote()));
env.put(DD_GIT_BRANCH, "master");
env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f");
env.put(DD_GIT_COMMIT_MESSAGE, "hardcoded-message");
@@ -243,10 +285,10 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception {
jenkins.getGlobalNodeProperties().add(prop);
final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessUserSuppliedGitWithCommitInfo");
- final URL gitZip = getClass().getClassLoader().getResource("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip");
- if(gitZip != null) {
- project.setScm(new ExtractResourceSCM(gitZip));
- }
+
+ GitSCM git = new GitSCM(GitSCM.createRepoList(toUrl(localGitRepoPath.getRemote()), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master")));
+ project.setScm(git);
+
project.scheduleBuild2(0).get();
clientStub.waitForTraces(1);
@@ -265,7 +307,7 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception {
assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT__SHA));
assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT_SHA));
assertEquals("master", meta.get(CITags.GIT_BRANCH));
- assertEquals("https://github.com/johndoe/foobar.git", meta.get(CITags.GIT_REPOSITORY_URL));
+ assertEquals(toUrl(localGitRepoPath.getRemote()), meta.get(CITags.GIT_REPOSITORY_URL));
assertEquals("hardcoded-master", meta.get(CITags.GIT_DEFAULT_BRANCH));
}
@@ -279,7 +321,7 @@ public void testUserSuppliedGitWithCommitInfoWebhook() throws Exception {
env.put(GIT_REPOSITORY_URL, "not-valid-repo");
env.put(GIT_BRANCH, "not-valid-branch");
env.put(GIT_COMMIT, "not-valid-commit");
- env.put(DD_GIT_REPOSITORY_URL, "https://github.com/johndoe/foobar.git");
+ env.put(DD_GIT_REPOSITORY_URL, toUrl(localGitRepoPath.getRemote()));
env.put(DD_GIT_BRANCH, "master");
env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f");
env.put(DD_GIT_COMMIT_MESSAGE, "hardcoded-message");
@@ -294,10 +336,10 @@ public void testUserSuppliedGitWithCommitInfoWebhook() throws Exception {
jenkins.getGlobalNodeProperties().add(prop);
final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessUserSuppliedGitWithCommitInfoWebhook");
- final URL gitZip = getClass().getClassLoader().getResource("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip");
- if(gitZip != null) {
- project.setScm(new ExtractResourceSCM(gitZip));
- }
+
+ GitSCM git = new GitSCM(GitSCM.createRepoList(toUrl(localGitRepoPath.getRemote()), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master")));
+ project.setScm(git);
+
project.scheduleBuild2(0).get();
clientStub.waitForWebhooks(1);
@@ -315,7 +357,7 @@ public void testUserSuppliedGitWithCommitInfoWebhook() throws Exception {
assertEquals("hardcoded-committer-date", meta.getString("commit_time"));
assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.getString("sha"));
assertEquals("master", meta.getString("branch"));
- assertEquals("https://github.com/johndoe/foobar.git", meta.getString("repository_url"));
+ assertEquals(toUrl(localGitRepoPath.getRemote()), meta.getString("repository_url"));
assertEquals("hardcoded-master", meta.getString("default_branch"));
}
@@ -382,17 +424,17 @@ public void testGitAlternativeRepoUrlWebhook() throws Exception {
EnvVars env = prop.getEnvVars();
env.put("GIT_BRANCH", "master");
env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f");
- env.put("GIT_URL_1", "https://github.com/johndoe/foobar.git");
+ env.put("GIT_URL_1", toUrl(localGitRepoPath.getRemote()));
jenkins.getGlobalNodeProperties().add(prop);
final FreeStyleProject project = jenkinsRule.createFreeStyleProject("buildIntegrationSuccessAltRepoUrlWebhook");
+
+ GitSCM git = new GitSCM(GitSCM.createRepoList(toUrl(localGitRepoPath.getRemote()), null), Collections.singletonList(new BranchSpec("*/master")), null, null, Collections.singletonList(new LocalBranch("master")));
+ project.setScm(git);
+
final FilePath ws = jenkins.getWorkspaceFor(project);
env.put("NODE_NAME", "master");
env.put("WORKSPACE", ws.getRemote());
- InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip");
- if(gitZip != null) {
- ws.unzipFrom(gitZip);
- }
project.scheduleBuild2(0).get();
@@ -401,7 +443,7 @@ public void testGitAlternativeRepoUrlWebhook() throws Exception {
assertEquals(1, webhooks.size());
final JSONObject webhook = webhooks.get(0);
- assertGitVariablesOnWebhook(webhook, "master");
+ assertGitVariablesOnWebhook(webhook, "master", toUrl(localGitRepoPath.getRemote()));
}
@Test
@@ -419,6 +461,8 @@ public void testTracesDisabled() throws Exception {
@Test
public void testTracesDisabledWebhooks() throws Exception {
+ clientStub.configureForWebhooks();
+
DatadogGlobalConfiguration cfg = DatadogUtilities.getDatadogGlobalDescriptor();
cfg.setEnableCiVisibility(false);
diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerTest.java
index b9622dbfe..4d7058ada 100644
--- a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerTest.java
+++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogBuildListenerTest.java
@@ -28,6 +28,7 @@ of this software and associated documentation files (the "Software"), to deal
import com.cloudbees.workflow.rest.external.StageNodeExt;
import hudson.EnvVars;
import hudson.model.*;
+import java.nio.charset.Charset;
import jenkins.model.Jenkins;
import org.datadog.jenkins.plugins.datadog.DatadogEvent.AlertType;
import org.datadog.jenkins.plugins.datadog.DatadogEvent.Priority;
@@ -82,7 +83,7 @@ public void setUpMocks() {
envVars.put("GIT_BRANCH", "test-branch");
workflowRun = mock(WorkflowRun.class);
-
+ when(workflowRun.getCharset()).thenReturn(Charset.defaultCharset());
}
@Test
diff --git a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java
index 12c3f19af..95d0ac7af 100644
--- a/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java
+++ b/src/test/java/org/datadog/jenkins/plugins/datadog/listeners/DatadogGraphListenerTest.java
@@ -35,12 +35,16 @@
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
import jenkins.model.Jenkins;
import net.sf.json.JSONObject;
import org.apache.commons.io.IOUtils;
@@ -48,7 +52,7 @@
import org.datadog.jenkins.plugins.datadog.DatadogUtilities;
import org.datadog.jenkins.plugins.datadog.clients.ClientFactory;
import org.datadog.jenkins.plugins.datadog.clients.DatadogClientStub;
-import org.datadog.jenkins.plugins.datadog.model.BuildPipelineNode;
+import org.datadog.jenkins.plugins.datadog.model.PipelineStepData;
import org.datadog.jenkins.plugins.datadog.traces.CITags;
import org.datadog.jenkins.plugins.datadog.traces.message.TraceSpan;
import org.jenkinsci.plugins.workflow.actions.LabelAction;
@@ -62,7 +66,10 @@
import org.jenkinsci.plugins.workflow.graph.BlockStartNode;
import org.jenkinsci.plugins.workflow.job.WorkflowJob;
import org.jenkinsci.plugins.workflow.job.WorkflowRun;
+import org.jetbrains.annotations.NotNull;
+import org.junit.AfterClass;
import org.junit.Before;
+import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.jvnet.hudson.test.JenkinsRule;
@@ -74,9 +81,32 @@ public class DatadogGraphListenerTest extends DatadogTraceAbstractTest {
@ClassRule
public static JenkinsRule jenkinsRule;
+ private static FilePath localGitRepoPath;
+
static {
+ // to allow checkout from local git repositories - needed for some tests
+ System.setProperty("hudson.plugins.git.GitSCM.ALLOW_LOCAL_CHECKOUT", "true");
+
jenkinsRule = new JenkinsRule();
- jenkinsRule.timeout = 300; // default value of 180 is too small for all the test cases in this class
+ jenkinsRule.timeout = 600; // default value of 180 is too small for all the test cases in this class
+ }
+
+ @BeforeClass
+ public static void setUp() {
+ localGitRepoPath = jenkinsRule.jenkins.getRootPath().child("tmp").child("git-repo");
+ try (InputStream gitZip = DatadogBuildListenerIT.class.getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip")) {
+ localGitRepoPath.deleteRecursive();
+ localGitRepoPath.mkdirs();
+ localGitRepoPath.unzipFrom(gitZip);
+
+ } catch (Exception e) {
+ throw new RuntimeException("Could not create local git repo at " + localGitRepoPath.getRemote(), e);
+ }
+ }
+
+ @AfterClass
+ public static void tearDown() throws IOException, InterruptedException {
+ localGitRepoPath.deleteRecursive();
}
private DatadogGraphListener listener;
@@ -147,12 +177,12 @@ public void testNewNode() throws IOException {
@Test
public void testIntegration() throws Exception {
- jenkinsRule.createOnlineSlave(new LabelAtom("windows"));
+ EnvVars windowsEnvVars = new EnvVars();
+ String windowsHostname = "windows-hostname";
+ windowsEnvVars.put("HOSTNAME", windowsHostname);
+ jenkinsRule.createOnlineSlave(new LabelAtom("windows"), windowsEnvVars);
WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegration");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineDefinition.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineDefinition.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
WorkflowRun run = job.scheduleBuild2(0).get();
BufferedReader br = new BufferedReader(run.getLogReader());
@@ -161,7 +191,6 @@ public void testIntegration() throws Exception {
System.out.println(s);
}
br.close();
- String hostname = DatadogUtilities.getHostname(null);
String[] baseTags = new String[]{
"jenkins_url:" + DatadogUtilities.getJenkinsUrl(),
"user_id:anonymous",
@@ -174,13 +203,23 @@ public void testIntegration() throws Exception {
String[] parentNames = new String[]{ "Test On Windows", "Test On Windows", "Test On Windows", "Parallel tests", "Parallel tests", "root", "root" };
for (int i = 0; i < depths.length; i++) {
+ String hostname;
+ String stageName = stageNames[i];
+ if (stageName.contains("Windows")) {
+ // agent { label "windows" }
+ hostname = windowsHostname;
+ } else {
+ // agent { label "built-in" }
+ hostname = DatadogUtilities.getHostname(null);
+ }
+
String[] expectedTags = Arrays.copyOf(baseTags, baseTags.length + 3);
expectedTags[expectedTags.length - 3] = "stage_depth:" + depths[i];
expectedTags[expectedTags.length - 2] = "stage_name:" + stageNames[i];
expectedTags[expectedTags.length - 1] = "parent_stage_name:" + parentNames[i];
clientStub.assertMetric("jenkins.job.stage_duration", hostname, expectedTags);
- if (stageNames[i] == "Test On Linux" || stageNames[i] == "Parallel tests") {
+ if (stageName.equals("Test On Linux") || stageName.equals("Parallel tests")) {
// Timeout is set to 11s, but since there are other instructions,
// we test it's at least 10s.
double pauseValue = clientStub.assertMetricGetValue("jenkins.job.stage_pause_duration", hostname, expectedTags);
@@ -203,22 +242,18 @@ public void testIntegrationGitInfo() throws Exception {
EnvVars env = prop.getEnvVars();
env.put("GIT_BRANCH", "master");
env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f");
- env.put("GIT_URL", "https://github.com/johndoe/foobar.git");
+ env.put("GIT_URL", toUrl(localGitRepoPath.getRemote()));
WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationSingleCommit");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineSuccess.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineSuccessLocalCheckout.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
final FilePath ws = jenkins.getWorkspaceFor(job);
env.put("NODE_NAME", "master");
env.put("WORKSPACE", ws.getRemote());
- InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip");
- if(gitZip != null) {
- ws.unzipFrom(gitZip);
- }
+
+ createLocallyAvailableGitRepo(jenkins);
+
jenkins.getGlobalNodeProperties().add(prop);
job.scheduleBuild2(0).get();
@@ -226,19 +261,25 @@ public void testIntegrationGitInfo() throws Exception {
final List spans = clientStub.getSpans();
assertEquals(3, spans.size());
final TraceSpan buildSpan = spans.get(0);
- assertGitVariablesOnSpan(buildSpan, "master");
+ assertGitVariablesOnSpan(buildSpan, "master", toUrl(localGitRepoPath.getRemote()));
+ }
+
+ @NotNull
+ private static String toUrl(String path) {
+ if (isRunningOnWindows()) {
+ return "file:///" + path.replace('\\', '/');
+ } else {
+ return "file://" + path;
+ }
}
@Test
public void testIntegrationNonCIVisibilityEnvVars() throws Exception {
Jenkins jenkins = jenkinsRule.jenkins;
WorkflowJob job = jenkins.createProject(WorkflowJob.class, "testPipelineGitBranchEnv");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineGitBranchEnv.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineGitBranchEnv.txt");
- String[] expectedTags = new String[]{
+ String[] expectedTags = new String[]{
"jenkins_url:" + DatadogUtilities.getJenkinsUrl(),
"user_id:anonymous",
"job:testPipelineGitBranchEnv",
@@ -261,29 +302,25 @@ public void testIntegrationGitInfoWebhooks() throws Exception {
EnvVars env = prop.getEnvVars();
env.put("GIT_BRANCH", "master");
env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f");
- env.put("GIT_URL", "https://github.com/johndoe/foobar.git");
+ env.put("GIT_URL", toUrl(localGitRepoPath.getRemote()));
WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationSingleCommitWebhooks");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineSuccess.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineSuccessLocalCheckout.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
final FilePath ws = jenkins.getWorkspaceFor(job);
env.put("NODE_NAME", "master");
env.put("WORKSPACE", ws.getRemote());
- InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip");
- if(gitZip != null) {
- ws.unzipFrom(gitZip);
- }
+
+ createLocallyAvailableGitRepo(jenkins);
+
jenkins.getGlobalNodeProperties().add(prop);
job.scheduleBuild2(0).get();
clientStub.waitForWebhooks(3);
final List webhook = clientStub.getWebhooks();
assertEquals(3, webhook.size());
- assertGitVariablesOnWebhook(webhook.get(0), "master");
+ assertGitVariablesOnWebhook(webhook.get(0), "master", toUrl(localGitRepoPath.getRemote()));
}
@Test
@@ -293,24 +330,20 @@ public void testIntegrationGitInfoDefaultBranchEnvVar() throws Exception {
EnvVars env = prop.getEnvVars();
env.put("GIT_BRANCH", "master");
env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f");
- env.put("GIT_URL", "https://github.com/johndoe/foobar.git");
+ env.put("GIT_URL", toUrl(localGitRepoPath.getRemote()));
final String defaultBranch = "refs/heads/hardcoded-master";
env.put("DD_GIT_DEFAULT_BRANCH", defaultBranch);
WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationSingleCommitDefaultBranchEnvVar");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineSuccess.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineSuccessLocalCheckout.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
final FilePath ws = jenkins.getWorkspaceFor(job);
env.put("NODE_NAME", "master");
env.put("WORKSPACE", ws.getRemote());
- InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip");
- if(gitZip != null) {
- ws.unzipFrom(gitZip);
- }
+
+ createLocallyAvailableGitRepo(jenkins);
+
jenkins.getGlobalNodeProperties().add(prop);
job.scheduleBuild2(0).get();
@@ -318,7 +351,7 @@ public void testIntegrationGitInfoDefaultBranchEnvVar() throws Exception {
final List spans = clientStub.getSpans();
assertEquals(3, spans.size());
final TraceSpan buildSpan = spans.get(0);
- assertGitVariablesOnSpan(buildSpan, "hardcoded-master");
+ assertGitVariablesOnSpan(buildSpan, "hardcoded-master", toUrl(localGitRepoPath.getRemote()));
}
@Test
@@ -328,22 +361,18 @@ public void testIntegrationGitInfoOverrideCommit() throws Exception {
EnvVars env = prop.getEnvVars();
env.put("GIT_BRANCH", "master");
env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f");
- env.put("GIT_URL", "https://github.com/johndoe/foobar.git");
+ env.put("GIT_URL", toUrl(localGitRepoPath.getRemote()));
WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationOverrideCommit");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelinesOverrideGitCommit.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelinesOverrideGitCommit.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
final FilePath ws = jenkins.getWorkspaceFor(job);
env.put("NODE_NAME", "master");
env.put("WORKSPACE", ws.getRemote());
- InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip");
- if(gitZip != null) {
- ws.unzipFrom(gitZip);
- }
+
+ createLocallyAvailableGitRepo(jenkins);
+
jenkins.getGlobalNodeProperties().add(prop);
job.scheduleBuild2(0).get();
@@ -355,6 +384,27 @@ public void testIntegrationGitInfoOverrideCommit() throws Exception {
}
}
+ private static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("\\$([A-Z_]+)");
+
+ private String getPipelineDefinition(String file) throws IOException {
+ Map replacements = Collections.singletonMap("LOCAL_REPO_URL", toUrl(localGitRepoPath.getRemote()));
+
+ String pipelineDefinition;
+ try (InputStream is = DatadogGraphListenerTest.class.getResourceAsStream(file)) {
+ StringBuffer pipelineBuilder = new StringBuffer();
+ String pipelineTemplate = IOUtils.toString(is, StandardCharsets.UTF_8);
+ Matcher m = PLACEHOLDER_PATTERN.matcher(pipelineTemplate);
+ while (m.find()) {
+ String placeholder = m.group(1);
+ m.appendReplacement(pipelineBuilder, replacements.get(placeholder));
+ }
+ m.appendTail(pipelineBuilder);
+ pipelineDefinition = pipelineBuilder.toString();
+ }
+
+ return pipelineDefinition;
+ }
+
@Test
public void testIntegrationGitAlternativeRepoUrl() throws Exception {
Jenkins jenkins = jenkinsRule.jenkins;
@@ -362,22 +412,18 @@ public void testIntegrationGitAlternativeRepoUrl() throws Exception {
EnvVars env = prop.getEnvVars();
env.put("GIT_BRANCH", "master");
env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f");
- env.put("GIT_URL_1", "https://github.com/johndoe/foobar.git");
+ env.put("GIT_URL_1", toUrl(localGitRepoPath.getRemote()));
WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationAltRepoUrl");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelinesOverrideGitCommit.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelinesOverrideGitCommit.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
final FilePath ws = jenkins.getWorkspaceFor(job);
env.put("NODE_NAME", "master");
env.put("WORKSPACE", ws.getRemote());
- InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip");
- if(gitZip != null) {
- ws.unzipFrom(gitZip);
- }
+
+ createLocallyAvailableGitRepo(jenkins);
+
jenkins.getGlobalNodeProperties().add(prop);
job.scheduleBuild2(0).get();
@@ -385,7 +431,7 @@ public void testIntegrationGitAlternativeRepoUrl() throws Exception {
final List spans = clientStub.getSpans();
assertEquals(5, spans.size());
for(TraceSpan span : spans) {
- assertEquals("https://github.com/johndoe/foobar.git", span.getMeta().get(CITags.GIT_REPOSITORY_URL));
+ assertEquals(toUrl(localGitRepoPath.getRemote()), span.getMeta().get(CITags.GIT_REPOSITORY_URL));
}
}
@@ -398,22 +444,18 @@ public void testIntegrationGitAlternativeRepoUrlWebhooks() throws Exception {
EnvVars env = prop.getEnvVars();
env.put("GIT_BRANCH", "master");
env.put("GIT_COMMIT", "401d997a6eede777602669ccaec059755c98161f");
- env.put("GIT_URL_1", "https://github.com/johndoe/foobar.git");
+ env.put("GIT_URL_1", toUrl(localGitRepoPath.getRemote()));
WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationAltRepoUrlWebhooks");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelinesOverrideGitCommit.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelinesOverrideGitCommit.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
final FilePath ws = jenkins.getWorkspaceFor(job);
env.put("NODE_NAME", "master");
env.put("WORKSPACE", ws.getRemote());
- InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip");
- if(gitZip != null) {
- ws.unzipFrom(gitZip);
- }
+
+ createLocallyAvailableGitRepo(jenkins);
+
jenkins.getGlobalNodeProperties().add(prop);
job.scheduleBuild2(0).get();
@@ -421,7 +463,7 @@ public void testIntegrationGitAlternativeRepoUrlWebhooks() throws Exception {
final List webhooks = clientStub.getWebhooks();
assertEquals(5, webhooks.size());
for(JSONObject webhook : webhooks) {
- assertEquals("https://github.com/johndoe/foobar.git", webhook.getJSONObject("git").get("repository_url"));
+ assertEquals(toUrl(localGitRepoPath.getRemote()), webhook.getJSONObject("git").get("repository_url"));
}
}
@@ -430,25 +472,21 @@ public void testUserSuppliedGitWithoutCommitInfo() throws Exception {
Jenkins jenkins = jenkinsRule.jenkins;
final EnvironmentVariablesNodeProperty prop = new EnvironmentVariablesNodeProperty();
EnvVars env = prop.getEnvVars();
- env.put(DD_GIT_REPOSITORY_URL, "https://github.com/johndoe/foobar.git");
+ env.put(DD_GIT_REPOSITORY_URL, toUrl(localGitRepoPath.getRemote()));
env.put(DD_GIT_BRANCH, "master");
env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f");
env.put(DD_GIT_TAG, "0.1.0");
WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationUserSuppliedGitWithoutCommitInfo");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineSuccess.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineSuccessLocalCheckout.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
final FilePath ws = jenkins.getWorkspaceFor(job);
env.put("NODE_NAME", "master");
env.put("WORKSPACE", ws.getRemote());
- InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip");
- if(gitZip != null) {
- ws.unzipFrom(gitZip);
- }
+
+ createLocallyAvailableGitRepo(jenkins);
+
jenkins.getGlobalNodeProperties().add(prop);
job.scheduleBuild2(0).get();
@@ -456,11 +494,20 @@ public void testUserSuppliedGitWithoutCommitInfo() throws Exception {
final List spans = clientStub.getSpans();
assertEquals(3, spans.size());
final TraceSpan buildSpan = spans.get(0);
- assertGitVariablesOnSpan(buildSpan, "master");
+ assertGitVariablesOnSpan(buildSpan, "master", toUrl(localGitRepoPath.getRemote()));
final Map meta = buildSpan.getMeta();
assertEquals("0.1.0", meta.get(CITags.GIT_TAG));
}
+ private void createLocallyAvailableGitRepo(Jenkins jenkins) throws IOException, InterruptedException {
+ try (InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip")) {
+ FilePath gitRepoPath = jenkins.createPath("/tmp/git-repo");
+ gitRepoPath.deleteRecursive();
+ gitRepoPath.mkdirs();
+ gitRepoPath.unzipFrom(gitZip);
+ }
+ }
+
@Test
public void testUserSuppliedGitWithoutCommitInfoWebhooks() throws Exception {
clientStub.configureForWebhooks();
@@ -468,25 +515,21 @@ public void testUserSuppliedGitWithoutCommitInfoWebhooks() throws Exception {
Jenkins jenkins = jenkinsRule.jenkins;
final EnvironmentVariablesNodeProperty prop = new EnvironmentVariablesNodeProperty();
EnvVars env = prop.getEnvVars();
- env.put(DD_GIT_REPOSITORY_URL, "https://github.com/johndoe/foobar.git");
+ env.put(DD_GIT_REPOSITORY_URL, toUrl(localGitRepoPath.getRemote()));
env.put(DD_GIT_BRANCH, "master");
env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f");
env.put(DD_GIT_TAG, "0.1.0");
WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationUserSuppliedGitWithoutCommitInfoWebhooks");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineSuccess.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineSuccessLocalCheckout.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
final FilePath ws = jenkins.getWorkspaceFor(job);
env.put("NODE_NAME", "master");
env.put("WORKSPACE", ws.getRemote());
- InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip");
- if(gitZip != null) {
- ws.unzipFrom(gitZip);
- }
+
+ createLocallyAvailableGitRepo(jenkins);
+
jenkins.getGlobalNodeProperties().add(prop);
job.scheduleBuild2(0).get();
@@ -494,7 +537,7 @@ public void testUserSuppliedGitWithoutCommitInfoWebhooks() throws Exception {
final List webhooks = clientStub.getWebhooks();
assertEquals(3, webhooks.size());
final JSONObject webhook = webhooks.get(0);
- assertGitVariablesOnWebhook(webhook, "master");
+ assertGitVariablesOnWebhook(webhook, "master", toUrl(localGitRepoPath.getRemote()));
assertEquals("0.1.0", webhook.getJSONObject("git").get("tag"));
}
@@ -503,7 +546,7 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception {
Jenkins jenkins = jenkinsRule.jenkins;
final EnvironmentVariablesNodeProperty prop = new EnvironmentVariablesNodeProperty();
EnvVars env = prop.getEnvVars();
- env.put(DD_GIT_REPOSITORY_URL, "https://github.com/johndoe/foobar.git");
+ env.put(DD_GIT_REPOSITORY_URL, toUrl(localGitRepoPath.getRemote()));
env.put(DD_GIT_BRANCH, "master");
env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f");
env.put(DD_GIT_COMMIT_MESSAGE, "hardcoded-message");
@@ -516,19 +559,15 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception {
final String defaultBranch = "refs/heads/hardcoded-master";
env.put(DD_GIT_DEFAULT_BRANCH, defaultBranch);
WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationUserSuppliedGitWithCommitInfo");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineSuccess.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineSuccessLocalCheckout.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
final FilePath ws = jenkins.getWorkspaceFor(job);
env.put("NODE_NAME", "master");
env.put("WORKSPACE", ws.getRemote());
- InputStream gitZip = getClass().getClassLoader().getResourceAsStream("org/datadog/jenkins/plugins/datadog/listeners/git/gitFolder.zip");
- if(gitZip != null) {
- ws.unzipFrom(gitZip);
- }
+
+ createLocallyAvailableGitRepo(jenkins);
+
jenkins.getGlobalNodeProperties().add(prop);
job.scheduleBuild2(0).get();
@@ -547,7 +586,7 @@ public void testUserSuppliedGitWithCommitInfo() throws Exception {
assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT__SHA));
assertEquals("401d997a6eede777602669ccaec059755c98161f", meta.get(CITags.GIT_COMMIT_SHA));
assertEquals("master", meta.get(CITags.GIT_BRANCH));
- assertEquals("https://github.com/johndoe/foobar.git", meta.get(CITags.GIT_REPOSITORY_URL));
+ assertEquals(toUrl(localGitRepoPath.getRemote()), meta.get(CITags.GIT_REPOSITORY_URL));
assertEquals("hardcoded-master", meta.get(CITags.GIT_DEFAULT_BRANCH));
}
@@ -560,10 +599,7 @@ public void testRawRepositoryUrl() throws Exception {
env.put(DD_GIT_BRANCH, "master");
env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f");
WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationRawRepositoryUrl");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineSuccess.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineSuccess.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
jenkins.getGlobalNodeProperties().add(prop);
@@ -589,10 +625,7 @@ public void testFilterSensitiveInfoRepoUrl() throws Exception {
env.put(DD_GIT_BRANCH, "master");
env.put(DD_GIT_COMMIT_SHA, "401d997a6eede777602669ccaec059755c98161f");
WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationFilterSensitiveInfoRepoUrl");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineSuccess.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineSuccess.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
jenkins.getGlobalNodeProperties().add(prop);
@@ -612,10 +645,7 @@ public void testFilterSensitiveInfoRepoUrl() throws Exception {
@Test
public void testStageNamePropagation() throws Exception{
WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegrationStages");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineStages.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineStages.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
new Thread(() -> {
try {
@@ -631,18 +661,18 @@ public void testStageNamePropagation() throws Exception{
assertEquals(6, spans.size());
final TraceSpan stage1 = searchSpan(spans, "Stage 1");
- final String stage1Name = stage1.getMeta().get(BuildPipelineNode.NodeType.STAGE.getTagName() + CITags._NAME);
+ final String stage1Name = stage1.getMeta().get(PipelineStepData.StepType.STAGE.getTagName() + CITags._NAME);
assertTrue(stage1Name != null && !stage1Name.isEmpty());
final TraceSpan stepStage1 = searchFirstChild(spans, stage1);
- assertEquals(stage1Name, stepStage1.getMeta().get(BuildPipelineNode.NodeType.STAGE.getTagName() + CITags._NAME));
+ assertEquals(stage1Name, stepStage1.getMeta().get(PipelineStepData.StepType.STAGE.getTagName() + CITags._NAME));
final TraceSpan stage2 = searchSpan(spans, "Stage 2");
- final String stage2Name = stage2.getMeta().get(BuildPipelineNode.NodeType.STAGE.getTagName() + CITags._NAME);
+ final String stage2Name = stage2.getMeta().get(PipelineStepData.StepType.STAGE.getTagName() + CITags._NAME);
assertTrue(stage2Name != null && !stage2Name.isEmpty());
final TraceSpan stepStage2 = searchFirstChild(spans, stage2);
- assertEquals(stage2Name, stepStage2.getMeta().get(BuildPipelineNode.NodeType.STAGE.getTagName() + CITags._NAME));
+ assertEquals(stage2Name, stepStage2.getMeta().get(PipelineStepData.StepType.STAGE.getTagName() + CITags._NAME));
}
@Test
@@ -650,10 +680,7 @@ public void testStageNamePropagationWebhook() throws Exception{
clientStub.configureForWebhooks();
WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegrationStagesWebhook");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineStagesWebhook.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineStagesWebhook.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
new Thread(() -> {
try {
@@ -693,10 +720,7 @@ public void testStageNamePropagationWebhook() throws Exception{
@Test
public void testIntegrationPipelineQueueTimeOnStages() throws Exception {
WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegrationQueueTimeOnStages");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineQueueOnStages.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineQueueOnStages.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
// schedule build and wait for it to get queued
new Thread(() -> {
@@ -713,12 +737,12 @@ public void testIntegrationPipelineQueueTimeOnStages() throws Exception {
final List spans = clientStub.getSpans();
assertEquals(6, spans.size());
- final TraceSpan buildSpan = spans.get(0);
+ final TraceSpan buildSpan = searchSpan(spans, "pipelineIntegrationQueueTimeOnStages");
assertEquals(Double.valueOf(0), buildSpan.getMetrics().get(CITags.QUEUE_TIME));
assertEquals("built-in", buildSpan.getMeta().get(CITags.NODE_NAME));
assertEquals("[\"built-in\"]", buildSpan.getMeta().get(CITags.NODE_LABELS));
- final TraceSpan runStages = spans.get(1);
+ final TraceSpan runStages = searchSpan(spans, "Run stages");
assertEquals(Double.valueOf(0), runStages.getMetrics().get(CITags.QUEUE_TIME));
assertEquals("built-in", runStages.getMeta().get(CITags.NODE_NAME));
assertEquals("[\"built-in\"]", runStages.getMeta().get(CITags.NODE_LABELS));
@@ -801,10 +825,7 @@ public void testIntegrationPipelineQueueTimeOnPipeline() throws Exception {
env.put("NODE_NAME", "testPipeline");
jenkinsRule.jenkins.getGlobalNodeProperties().add(envProps);
WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegrationQueueTimeOnPipeline");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineQueueOnPipeline.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineQueueOnPipeline.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
// schedule build and wait for it to get queued
@@ -846,12 +867,9 @@ public void testIntegrationPipelineQueueTimeOnPipeline() throws Exception {
@Test
public void testIntegrationNoFailureTag() throws Exception {
- jenkinsRule.createOnlineSlave(new LabelAtom("windows"));
+ jenkinsRule.createOnlineSlave();
WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegrationSuccess");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineSuccess.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineSuccess.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
final WorkflowRun run = job.scheduleBuild2(0).get();
String hostname = DatadogUtilities.getHostname(null);
@@ -867,9 +885,9 @@ public void testIntegrationNoFailureTag() throws Exception {
clientStub.assertMetric("jenkins.job.stage_duration", hostname, tags);
clientStub.assertMetric("jenkins.job.stage_pause_duration", 0, hostname, tags);
- final String buildPrefix = BuildPipelineNode.NodeType.PIPELINE.getTagName();
- final String stagePrefix = BuildPipelineNode.NodeType.STAGE.getTagName();
- final String stepPrefix = BuildPipelineNode.NodeType.STEP.getTagName();
+ final String buildPrefix = PipelineStepData.StepType.PIPELINE.getTagName();
+ final String stagePrefix = PipelineStepData.StepType.STAGE.getTagName();
+ final String stepPrefix = PipelineStepData.StepType.STEP.getTagName();
clientStub.waitForTraces(3);
final List spans = clientStub.getSpans();
@@ -896,8 +914,8 @@ public void testIntegrationNoFailureTag() throws Exception {
assertEquals("success", buildSpanMeta.get(CITags.JENKINS_RESULT));
assertEquals("jenkins-pipelineIntegrationSuccess-1", buildSpanMeta.get(CITags.JENKINS_TAG));
assertEquals("false", buildSpanMeta.get(CITags._DD_CI_INTERNAL));
- assertEquals(BuildPipelineNode.NodeType.PIPELINE.getBuildLevel(), buildSpanMeta.get(CITags._DD_CI_BUILD_LEVEL));
- assertEquals(BuildPipelineNode.NodeType.PIPELINE.getBuildLevel(), buildSpanMeta.get(CITags._DD_CI_LEVEL));
+ assertEquals(PipelineStepData.StepType.PIPELINE.getBuildLevel(), buildSpanMeta.get(CITags._DD_CI_BUILD_LEVEL));
+ assertEquals(PipelineStepData.StepType.PIPELINE.getBuildLevel(), buildSpanMeta.get(CITags._DD_CI_LEVEL));
assertNotNull(buildSpanMeta.get(CITags._DD_CI_STAGES));
assertTrue(buildSpanMeta.get(CITags._DD_CI_STAGES).contains("{\"name\":\"test\",\"duration\""));
@@ -914,13 +932,13 @@ public void testIntegrationNoFailureTag() throws Exception {
assertNotNull(stageSpanMeta.get(stagePrefix + CITags._URL));
assertNotNull(stageSpanMeta.get(CITags.NODE_NAME));
assertNotNull(stageSpanMeta.get(CITags.NODE_LABELS));
- checkHostNameTag(buildSpanMeta);
+ checkHostNameTag(stageSpanMeta);
assertEquals("false", stageSpanMeta.get(CITags._DD_CI_INTERNAL));
assertEquals("4", stageSpanMeta.get(stagePrefix + CITags._NUMBER));
- assertEquals(BuildPipelineNode.NodeType.STAGE.getBuildLevel(), stageSpanMeta.get(CITags._DD_CI_BUILD_LEVEL));
- assertEquals(BuildPipelineNode.NodeType.STAGE.getBuildLevel(), stageSpanMeta.get(CITags._DD_CI_LEVEL));
- assertEquals("jenkins-pipelineIntegrationSuccess-1", stageSpanMeta.get(BuildPipelineNode.NodeType.PIPELINE.getTagName() + CITags._ID));
- assertEquals("pipelineIntegrationSuccess", stageSpanMeta.get(BuildPipelineNode.NodeType.PIPELINE.getTagName() + CITags._NAME));
+ assertEquals(PipelineStepData.StepType.STAGE.getBuildLevel(), stageSpanMeta.get(CITags._DD_CI_BUILD_LEVEL));
+ assertEquals(PipelineStepData.StepType.STAGE.getBuildLevel(), stageSpanMeta.get(CITags._DD_CI_LEVEL));
+ assertEquals("jenkins-pipelineIntegrationSuccess-1", stageSpanMeta.get(PipelineStepData.StepType.PIPELINE.getTagName() + CITags._ID));
+ assertEquals("pipelineIntegrationSuccess", stageSpanMeta.get(PipelineStepData.StepType.PIPELINE.getTagName() + CITags._NAME));
assertNotNull(stageSpan.getMetrics().get(CITags.QUEUE_TIME));
final TraceSpan stepSpan = spans.get(2);
@@ -941,11 +959,11 @@ public void testIntegrationNoFailureTag() throws Exception {
checkHostNameTag(stepSpanMeta);
assertEquals("false", stepSpanMeta.get(CITags._DD_CI_INTERNAL));
assertEquals("5", stepSpanMeta.get(stepPrefix + CITags._NUMBER));
- assertEquals(BuildPipelineNode.NodeType.STEP.getBuildLevel(), stepSpanMeta.get(CITags._DD_CI_BUILD_LEVEL));
- assertEquals(BuildPipelineNode.NodeType.STEP.getBuildLevel(), stepSpanMeta.get(CITags._DD_CI_LEVEL));
- assertEquals("jenkins-pipelineIntegrationSuccess-1", stepSpanMeta.get(BuildPipelineNode.NodeType.PIPELINE.getTagName() + CITags._ID));
- assertEquals("pipelineIntegrationSuccess", stepSpanMeta.get(BuildPipelineNode.NodeType.PIPELINE.getTagName() + CITags._NAME));
- assertEquals("test", stepSpanMeta.get(BuildPipelineNode.NodeType.STAGE.getTagName() + CITags._NAME));
+ assertEquals(PipelineStepData.StepType.STEP.getBuildLevel(), stepSpanMeta.get(CITags._DD_CI_BUILD_LEVEL));
+ assertEquals(PipelineStepData.StepType.STEP.getBuildLevel(), stepSpanMeta.get(CITags._DD_CI_LEVEL));
+ assertEquals("jenkins-pipelineIntegrationSuccess-1", stepSpanMeta.get(PipelineStepData.StepType.PIPELINE.getTagName() + CITags._ID));
+ assertEquals("pipelineIntegrationSuccess", stepSpanMeta.get(PipelineStepData.StepType.PIPELINE.getTagName() + CITags._NAME));
+ assertEquals("test", stepSpanMeta.get(PipelineStepData.StepType.STAGE.getTagName() + CITags._NAME));
assertNotNull(stepSpan.getMetrics().get(CITags.QUEUE_TIME));
assertCleanupActions(run);
@@ -954,10 +972,7 @@ public void testIntegrationNoFailureTag() throws Exception {
@Test
public void testIntegrationPipelineSkippedLogic() throws Exception {
WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegration-SkippedLogic");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineSkippedLogic.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineSkippedLogic.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
job.scheduleBuild2(0).get();
@@ -975,10 +990,7 @@ public void testIntegrationPipelineSkippedLogicWebhook() throws Exception {
clientStub.configureForWebhooks();
WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegration-SkippedLogicWebhook");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineSkippedLogic.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineSkippedLogic.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
job.scheduleBuild2(0).get();
@@ -986,9 +998,10 @@ public void testIntegrationPipelineSkippedLogicWebhook() throws Exception {
final List webhooks = clientStub.getWebhooks();
assertEquals(2, webhooks.size());
- final JSONObject webhook = webhooks.get(1);
- assertEquals("Stage", webhook.getString("name"));
- assertEquals("skipped", webhook.getString("status"));
+ final JSONObject stage = searchWebhookByLevel(webhooks, "stage");
+ assertNotNull("Could not find stage webhook", stage);
+ assertEquals("Stage", stage.getString("name"));
+ assertEquals("skipped", stage.getString("status"));
}
@Test
@@ -996,12 +1009,9 @@ public void testIntegrationTracesDisabled() throws Exception{
DatadogGlobalConfiguration cfg = DatadogUtilities.getDatadogGlobalDescriptor();
cfg.setEnableCiVisibility(false);
- jenkinsRule.createOnlineSlave(new LabelAtom("windows"));
+ jenkinsRule.createOnlineSlave();
WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegrationSuccess-notraces");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineSuccess.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineSuccess.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
job.scheduleBuild2(0).get();
String hostname = DatadogUtilities.getHostname(null);
@@ -1061,10 +1071,7 @@ public void getTimeTest() {
@Test
public void testStagesNodeNames_complexPipelineStages01() throws Exception {
WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "complexPipelineStages01");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineComplexStages01.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineComplexStages01.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
// schedule build and wait for it to get queued
new Thread(() -> {
@@ -1088,8 +1095,6 @@ public void testStagesNodeNames_complexPipelineStages01() throws Exception {
final TraceSpan prepareBlock = spans.get(1);
assertEquals("Prepare", prepareBlock.getResourceName());
- assertEquals(worker03.getNodeName(), prepareBlock.getMeta().get(CITags.NODE_NAME));
- assertTrue(prepareBlock.getMeta().get(CITags.NODE_LABELS).contains(worker03.getNodeName()));
final TraceSpan prepareStage01 = spans.get(2);
assertNodeNameParallelBlock(prepareStage01, worker01, worker02);
@@ -1166,10 +1171,7 @@ public void testGlobalTagsPropagationsTraces() throws Exception {
jenkinsRule.createOnlineSlave(new LabelAtom("testGlobalTags"));
WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegration-GlobalTagsPropagation_job");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineGlobalTags.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineGlobalTags.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
job.scheduleBuild2(0).get();
@@ -1202,10 +1204,7 @@ public void testGlobalTagsPropagationsTraces() throws Exception {
@Test
public void testErrorPropagationOnStages() throws Exception {
WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegration-errorPropagationStages");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream(getFailingPipelineDefinitionName()),
- "UTF-8"
- );
+ String definition = getPipelineDefinition(getFailingPipelineDefinitionName());
job.setDefinition(new CpsFlowDefinition(definition, true));
job.scheduleBuild2(0).get();
@@ -1232,10 +1231,7 @@ public void testErrorPropagationOnStagesWebhook() throws Exception {
clientStub.configureForWebhooks();
WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegration-errorPropagationStagesWebhook");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream(getFailingPipelineDefinitionName()),
- "UTF-8"
- );
+ String definition = getPipelineDefinition(getFailingPipelineDefinitionName());
job.setDefinition(new CpsFlowDefinition(definition, true));
job.scheduleBuild2(0).get();
@@ -1261,17 +1257,14 @@ private String getFailingPipelineDefinitionName() {
return isRunningOnWindows() ? "testPipelineErrorOnStagesOnWindows.txt" : "testPipelineErrorOnStages.txt";
}
- private boolean isRunningOnWindows() {
+ private static boolean isRunningOnWindows() {
return System.getProperty("os.name").toLowerCase().contains("win");
}
@Test
public void testUnstablePropagationOnStages() throws Exception {
WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegration-unstablePropagationStages");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineUnstableOnStages.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineUnstableOnStages.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
job.scheduleBuild2(0).get();
@@ -1298,10 +1291,7 @@ public void testUnstablePropagationOnStagesWebhook() throws Exception {
clientStub.configureForWebhooks();
WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegration-unstablePropagationStagesWebhook");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineUnstableOnStages.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineUnstableOnStages.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
job.scheduleBuild2(0).get();
@@ -1329,10 +1319,7 @@ public void testCustomHostnameForWorkers() throws Exception {
env.put("DD_CI_HOSTNAME", "testDDCiHostname");
jenkinsRule.jenkins.getGlobalNodeProperties().add(envProps);
WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegrationCustomHostname");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineOnWorkers.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineOnWorkers.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
// schedule build and wait for it to get queued
@@ -1378,10 +1365,7 @@ public void testCustomHostnameForWorkersWebhook() throws Exception {
env.put("DD_CI_HOSTNAME", "testDDCiHostname");
jenkinsRule.jenkins.getGlobalNodeProperties().add(envProps);
WorkflowJob job = jenkinsRule.jenkins.createProject(WorkflowJob.class, "pipelineIntegrationCustomHostnameWebhook");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineOnWorkersWebhook.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineOnWorkersWebhook.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
// schedule build and wait for it to get queued
@@ -1525,10 +1509,7 @@ private void assertNoneNameParallelStep(TraceSpan step, TraceSpan stage01, Trace
public void testIsManualTrue() throws Exception {
Jenkins jenkins = jenkinsRule.jenkins;
WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationIsManualTrue");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineSuccess.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineSuccess.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
CauseAction causeAction = new CauseAction(new TimerTriggerCause(), new UserIdCause("johanna"));
job.scheduleBuild2(0, causeAction).get();
@@ -1547,10 +1528,7 @@ public void testIsManualTrueWebhooks() throws Exception {
clientStub.configureForWebhooks();
Jenkins jenkins = jenkinsRule.jenkins;
WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationIsManualTrueWebhook");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineSuccess.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineSuccess.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
CauseAction causeAction = new CauseAction(new TimerTriggerCause(), new UserIdCause("johanna"));
job.scheduleBuild2(0, causeAction).get();
@@ -1558,18 +1536,15 @@ public void testIsManualTrueWebhooks() throws Exception {
clientStub.waitForWebhooks(3);
final List webhooks = clientStub.getWebhooks();
assertEquals(3, webhooks.size());
- final JSONObject webhook = webhooks.get(0);
- assertTrue(webhook.getBoolean("is_manual"));
+ JSONObject pipeline = searchWebhookByLevel(webhooks, "pipeline");
+ assertTrue(pipeline.getBoolean("is_manual"));
}
@Test
public void testIsManualFalse() throws Exception {
Jenkins jenkins = jenkinsRule.jenkins;
WorkflowJob job = jenkins.createProject(WorkflowJob.class, "pipelineIntegrationIsManualFalse");
- String definition = IOUtils.toString(
- this.getClass().getResourceAsStream("testPipelineSuccess.txt"),
- "UTF-8"
- );
+ String definition = getPipelineDefinition("testPipelineSuccess.txt");
job.setDefinition(new CpsFlowDefinition(definition, true));
CauseAction causeAction = new CauseAction(new TimerTriggerCause(), new SCMTriggerCause("scm"));
job.scheduleBuild2(0, causeAction).get();
@@ -1577,21 +1552,26 @@ public void testIsManualFalse() throws Exception {
clientStub.waitForTraces(3);
final List spans = clientStub.getSpans();
assertEquals(3, spans.size());
- final TraceSpan buildSpan = spans.get(0);
+ final TraceSpan buildSpan = getBuild(spans);
final String isManual = buildSpan.getMeta().get(CITags.IS_MANUAL);
assertEquals("false", isManual);
}
+ private TraceSpan getBuild(List