Auto rerun test build in FAILURE status #5747

Merged: 2 commits, Nov 20, 2024
buildenv/jenkins/JenkinsfileBase (230 changes: 125 additions, 105 deletions)
@@ -274,7 +274,7 @@ def setupParallelEnv() {

echo "[PARALLEL: ${params.PARALLEL}] childJobNum is ${childJobNum}, creating jobs and running them in parallel..."
create_jobs = [:]

parallelTestParams = [:]
for (int i = 0; i < childJobNum; i++) {
def buildListName = env.BUILD_LIST
def childTest = ""
@@ -319,6 +319,7 @@ def setupParallelEnv() {
parallel_tests[childTest] = {
build job: TEST_JOB_NAME, parameters: childParams, propagate: false
}
parallelTestParams[TEST_JOB_NAME] = childParams
}

if (create_jobs) {
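
The parallelTestParams map added above records, for each child test job, the exact parameter list it was launched with; the FAILURE-rerun path introduced later in this PR resubmits those entries unchanged. A minimal standalone Groovy sketch of that shape (the job names, parameter values and the failureJobs list are made up for illustration):

// Hypothetical illustration: each child test job name maps to the parameter
// list it was launched with, so a build that later finishes in FAILURE can be
// resubmitted with identical parameters.
def parallelTestParams = [:]
parallelTestParams['Test_sanity.system_testList_0'] = [[name: 'TARGET', value: 'sanity.system'], [name: 'PARALLEL', value: 'None']]
parallelTestParams['Test_sanity.system_testList_1'] = [[name: 'TARGET', value: 'sanity.system'], [name: 'PARALLEL', value: 'None']]

// Keep only the entries whose child build ended in FAILURE (made-up result here).
def failureJobs = ['Test_sanity.system_testList_1']
def rerunTestJobParams = parallelTestParams.findAll { it.key in failureJobs }
rerunTestJobParams.each { name, jobParams ->
    println "would rerun ${name}_rerun with ${jobParams.size()} original parameters"
}
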
@@ -1359,136 +1360,155 @@ def generateJob (newJobs, childTest, testJobName) {
}
}
}
def triggerRerunJob () {
// if the JOB_NAME contains _rerun or _testList_, we will not trigger rerun job
/* triggerRerunJob() triggers a rerun at the parent job level. There are two types of rerun builds:
* - rerun due to failed test(s): RERUN_ITERATIONS > 0 and the list of failed test target(s) (i.e., env.FAILED_TESTS) cannot be empty
* - rerun due to a test job in FAILURE state: RERUN_FAILURE is true and rerunTestJobParams cannot be empty
* If the JOB_NAME contains _rerun, _testList_ or _iteration_, no rerun job is triggered at that level.
*/
def triggerRerunJob (rerunTestJobParams) {
if (!JOB_NAME.contains("_rerun") && !JOB_NAME.contains("_testList_") && !JOB_NAME.contains("_iteration_")) {
int rerunIterations = params.RERUN_ITERATIONS ? params.RERUN_ITERATIONS.toInteger() : 0
if (rerunIterations > 0 && env.FAILED_TESTS?.trim()) {
if ((rerunIterations > 0 && env.FAILED_TESTS?.trim()) || (params.RERUN_FAILURE && rerunTestJobParams)) {
stage('Rerun') {
def rerunJobName = "${JOB_NAME}_rerun"
def newJobs = [:]
def childParams = []
def rerunTestJobs = [:]
echo "allocate a node for generating rerun job ..."
node (env.SETUP_LABEL) {
generateJob(newJobs, rerunJobName, rerunJobName)
parallel newJobs

// loop through all the params and change the parameters if needed
params.each { param ->
// set PARALLEL, NUM_MACHINES and TEST_TIME to default values
// set TARGET to failed tests and set ITERATIONS to rerunIterations
if (param.key == "TARGET") {
if (env.RERUN_TESTCASES) {
childParams << string(name: param.key, value: env.RERUN_CUSTOMTARGET)
} else {
if (env.FAILED_TESTS.contains("_custom")) {
childParams << string(name: param.key, value: env.FAILED_TESTS)
if (rerunIterations > 0 && env.FAILED_TESTS?.trim()) {
def rerunJobName = "${JOB_NAME}_rerun"
def newJobs = [:]
def childParams = []
echo "Generating rerun ${rerunJobName} job for running failed test(s) ..."
generateJob(newJobs, rerunJobName, rerunJobName)
parallel newJobs

// loop through all the params and change the parameters if needed
params.each { param ->
// set PARALLEL, NUM_MACHINES and TEST_TIME to default values
// set TARGET to failed tests and set ITERATIONS to rerunIterations
if (param.key == "TARGET") {
if (env.RERUN_TESTCASES) {
childParams << string(name: param.key, value: env.RERUN_CUSTOMTARGET)
} else {
childParams << string(name: param.key, value: "testList TESTLIST=" + env.FAILED_TESTS)
if (env.FAILED_TESTS.contains("_custom")) {
childParams << string(name: param.key, value: env.FAILED_TESTS)
} else {
childParams << string(name: param.key, value: "testList TESTLIST=" + env.FAILED_TESTS)
}
}
}
} else if (param.key == "CUSTOM_TARGET") {
if (env.RERUN_TESTCASES) {
childParams << string(name: param.key, value: env.RERUN_TESTCASES)
} else {
childParams << string(name: param.key, value: param.value.toString())
}
} else if (param.key == "PARALLEL") {
childParams << string(name: param.key, value: "None")
} else if (param.key == "NUM_MACHINES") {
childParams << string(name: param.key, value: "")
} else if (param.key == "TEST_TIME") {
childParams << string(name: param.key, value: "")
} else if (param.key == "ITERATIONS") {
childParams << string(name: param.key, value: rerunIterations.toString())
} else {
def value = param.value.toString()
if (value == "true" || value == "false") {
childParams << booleanParam(name: param.key, value: value.toBoolean())
} else if (param.key == "CUSTOM_TARGET") {
if (env.RERUN_TESTCASES) {
childParams << string(name: param.key, value: env.RERUN_TESTCASES)
} else {
childParams << string(name: param.key, value: param.value.toString())
}
} else if (param.key == "PARALLEL") {
childParams << string(name: param.key, value: "None")
} else if (param.key == "NUM_MACHINES") {
childParams << string(name: param.key, value: "")
} else if (param.key == "TEST_TIME") {
childParams << string(name: param.key, value: "")
} else if (param.key == "ITERATIONS") {
childParams << string(name: param.key, value: rerunIterations.toString())
} else {
childParams << string(name: param.key, value: value)
def value = param.value.toString()
if (value == "true" || value == "false") {
childParams << booleanParam(name: param.key, value: value.toBoolean())
} else {
childParams << string(name: param.key, value: value)
}
}
}
rerunTestJobs[rerunJobName] = {
build job: rerunJobName, parameters: childParams, propagate: false
}
}
// generate job to rerun FAILURE test build(s)
if (params.RERUN_FAILURE && rerunTestJobParams) {
def newRerunJobs = [:]
rerunTestJobParams.each {
name, jobParams ->
def rerunJobName = "${name}_rerun"
echo "Generating rerun ${rerunJobName} job for running FAILURE test build(s) ..."
generateJob(newRerunJobs, rerunJobName, rerunJobName)
rerunTestJobs[rerunJobName] = {
build job: rerunJobName, parameters: jobParams, propagate: false
}
}
// generate the new rerun job
if (newRerunJobs) {
parallel newRerunJobs
}
}
}
if (rerunTestJobs) {
echo "Triggering rerun jobs in parallel ..."
def childJobs = parallel rerunTestJobs
archiveChildJobTap(childJobs)
}
}
}
}
}

if (childParams) {
def rerunJob = build job: rerunJobName, parameters: childParams, propagate: false
def jobResult = rerunJob.getResult()
def buildId = rerunJob.getNumber()
echo "${rerunJobName} #${buildId} completed with status ${jobResult}"
node (env.SETUP_LABEL) {
timeout(time: 1, unit: 'HOURS') {
try {
copyArtifacts (projectName: "${rerunJobName}",
selector: specific("${buildId}"),
filter: "**/*.tap",
target:"${rerunJobName}/${buildId}")
} catch (Exception e) {
echo 'Exception: ' + e.toString()
echo "Cannot copy *.tap from ${rerunJobName} with build id ${buildId} . Skipping copyArtifacts..."
}
def archiveChildJobTap(childJobs) {
node (env.SETUP_LABEL) {
def childTestJobParams = [:]
forceCleanWS()
try {
def buildPaths = ""
childJobs.each {
cjob ->
def jobInvocation = cjob.value.getRawBuild()
def buildId = jobInvocation.getNumber()
def name = cjob.value.getProjectName()
def childResult = cjob.value.getCurrentResult()
echo "${name} #${buildId} completed with status ${childResult}"
// track childTestJobParams if it is FAILURE job
if (childResult == "FAILURE") {
childTestJobParams << parallelTestParams.findAll {it.key == name}
}

try {
step([$class: "TapPublisher", testResults: "${rerunJobName}/${buildId}/**/*.tap", outputTapToConsole: false, failIfNoResults: true])
archiveFile("${rerunJobName}/${buildId}/**/*.tap", true)
} catch (Exception e) {
echo 'Exception: ' + e.toString()
echo "Cannot archive tap files from ${rerunJobName} with build id ${buildId}. Exit ..."
}
try {
timeout(time: 1, unit: 'HOURS') {
copyArtifacts (projectName: "${name}", selector: specific("${buildId}"), filter: "**/*.tap", target:"${name}/${buildId}")
}

step([$class: "TapPublisher", testResults: "${name}/${buildId}/**/*.tap", outputTapToConsole: false, failIfNoResults: true])
archiveFile("${name}/${buildId}/**/*.tap", true)
// add failed tests to Grinder link if it is not from the rerun build
if (!name.contains("_rerun")) {
buildPaths += "${name}/${buildId}/,"
}
} catch (Exception e) {
echo 'Exception: ' + e.toString()
echo "Cannot copy *.tap or AQACert.log from ${name} with buildid ${buildId} . Skipping copyArtifacts..."
}
}
if (!currentBuild.resultIsWorseOrEqualTo(childResult)) {
currentBuild.result = childResult;
}
}
def resultSum = parseResultSumFromTaps()
checkTestResults(resultSum)

archiveAQAvitFiles()
if (buildPaths.length() > 0) {
addFailedTestsGrinderLink(buildPaths.substring(0, buildPaths.length() - 1))
}
} finally {
forceCleanWS()
}
return childTestJobParams
}
}

def run_parallel_tests() {
def runParallelTests() {
def rerunTestJobParams = [:]
if (params.PARALLEL && params.PARALLEL != "None" && (NUM_MACHINES > 1 || params.TEST_TIME)) {
stage ("Parallel Tests") {
def childJobs = parallel parallel_tests
node (env.SETUP_LABEL) {
forceCleanWS()
try {
def buildPaths = ""
childJobs.each {
cjob ->
def jobInvocation = cjob.value.getRawBuild()
def buildId = jobInvocation.getNumber()
def name = cjob.value.getProjectName()
def childResult = cjob.value.getCurrentResult()
try {
echo "${name} #${buildId} completed with status ${childResult}"
timeout(time: 1, unit: 'HOURS') {
copyArtifacts (projectName: "${name}", selector: specific("${buildId}"), filter: "**/*.tap", target:"${name}/${buildId}")
}
step([$class: "TapPublisher", testResults: "${name}/${buildId}/**/*.tap", outputTapToConsole: false, failIfNoResults: true])
archiveFile("${name}/${buildId}/**/*.tap", true)
buildPaths += "${name}/${buildId}/,"
} catch (Exception e) {
echo 'Exception: ' + e.toString()
echo "Cannot copy *.tap or AQACert.log from ${name} with buildid ${buildId} . Skipping copyArtifacts..."
}
if (!currentBuild.resultIsWorseOrEqualTo(childResult)) {
currentBuild.result = childResult;
}
}

def resultSum = parseResultSumFromTaps()
checkTestResults(resultSum)

archiveAQAvitFiles()
if (buildPaths.length() > 0) {
addFailedTestsGrinderLink(buildPaths.substring(0, buildPaths.length() - 1))
}
} finally {
forceCleanWS()
}
}
rerunTestJobParams = archiveChildJobTap(childJobs)
}
}
return rerunTestJobParams
}

def parseResultSumFromTaps() {
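
Taken together, the new flow in JenkinsfileBase is: runParallelTests() launches the child test builds, archiveChildJobTap() archives their TAP output and returns the launch parameters of every child build that ended in FAILURE, and triggerRerunJob() resubmits exactly those jobs as "<name>_rerun". A simplified, self-contained Groovy sketch of that data flow (plain maps and println stand in for the Jenkins build step and run objects; the job names and results are illustrative only):

// Simplified model, not the actual pipeline code: only the parameter
// bookkeeping and the FAILURE-build rerun path are represented.
def archiveChildJobTap(Map childResults, Map allParams) {
    // the real method also copies and publishes *.tap files for each child build
    return allParams.findAll { name, p -> childResults[name] == 'FAILURE' }
}

def triggerRerunJob(Map rerunTestJobParams, boolean rerunFailure) {
    // the real method also handles the RERUN_ITERATIONS / FAILED_TESTS path
    if (rerunFailure && rerunTestJobParams) {
        rerunTestJobParams.each { name, p ->
            println "would trigger ${name}_rerun with original parameters ${p}"
        }
    }
}

def parallelTestParams = ['childJobA': ['TARGET=sanity.functional'], 'childJobB': ['TARGET=extended.system']]
def childResults = ['childJobA': 'SUCCESS', 'childJobB': 'FAILURE']
def rerunTestJobParams = archiveChildJobTap(childResults, parallelTestParams)
triggerRerunJob(rerunTestJobParams, true)   // prints only the childJobB rerun line
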
buildenv/jenkins/openjdk_tests (4 changes: 2 additions, 2 deletions)
@@ -404,8 +404,8 @@ timestamps{
if (currentBuild.result != 'FAILURE') {
env.SETUP_LABEL = params.SETUP_LABEL ?: "ci.role.test"
echo "SETUP_LABEL: ${env.SETUP_LABEL}"
jenkinsfile.run_parallel_tests()
jenkinsfile.triggerRerunJob()
def rerunTestJobParams = jenkinsfile.runParallelTests()
jenkinsfile.triggerRerunJob(rerunTestJobParams)
}
} else {
assert false : "Cannot find key PLATFORM: ${params.PLATFORM} in PLATFORM_MAP: ${PLATFORM_MAP}."
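
The caller in openjdk_tests now passes the map returned by runParallelTests() straight into triggerRerunJob(rerunTestJobParams), so the FAILURE-rerun path is active only when params.RERUN_FAILURE is set and the map is non-empty. A standalone Groovy rendering of the new gate condition with sample values (shouldRerun is a hypothetical helper mirroring the condition inside triggerRerunJob; the real code reads params.RERUN_ITERATIONS, env.FAILED_TESTS and params.RERUN_FAILURE):

// Hypothetical helper: evaluates the same two-way rerun condition with plain arguments.
def shouldRerun(int rerunIterations, String failedTests, boolean rerunFailure, Map rerunTestJobParams) {
    return (rerunIterations > 0 && failedTests?.trim()) || (rerunFailure && rerunTestJobParams)
}

assert !shouldRerun(0, '', false, [:])                        // nothing to rerun
assert shouldRerun(2, 'jdk_math_0 jdk_net_1', false, [:])     // failed test target path (sample targets)
assert shouldRerun(0, '', true, [childJobB: ['TARGET=x']])    // FAILURE test build path
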