Skip to content

Commit

Permalink
Use S3_FILES_TO_UPLOAD and test it
Browse files Browse the repository at this point in the history
Signed-off-by: Jose Luis Rivero <[email protected]>
  • Loading branch information
j-rivero committed Oct 4, 2023
1 parent a5207c8 commit 7edc52e
Show file tree
Hide file tree
Showing 3 changed files with 22 additions and 9 deletions.
4 changes: 2 additions & 2 deletions jenkins-scripts/dsl/_configs_/Globals.groovy
Original file line number Diff line number Diff line change
Expand Up @@ -136,12 +136,12 @@ class Globals
return package_name.replaceAll('\\d*$', '')
}

static String _s3_releases_dir(String package_name) {
static String s3_releases_dir(String package_name) {
return get_canonical_package_name(package_name) + '/releases'
}

static String s3_upload_tarball_path(String package_name) {
return 's3://osrf-distributions/' + _s3_releases_dir(package_name)
return 's3://osrf-distributions/' + s3_releases_dir(package_name)
}

static String s3_download_url_basedir(String package_name) {
Expand Down
9 changes: 3 additions & 6 deletions jenkins-scripts/dsl/_configs_/OSRFSourceCreation.groovy
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,6 @@ class OSRFSourceCreation
stringParam("UPLOAD_TO_REPO",
default_params.find{ it.key == "UPLOAD_TO_REPO"}?.value,
"For downstream jobs: OSRF repo name to upload the package to: stable | prerelease | nightly | none (for testing purposes)")
stringParam("PROJECT_NAME_TO_COPY_ARTIFACTS",
"",
"Internal use: parent job name passed by the job to be used in copy artifacts")
}
}
}
Expand Down Expand Up @@ -99,7 +96,7 @@ class OSRFSourceCreation
exit 1
fi
echo "TARBALL_NAME=\${tarball}" >> ${properties_file}
echo "S3_FILES_TO_UPLOAD=\${tarball}" >> ${properties_file}
echo "SOURCE_TARBALL_URI=$s3_download_url_basedir/\${tarball}" >> ${properties_file}
""".stripIndent()
)
Expand Down Expand Up @@ -130,8 +127,8 @@ class OSRFSourceCreation
parameters {
currentBuild()
predefinedProps([PROJECT_NAME_TO_COPY_ARTIFACTS: '${JOB_NAME}',
S3_UPLOAD_PATH: Globals.s3_upload_tarball_path(package_name)])
propertiesFile(properties_file) // TARBALL_NAME
S3_UPLOAD_PATH: Globals.s3_releases_dir(package_name)]) // relative path
propertiesFile(properties_file) // S3_FILES_TO_UPLOAD
}
}
}
Expand Down
18 changes: 17 additions & 1 deletion jenkins-scripts/dsl/test.dsl
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ repo_uploader.with
stringParam('PACKAGE','','Package name')
stringParam('TARBALL_NAME', '', 'Tarball name to upload')
stringParam('S3_UPLOAD_PATH','', 'S3 path to upload')
stringParam('S3_FILES_TO_UPLOAD','', 'S3 file names to upload')
stringParam('UPLOAD_TO_REPO','none','repo to upload')
stringParam("PROJECT_NAME_TO_COPY_ARTIFACTS",
"",
Expand All @@ -76,7 +77,22 @@ repo_uploader.with
ls -R \${WORKSPACE}
test -f \${WORKSPACE}/${pkg_sources_dir}/\${TARBALL_NAME}

echo "Fake upload of \${TARBALL_NAME} to \${S3_UPLOAD_PATH}"
echo "Fake upload of \${S3_FILES_TO_UPLOAD} to \${S3_UPLOAD_PATH}"
// code copied from repository_uploader
pkgs_path="\$WORKSPACE/pkgs"

for pkg in \${S3_FILES_TO_UPLOAD}; do
# S3_UPLOAD_PATH should be sent by the upstream job
if [[ -z \${S3_UPLOAD_PATH} ]]; then
echo "S3_UPLOAD_PATH was not defined. Not uploading"
exit 1
fi

# Seems important to upload the path with a final slash
echo "WILL RUN: s3cmd \${pkgs_path}/\${pkg} \${S3_UPLOAD_PATH}"
done


""".stripIndent())
}
}
Expand Down

0 comments on commit 7edc52e

Please sign in to comment.