diff --git a/.taskcluster.yml b/.taskcluster.yml index c678e8073..b114ff365 100644 --- a/.taskcluster.yml +++ b/.taskcluster.yml @@ -14,6 +14,7 @@ tasks: setup_pushapkscript: 'apt-get update && apt-get install -y default-jdk &&' setup_pushsnapscript: 'apt-get update && apt-get install -y libsodium-dev && truncate -s 0 /etc/os-release &&' setup_pushflatpakscript: 'apt-get update && apt-get install -y gir1.2-ostree-1.0 libgirepository1.0-dev &&' + setup_rust: 'rustup component add clippy rustfmt &&' in: # [ , , , ] - ['client', '37', '', ''] @@ -44,6 +45,8 @@ tasks: - ['signingscript', '38', '', 'mozilla/releng-signingscript'] - ['treescript', '37', '', ''] - ['treescript', '38', '', 'mozilla/releng-treescript'] + - ['pypiscript', '38', '', 'mozilla/releng-pypiscript'] + - ['rust', 'rust', '${setup_rust}', ''] # ------------------------------------------------------------------------- HEAD_REV: @@ -68,6 +71,8 @@ tasks: $if: 'tasks_for == "github-push" && event.ref[0:11] == "refs/heads/"' then: '${event.ref[11:]}' else: 'unknown' + + rust_version: 1.43 in: $flatten: $map: { "$eval": "PROJECTS" } @@ -115,7 +120,7 @@ tasks: in: $match: # Run code linting and unit tests for each project - 'run_tests == "1"': + 'run_tests == "1" && project_name != "rust"': taskId: '${as_slugid(project_name + python_version)}' provisionerId: 'releng-t' workerType: 'linux' @@ -147,6 +152,37 @@ tasks: description: 'Code linting and unit tests for ${project_name} on python ${python_version[0]}.${python_version[1]}' owner: '${OWNER}' source: '${REPO_URL}/raw/${HEAD_REV}/.taskcluster.yml' + 'run_tests == "1" && project_name == "rust"': + taskId: '${as_slugid("rust")}' + provisionerId: 'releng-t' + workerType: 'linux' + created: { $fromNow: '' } + deadline: { $fromNow: '4 hours' } + payload: + maxRunTime: 3600 + image: 'rust:${rust_version}' + command: + - sh + - -xce + - >- + cd /tmp && + wget ${REPO_URL}/archive/${HEAD_REV}.tar.gz && + tar zxf ${HEAD_REV}.tar.gz && + mv 
scriptworker-scripts-${HEAD_REV} /src && + cd /src && ${setup_command} + cargo test && cargo clippy && cargo fmt -- --check + metadata: + name: + $let: + test_task_number: + $if: 'dockerhub_repo != ""' + then: '${i+1}.1' + else: '${i+1}' + in: + '${number_prefix}${test_task_number}. ${project_name}: Run rust checks [on ${BRANCH_NAME}]' + description: 'Code linting and unit tests for rust code on rust ${rust_version}' + owner: '${OWNER}' + source: '${REPO_URL}/raw/${HEAD_REV}/.taskcluster.yml' # Build docker image and (optionally) push to docker hub 'run_tests == "1" && dockerhub_repo != ""': taskId: '${as_slugid(project_name + "docker_build_and_push")}' diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 000000000..918e30cb1 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,6 @@ +[workspace] + +members = [ + "pypiscript", + "script", +] diff --git a/docker.d/init.sh b/docker.d/init.sh index 1c5b4cc52..1df346442 100755 --- a/docker.d/init.sh +++ b/docker.d/init.sh @@ -74,6 +74,9 @@ case $COT_PRODUCT in adhoc) export TRUST_DOMAIN=adhoc ;; + releng) + export TRUST_DOMAIN=releng + ;; *) echo "Unknown COT_PRODUCT $COT_PRODUCT" exit 1 @@ -114,6 +117,7 @@ export WORK_DIR=/app/workdir export WORKER_TYPE="${TRUST_DOMAIN}-${TRUST_LEVEL}-${PROJECT_NAME}${WORKER_SUFFIX}" export WORKER_GROUP=${WORKER_TYPE} export WORKER_ID_PREFIX="${WORKER_TYPE}-" +export PASS_WORK_DIR=false # # ensure configuration folder exists we can write to it diff --git a/docker.d/scriptworker.yml b/docker.d/scriptworker.yml index d2d648ffe..f2ec2211f 100644 --- a/docker.d/scriptworker.yml +++ b/docker.d/scriptworker.yml @@ -14,6 +14,7 @@ task_max_timeout: 3600 task_script: - { "$eval": "TASK_SCRIPT" } - { "$eval": "TASK_CONFIG" } + - { "$if": "PASS_WORK_DIR == 'true'", then: "${WORK_DIR}" } verbose: { "$eval": "VERBOSE == 'true'" } verify_chain_of_trust: { "$eval": "VERIFY_CHAIN_OF_TRUST == 'true'" } sign_chain_of_trust: { "$eval": "SIGN_CHAIN_OF_TRUST == 'true'" } @@ -30,3 +31,39 @@ 
scriptworker_provisioners: - { "$eval": "PROVISIONER_ID" } - scriptworker-prov-v1 # keep for mac-v3-signing ed25519_private_key_path: { "$eval": "ED25519_PRIVKEY_PATH" } + +valid_decision_worker_pools: + by-cot-product: + releng: + - releng-1/linux + - releng-3/linux +valid_docker_image_worker_pools: + by-cot-product: + releng: + - releng-1/linux + - releng-3/linux +trusted_vcs_rules: + by-cot-product: + releng: [] +valid_tasks_for: + by-cot-product: + releng: + - "github-pull-request" + - "github-push" + - "github-release" +official_github_repos_owner: + by-cot-product: + releng: mozilla-releng +cot_restricted_scopes: + by-cot-product: + releng: [] +cot_restricted_trees: + by-cot-product: + releng: [] +prebuilt_docker_image_task_types: + - decision + - action + - docker-image +source_env_prefix: RELENG + +project_configuration_url: https://hg.mozilla.org/ci/ci-configuration-try/raw-file/65ca84a9c40e1d7764dce2ed8469165191f6764a/projects.yml diff --git a/pypiscript/Cargo.toml b/pypiscript/Cargo.toml new file mode 100644 index 000000000..40d15ea26 --- /dev/null +++ b/pypiscript/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "pypiscript" +version = "0.1.0" +authors = ["Tom Prince "] +edition = "2018" + +[dependencies] +serde = "1.0.99" +serde_derive = "1.0.99" +scriptworker_script = { path = "../script" } diff --git a/pypiscript/Dockerfile b/pypiscript/Dockerfile new file mode 100644 index 000000000..c99c1599b --- /dev/null +++ b/pypiscript/Dockerfile @@ -0,0 +1,46 @@ +FROM python:3.8 + +RUN groupadd --gid 10001 app && \ + useradd -g app --uid 10001 --shell /usr/sbin/nologin --create-home --home-dir /app app + +RUN ln -s /app/docker.d/healthcheck /bin/healthcheck + +ENV RUSTUP_HOME=/usr/local/rustup \ + CARGO_HOME=/usr/local/cargo \ + PATH=/usr/local/cargo/bin:$PATH \ + RUST_VERSION=1.42.0 + +RUN set -eux; \ + url="https://static.rust-lang.org/rustup/archive/1.21.1/x86_64-unknown-linux-gnu/rustup-init"; \ + wget "$url"; \ + echo 
"ad1f8b5199b3b9e231472ed7aa08d2e5d1d539198a15c5b1e53c746aad81d27b *rustup-init" | sha256sum -c -; \ + chmod +x rustup-init; \ + ./rustup-init -y --no-modify-path --profile minimal --default-toolchain $RUST_VERSION; \ + rm rustup-init; \ + chmod -R a+w $RUSTUP_HOME $CARGO_HOME; \ + rustup --version; \ + cargo --version; \ + rustc --version; + +COPY . /app/ +COPY pypiscript/docker.d/* /app/docker.d/ +RUN chown -R app:app /app + +USER app +WORKDIR /app + +RUN python -m venv /app \ + && cd pypiscript \ + && /app/bin/pip install -r requirements/base.txt \ + && python -m venv /app/configloader_venv \ + && cd /app/configloader \ + && /app/configloader_venv/bin/pip install -r requirements/base.txt \ + && /app/configloader_venv/bin/pip install . \ + && cd /app \ + && cargo install --path pypiscript --root . \ + && mkdir /app/configs + +RUN python -m pip install twine + + +CMD ["/app/docker.d/init.sh"] diff --git a/pypiscript/config-example.json b/pypiscript/config-example.json new file mode 100644 index 000000000..d2a519927 --- /dev/null +++ b/pypiscript/config-example.json @@ -0,0 +1,4 @@ +{ + "taskcluster_scope_prefix": "project:releng", + "project_config_file": "/Depot/Mozilla/scriptworker-scripts/pypiscript/passwords.yml" +} diff --git a/pypiscript/docker.d/init_worker.sh b/pypiscript/docker.d/init_worker.sh new file mode 100644 index 000000000..09988368e --- /dev/null +++ b/pypiscript/docker.d/init_worker.sh @@ -0,0 +1,28 @@ +#!/bin/bash +set -e errexit -o pipefail + +test_var_set() { + local varname=$1 + + if [[ -z "${!varname}" ]]; then + echo "error: ${varname} is not set" + exit 1 + fi +} + +# +# Check that all required variables exist +# +test_var_set 'CONFIG_DIR' +test_var_set 'CONFIG_LOADER' +test_var_set 'COT_PRODUCT' +test_var_set 'PROJECT_NAME' +test_var_set 'TEMPLATE_DIR' + +export PASS_WORK_DIR=true + +export PASSWORDS_PATH=$CONFIG_DIR/passwords.json + +if [[ ! 
-f $PASSWORDS_PATH ]]; then + echo "error: ${PASSWORDS_PATH} is missing" +fi diff --git a/pypiscript/docker.d/worker.yml b/pypiscript/docker.d/worker.yml new file mode 100644 index 000000000..525a95b45 --- /dev/null +++ b/pypiscript/docker.d/worker.yml @@ -0,0 +1,9 @@ +#artifact_dir: { "$eval": "ARTIFACTS_DIR" } +project_config_file: "${PASSWORDS_PATH}" +taskcluster_scope_prefix: + $let: + value: + $match: + 'COT_PRODUCT == "releng"': + 'project:releng' + in: '${value[0]}' diff --git a/pypiscript/requirements/base.in b/pypiscript/requirements/base.in new file mode 100644 index 000000000..e1e2285ab --- /dev/null +++ b/pypiscript/requirements/base.in @@ -0,0 +1 @@ +scriptworker diff --git a/pypiscript/requirements/base.txt b/pypiscript/requirements/base.txt new file mode 100644 index 000000000..6b2b1532a --- /dev/null +++ b/pypiscript/requirements/base.txt @@ -0,0 +1,45 @@ +# SHA1:c829b627d519a44c3705088241a3460302f21bf5 +# +# This file is autogenerated by pip-compile-multi +# To update, run: +# +# pip-compile-multi +# +aiohttp==3.6.2 # via scriptworker, taskcluster +aiomemoizettl==0.0.3 # via scriptworker +arrow==0.15.5 # via scriptworker +async-timeout==3.0.1 # via aiohttp, taskcluster +attrs==19.3.0 # via aiohttp, jsonschema +certifi==2019.11.28 # via requests +cffi==1.14.0 # via cryptography +chardet==3.0.4 # via aiohttp, requests +cryptography==2.8 # via jwcrypto, scriptworker +dictdiffer==0.8.1 # via scriptworker +github3.py==1.3.0 # via scriptworker +idna-ssl==1.1.0 # via aiohttp +idna==2.9 # via idna-ssl, requests, yarl +immutabledict==1.0.0 # via scriptworker +importlib-metadata==1.5.2 # via jsonschema +json-e==4.0.1 # via scriptworker +jsonschema==3.2.0 # via scriptworker +jwcrypto==0.7 # via github3.py +mohawk==1.1.0 # via taskcluster +multidict==4.7.5 # via aiohttp, yarl +pycparser==2.20 # via cffi +pyrsistent==0.16.0 # via jsonschema +python-dateutil==2.8.1 # via arrow, github3.py +pyyaml==5.3.1 # via scriptworker +requests==2.23.0 # via github3.py, 
taskcluster +scriptworker==32.2.1 # via -r pypiscript/requirements/base.in +six==1.14.0 # via cryptography, jsonschema, mohawk, pyrsistent, python-dateutil, taskcluster +slugid==2.0.0 # via taskcluster +taskcluster-urls==12.1.0 # via taskcluster +taskcluster==28.1.0 # via scriptworker +typing-extensions==3.7.4.1 # via aiohttp +uritemplate==3.0.1 # via github3.py +urllib3==1.25.8 # via requests +yarl==1.4.2 # via aiohttp +zipp==3.1.0 # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/pypiscript/src/main.rs b/pypiscript/src/main.rs new file mode 100644 index 000000000..cf41a1b1f --- /dev/null +++ b/pypiscript/src/main.rs @@ -0,0 +1,136 @@ +use scriptworker_script::{Context, Error, Task}; +use serde_derive::Deserialize; +use std::collections::HashMap; +use std::os::unix::process::ExitStatusExt; +use std::process::Command; + +#[derive(Deserialize)] +#[serde(deny_unknown_fields)] +struct Config { + #[serde( + alias = "project_config_file", + deserialize_with = "scriptworker_script::load_secrets" + )] + projects: HashMap, + #[serde(alias = "taskcluster_scope_prefix")] + scope_prefix: String, +} + +#[derive(Deserialize)] +#[serde(deny_unknown_fields)] +struct Project { + api_token: String, + //#[serde(default = "https://test.pypi/legacy/")] + repository_url: String, +} + +#[derive(Deserialize, Debug)] +struct Attr { + project: String, +} + +#[derive(Deserialize, Debug)] +struct Extra { + action: String, +} + +fn verify_payload(config: &Config, _: &Context, task: &Task) -> Result<(), Error> { + if task.payload.extra.action != "upload" { + return Err(Error::MalformedPayload(format!( + "Unsupported action: {}", + task.payload.extra.action + ))); + } + + task.require_scopes(task.payload.upstream_artifacts.iter().map(|upstream| { + let project_name = &upstream.attributes.project; + format!("{}:pypi:project:{}", config.scope_prefix, project_name) + })) +} + +fn run_command(mut command: Command, 
action: &dyn Fn() -> String) -> Result<(), Error> { + println!("Running: {:?}", command); + match command.status() { + Ok(result) => { + if !result.success() { + println!( + "Failed to {}: {}", + action(), + match (result.code(), result.signal()) { + (Some(code), _) => format!("exit code {}", code), + (_, Some(signal)) => format!("exited with signal {}", signal), + (None, None) => "unknown exit reason".to_string(), + } + ); + return Err(Error::Failure); + } + Ok(()) + } + Err(err) => { + println!("Failed to start command: {:?}", err); + Err(Error::Failure) + } + } +} + +impl Config { + fn get_project(&self, project_name: &str) -> Result<&Project, Error> { + self.projects.get(project_name).ok_or_else(|| { + Error::MalformedPayload(format!("Unknown pypi project {}", project_name)) + }) + } +} + +#[scriptworker_script::main] +fn do_work(config: Config, context: &Context, task: Task) -> Result<(), Error> { + verify_payload(&config, &context, &task)?; + + task.payload + .upstream_artifacts + .iter() + .map(|upstream| -> Result<(), Error> { + let project_name = &upstream.attributes.project; + // Ensure project exists + config.get_project(project_name)?; + + let mut command = Command::new("twine"); + command.arg("check"); + for artifact in &upstream.paths { + command.arg(artifact.file_path(context)); + } + run_command(command, &|| format!("upload files for {}", project_name)) + }) + .fold(Ok(()), Result::or)?; + + for upstream in &task.payload.upstream_artifacts { + let project_name = &upstream.attributes.project; + let project = config.get_project(project_name)?; + + println!( + "Uploading {} from task {} to {} for project {}", + &upstream + .paths + .iter() + .map(|p| p.task_path().to_string_lossy()) + .collect::>() + .join(", "), + &upstream.task_id, + project.repository_url, + project_name + ); + + let mut command = Command::new("twine"); + command + .arg("upload") + .arg("--user") + .arg("__token__") + .arg("--repository-url") + .arg(&project.repository_url); + for 
artifact in &upstream.paths { + command.arg(artifact.file_path(context)); + } + command.env("TWINE_PASSWORD", &project.api_token); + run_command(command, &|| format!("upload files for {}", project_name))?; + } + Ok(()) +} diff --git a/pypiscript/work/task.json b/pypiscript/work/task.json new file mode 100644 index 000000000..6b93cca37 --- /dev/null +++ b/pypiscript/work/task.json @@ -0,0 +1,13 @@ +{ + "scopes": ["project:releng:pypi:project:redo"], + "dependencies": [], + "payload": { + "action": "upload", + "upstreamArtifacts": [{ + "taskId": "slug", + "taskType": "scriptworker", + "paths": ["public/redo-2.0.3-py2.py3-none-any.whl"], + "project": "redo" + }] + } +} diff --git a/script/Cargo.toml b/script/Cargo.toml new file mode 100644 index 000000000..f644b3e8d --- /dev/null +++ b/script/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "scriptworker_script" +version = "0.1.0" +authors = ["Tom Prince "] +edition = "2018" + +[dependencies] +# We disable features for clap, since it isn't used interactively. 
+clap = {version = "2.33.0", default-features = false}
+serde_yaml = "0.8.9"
+serde_json = "1.0.40"
+serde = "1.0.99"
+serde_derive = "1.0.99"
+scriptworker_script_macros = { path = "macros" }
diff --git a/script/macros/Cargo.toml b/script/macros/Cargo.toml
new file mode 100644
index 000000000..5a755d4bf
--- /dev/null
+++ b/script/macros/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "scriptworker_script_macros"
+version = "0.1.0"
+authors = ["Tom Prince "]
+edition = "2018"
+
+[dependencies]
+syn = {version = "1.0.5", features=["full"]}
+quote = "1.0.2"
+
+[lib]
+proc-macro = true
diff --git a/script/macros/src/lib.rs b/script/macros/src/lib.rs
new file mode 100644
index 000000000..bb658ce22
--- /dev/null
+++ b/script/macros/src/lib.rs
@@ -0,0 +1,25 @@
+#![cfg(not(test))] // Work around for rust-lang/rust#62127
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+use quote::quote;
+
+#[proc_macro_attribute]
+pub fn main(args: TokenStream, item: TokenStream) -> TokenStream {
+    let input = syn::parse_macro_input!(item as syn::ItemFn);
+    let args = syn::parse_macro_input!(args as syn::AttributeArgs);
+
+    if !args.is_empty() {
+        panic!("???")
+    }
+
+    let name = &input.sig.ident;
+
+    let result = quote! {
+        #input
+        fn main() {
+            ::scriptworker_script::scriptworker_main(#name)
+        }
+    };
+    result.into()
+}
diff --git a/script/src/error.rs b/script/src/error.rs
new file mode 100644
index 000000000..a64d989e4
--- /dev/null
+++ b/script/src/error.rs
@@ -0,0 +1,59 @@
+use std::convert::From;
+
+#[derive(Clone)]
+pub enum Error {
+    Failure,
+    WorkerShutdown,
+    MalformedPayload(String),
+    ResourceUnavailable,
+    InternalError(String),
+    Superseded,
+    IntermittentTask,
+}
+
+impl From<std::io::Error> for Error {
+    fn from(err: std::io::Error) -> Error {
+        Error::InternalError(format!("{}", err))
+    }
+}
+
+impl From<serde_yaml::Error> for Error {
+    fn from(err: serde_yaml::Error) -> Error {
+        Error::InternalError(format!("{}", err))
+    }
+}
+
+impl Error {
+    pub(crate) fn exit_code(self) -> i32 {
+        match self {
+            Self::Failure => 1,
+            Self::WorkerShutdown => 2,
+            Self::MalformedPayload(_) => 3,
+            Self::ResourceUnavailable => 4,
+            Self::InternalError(_) => 5,
+            Self::Superseded => 6,
+            Self::IntermittentTask => 7,
+        }
+    }
+
+    #[allow(dead_code)]
+    pub(crate) fn description(self) -> &'static str {
+        match self {
+            Self::Failure => "failure",
+            Self::WorkerShutdown => "worker-shutdown",
+            Self::MalformedPayload(_) => "malformed-payload",
+            Self::ResourceUnavailable => "resource-unavailable",
+            Self::InternalError(_) => "internal-error",
+            Self::Superseded => "superseded",
+            Self::IntermittentTask => "intermittent-task",
+        }
+    }
+}
+
+/*
+impl std::fmt::Display for Error {
+    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+        write!(f,
+    }
+}
+*/
diff --git a/script/src/lib.rs b/script/src/lib.rs
new file mode 100644
index 000000000..91f8013a6
--- /dev/null
+++ b/script/src/lib.rs
@@ -0,0 +1,77 @@
+use serde::de::DeserializeOwned;
+use std::path::{Path, PathBuf};
+
+use clap::{App, Arg};
+
+mod error;
+pub use error::Error;
+
+pub mod task;
+pub use task::Task;
+
+pub struct Context {
+    work_dir: PathBuf,
+}
+
+fn init_config<T>() -> Result<(T, PathBuf), Error>
+where
+    T: DeserializeOwned,
+{
+    let matches = App::new("scriptworker")
+        .arg(Arg::with_name("CONFIG_FILE").index(1).required(true))
+        .arg(Arg::with_name("WORK_DIR").index(2).required(true))
+        .get_matches();
+
+    let config_file = matches.value_of_os("CONFIG_FILE").unwrap();
+    let work_dir = Path::new(matches.value_of_os("WORK_DIR").unwrap());
+    Ok((
+        serde_yaml::from_reader(std::fs::File::open(config_file)?)?,
+        work_dir.into(),
+    ))
+}
+
+pub fn load_secrets<'de, D, T>(deserializer: D) -> Result<T, D::Error>
+where
+    D: serde::Deserializer<'de>,
+    T: DeserializeOwned,
+{
+    let secret_file_path: String = serde::Deserialize::deserialize(deserializer)?;
+    let secret_file = std::fs::File::open(secret_file_path)
+        .map_err(|_| serde::de::Error::custom("Could not open secret file."))?;
+    Ok(serde_yaml::from_reader(secret_file)
+        .map_err(|_| serde::de::Error::custom("Could not parse secrets file."))?)
+}
+
+pub fn scriptworker_main<Config, A, E>(
+    do_work: impl FnOnce(Config, &Context, Task<A, E>) -> Result<(), Error>,
+) where
+    Config: DeserializeOwned,
+    A: DeserializeOwned,
+    E: DeserializeOwned,
+{
+    let result = (|| {
+        let (config, work_dir) = init_config::<Config>()?;
+        // TODO: logging
+        let task_filename = work_dir.join("task.json");
+        let task = Task::<A, E>::load(&task_filename)?;
+
+        do_work(config, &Context { work_dir }, task)
+    })();
+    match result {
+        Ok(()) => std::process::exit(0),
+        Err(err) => {
+            if let Error::MalformedPayload(message) = &err {
+                std::println!("{}", &message)
+            }
+            if let Error::InternalError(message) = &err {
+                std::println!("{}", &message)
+            }
+            std::process::exit(err.exit_code())
+        }
+    }
+    // TODO: Statuses
+}
+
+#[cfg(not(test))]
+// Work around for rust-lang/rust#62127
+pub use scriptworker_script_macros::main;
diff --git a/script/src/task.rs b/script/src/task.rs
new file mode 100644
index 000000000..1f640223c
--- /dev/null
+++ b/script/src/task.rs
@@ -0,0 +1,135 @@
+use std::path::{Path, PathBuf};
+
+use serde::de::DeserializeOwned;
+use serde_derive::Deserialize;
+
+use crate::error::Error;
+use crate::Context;
+
+#[derive(Deserialize, Debug)]
+pub struct Empty {}
+
+#[derive(Debug)]
+pub struct TaskArtifacts<A> {
+    pub task_type: String,
+    pub task_id: String,
+    // TODO: Path
+    pub paths: Vec<ArtifactPath>,
+    pub attributes: A,
+}
+
+impl<'de, A> serde::Deserialize<'de> for TaskArtifacts<A>
+where
+    A: serde::Deserialize<'de>,
+{
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        #[derive(Deserialize)]
+        #[serde(rename_all = "camelCase")]
+        struct RawArtifacts<A> {
+            pub task_type: String,
+            pub task_id: String,
+            // TODO: Path
+            pub paths: Vec<PathBuf>,
+            #[serde(flatten)]
+            pub attributes: A,
+        }
+        let raw: RawArtifacts<A> = serde::Deserialize::deserialize(deserializer)?;
+        let task_id = raw.task_id.clone();
+        let paths = raw
+            .paths
+            .into_iter()
+            .map(|path| {
+                if path.is_absolute() {
+                    Err(serde::de::Error::custom(
+                        "Cannot specify absolute path in upstreamArtifacts.",
+                    ))
+                } else {
+                    Ok(ArtifactPath {
+                        task_id: task_id.clone(),
+                        path,
+                    })
+                }
+            })
+            .collect::<Result<Vec<_>, _>>()?;
+        Ok(TaskArtifacts::<A> {
+            task_type: raw.task_type,
+            task_id: raw.task_id,
+            paths,
+            attributes: raw.attributes,
+        })
+    }
+}
+
+#[derive(Debug)]
+pub struct ArtifactPath {
+    task_id: String,
+    path: PathBuf,
+}
+
+impl ArtifactPath {
+    pub fn task_path(&self) -> &PathBuf {
+        &self.path
+    }
+    pub fn file_path(&self, context: &Context) -> PathBuf {
+        context
+            .work_dir
+            .join("cot")
+            .join(&self.task_id)
+            .join(&self.path)
+    }
+}
+
+#[derive(Deserialize, Debug)]
+#[serde(rename_all = "camelCase")]
+pub struct TaskPayload<A, E> {
+    pub upstream_artifacts: Vec<TaskArtifacts<A>>,
+    #[serde(flatten)]
+    pub extra: E,
+}
+
+#[derive(Deserialize, Debug)]
+pub struct Task<A, E> {
+    pub dependencies: Vec<String>,
+    pub scopes: Vec<String>,
+    pub payload: TaskPayload<A, E>,
+}
+
+impl<A, E> Task<A, E> {
+    pub(crate) fn load(filename: &Path) -> Result<Task<A, E>, Error>
+    where
+        A: DeserializeOwned,
+        E: DeserializeOwned,
+    {
+        let file = std::fs::File::open(filename)
+            .map_err(|_| Error::InternalError("Could not open task definition.".to_string()))?;
+        Ok(serde_json::from_reader(file).map_err(|err| {
+            Error::MalformedPayload(format!("Could not parse task payload: {}", err))
+        })?)
+    }
+
+    pub fn require_scope(&self, scope: &str) -> Result<(), Error> {
+        if self.scopes.iter().any(|x| x == scope) {
+            Ok(())
+        } else {
+            Err(Error::MalformedPayload(format!("missing scope {}", scope)))
+        }
+    }
+
+    pub fn require_scopes(&self, scopes: impl IntoIterator<Item = String>) -> Result<(), Error> {
+        let missing_scopes: Vec<_> = scopes
+            .into_iter()
+            .filter(|scope| self.scopes.iter().all(|x| x != scope))
+            .collect();
+        if missing_scopes.is_empty() {
+            Ok(())
+        } else {
+            Err(Error::MalformedPayload(format!(
+                "missing scopes: {:?}",
+                missing_scopes
+            )))
+        }
+    }
+}