This repository has been archived by the owner on Feb 21, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 2
/
Jenkinsfile
118 lines (108 loc) · 3.8 KB
/
Jenkinsfile
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
pipeline {
    agent {
        docker {
            reuseNode false
            image 'caufieldjh/kg-idg:4'
        }
    }
    // Scheduled monthly builds are currently disabled; uncomment to re-enable.
    //triggers{
    //    cron('H H 1 1-12 *')
    //}
    environment {
        // Build-start date stamp in YYYYMMDD form, captured once per build.
        BUILDSTARTDATE = sh(script: "date +%Y%m%d", returnStdout: true).trim()
        S3PROJECTDIR = '' // no trailing slash
        // Distribution ID for the AWS CloudFront for this bucket
        // used solely for invalidations
        AWS_CLOUDFRONT_DISTRIBUTION_ID = 'EUVSWXZQBXCFP'
    }
    options {
        timestamps()
        disableConcurrentBuilds()
    }
    stages {
        stage('Ready and clean') {
            steps {
                // Give us a minute to cancel if we want.
                sleep time: 30, unit: 'SECONDS'
            }
        }
        stage('Initialize') {
            // Print diagnostic info about the build environment.
            steps {
                dir('./working') {
                    sh 'env > env.txt'
                    sh 'echo $BRANCH_NAME > branch.txt'
                    sh 'echo "$BRANCH_NAME"'
                    sh 'cat env.txt'
                    sh 'cat branch.txt'
                    sh "echo $BUILDSTARTDATE"
                    sh "python3.9 --version"
                    sh "id"
                    sh "whoami" // this should be jenkinsuser
                    // if the above fails, then the docker host didn't start the docker
                    // container as a user that this image knows about. This will
                    // likely cause lots of problems (like trying to write to $HOME
                    // directory that doesn't exist, etc), so we should fail here and
                    // have the user fix this
                }
            }
        }
        stage('Setup') {
            // Create a virtualenv and install the tools needed by later stages.
            steps {
                dir('./working') {
                    sh '/usr/bin/python3.9 -m venv venv'
                    // NOTE: each sh step runs in a fresh shell, so a standalone
                    // '. venv/bin/activate' step would be a no-op; we invoke the
                    // venv's pip directly instead.
                    sh './venv/bin/pip install oaklib s3cmd'
                }
            }
        }
        stage('Run downloader') {
            steps {
                dir('./working') {
                    // -f: the sentinel file may not exist on a fresh workspace
                    // (cleanWs() runs after every build); plain rm would fail the stage.
                    sh '. venv/bin/activate && rm -f data/raw/uniprot_empty_organism.tsv'
                    sh '. venv/bin/activate && make all'
                }
            }
        }
        // Harry to help here
        stage('Upload result') {
            // Store similarity results at s3://kg-hub-public-data/frozen_incoming_data/uniprot
            steps {
                dir('./working') {
                    script {
                        withCredentials([
                                file(credentialsId: 's3cmd_kg_hub_push_configuration', variable: 'S3CMD_CFG'),
                                file(credentialsId: 'aws_kg_hub_push_json', variable: 'AWS_JSON'),
                                string(credentialsId: 'aws_kg_hub_access_key', variable: 'AWS_ACCESS_KEY_ID'),
                                string(credentialsId: 'aws_kg_hub_secret_key', variable: 'AWS_SECRET_ACCESS_KEY')]) {
                            // Bundle the downloaded proteomes and upload to the
                            // public KG-Hub bucket, invalidating CloudFront so the
                            // new archive is immediately visible.
                            sh 'tar -czvf uniprot_proteomes.tar.gz ./data/raw/s3'
                            sh '. venv/bin/activate && s3cmd -c $S3CMD_CFG put -pr --acl-public --cf-invalidate uniprot_proteomes.tar.gz s3://kg-hub-public-data/frozen_incoming_data/uniprot/'
                            // Should now appear at:
                            // https://kg-hub.berkeleybop.io/frozen_incoming_data/uniprot
                        }
                    }
                }
            }
        }
    }
    post {
        always {
            echo 'In always'
            echo 'Cleaning workspace...'
            cleanWs()
        }
        success {
            echo 'I succeeded!'
        }
        unstable {
            echo 'I am unstable :/'
        }
        failure {
            echo 'I failed :('
        }
        changed {
            echo 'Things were different before...'
        }
    }
}