// Dependencies for the buildscript (not the program)
buildscript {
repositories {
mavenCentral()
}
}
plugins {
id "java"
id "application"
id "com.github.johnrengelman.shadow" version "8.1.1" //used to build the shadow and sparkJars
id 'com.palantir.git-version' version '0.5.1' //version helper
}
import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
import java.util.stream.Collectors
repositories {
mavenCentral()
maven {
url "https://broadinstitute.jfrog.io/broadinstitute/libs-snapshot/"
}
}
// If -Drelease=true is passed, this returns only the last tag (e.g. 1.0).
// Otherwise it returns the tag plus the commit count and hash, e.g.: 1.08-7-g72826dd
final isRelease = Boolean.getBoolean("release")
final details = versionDetails()
version = (isRelease ? details.lastTag : gitVersion() + "-SNAPSHOT").replaceAll("\\.dirty", "")
logger.info("build for version: " + version)
//NOTE: we ignore contracts for now
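// Flag notes: -proc:none disables annotation processing, -Xlint:all enables all lint
// categories, -Werror turns those warnings into build failures, and -Xdiags:verbose
// makes javac emit more detailed error messages.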
compileJava {
options.compilerArgs = ['-proc:none', '-Xlint:all', '-Werror', '-Xdiags:verbose']
}
compileTestJava {
options.compilerArgs = ['-proc:none', '-Xlint:all', '-Werror', '-Xdiags:verbose']
}
configurations {
externalSourceConfiguration {
// External sources we need for doc and tab completion generation tasks (i.e., Picard sources)
transitive false
}
}
mainClassName = "com.github." + rootProject.name.toLowerCase() + ".Main"
//see this thread: https://github.com/broadinstitute/gatk/issues/2300#issuecomment-333627036
final gatkVersion = '4.5.0.0'
final htsjdkVersion = System.getProperty('htsjdk.version','4.1.0')
final barclayVersion = System.getProperty('barclay.version','5.0.0')
final luceneVersion = System.getProperty('lucene.version','9.10.0')
final testNGVersion = '7.0.0'
final googleCloudNioDependency = 'com.google.cloud:google-cloud-nio:0.127.8'
final log4j2Version = System.getProperty('log4j2Version', '2.17.1')
final docBuildDir = "$buildDir/docs"
logger.info("documentation build directory: " + docBuildDir)
configurations.configureEach {
resolutionStrategy {
// force the htsjdk version so we don't get a different one transitively via GATK
force 'com.github.samtools:htsjdk:' + htsjdkVersion
// later protobuf versions break Hadoop
// TODO: this is the same in GATK, but we should check if they solve this issue in the future
force 'com.google.protobuf:protobuf-java:3.21.6'
// force testng dependency so we don't pick up a different version via GenomicsDB
force 'org.testng:testng:' + testNGVersion
force 'org.broadinstitute:barclay:' + barclayVersion
// make sure we don't pick up an incorrect version of the GATK variant of the google-nio library
// via Picard, etc.
force googleCloudNioDependency
// Added to avoid GATK forcing commons-text 1.10, and opencsv forcing 1.11
force 'org.apache.commons:commons-text:1.11.0'
}
}
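// To verify which versions actually win after these forces, one can run, for example:
//   ./gradlew dependencies --configuration runtimeClasspath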
dependencies {
implementation 'org.broadinstitute:barclay:' + barclayVersion
implementation 'org.apache.lucene:lucene-core:' + luceneVersion
implementation 'org.apache.lucene:lucene-queryparser:' + luceneVersion
// use the same GATK dependency for compile and documentation
final gatkDependency = 'org.broadinstitute:gatk:' + gatkVersion
implementation (gatkDependency) {
exclude module: 'jgrapht' // this is not required
}
externalSourceConfiguration 'org.broadinstitute:gatk:' + gatkVersion + ':sources'
implementation group: 'org.broadinstitute', name: 'gatk-test-utils', version: gatkVersion
implementation group: 'com.github.samtools', name: 'htsjdk', version: htsjdkVersion
implementation googleCloudNioDependency
// this comes built-in when running on Google Dataproc, but the library
// allows us to read from GCS also when testing locally (or on non-Dataproc clusters,
// should we want to)
//implementation 'com.google.cloud.bigdataoss:gcs-connector:1.6.3-hadoop2'
implementation group: 'com.milaboratory', name: 'milib', version: '2.1.0'
implementation 'com.opencsv:opencsv:5.9'
implementation 'org.apache.commons:commons-lang3:3.14.0'
implementation group: 'commons-io', name: 'commons-io', version: '2.15.1'
// compilation for testing
testImplementation 'org.testng:testng:' + testNGVersion
implementation 'org.biojava:biojava-core:7.1.0'
implementation group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-yaml', version: '2.12.0'
implementation group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.12.7.1'
implementation 'org.apache.logging.log4j:log4j-api:' + log4j2Version
implementation 'org.apache.logging.log4j:log4j-core:' + log4j2Version
// include the apache commons-logging bridge that matches the log4j version we use so
// messages that originate with dependencies that use commons-logging (such as jexl)
// are routed to log4j
implementation 'org.apache.logging.log4j:log4j-jcl:' + log4j2Version
}
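// The versions read via System.getProperty above can be overridden per-build; for
// example (version number hypothetical): ./gradlew build -Dhtsjdk.version=4.1.1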
wrapper {
gradleVersion = '8.2.1'
}
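// Running ./gradlew wrapper regenerates the wrapper scripts pinned to this Gradle version.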
final runtimeAddOpens = [
// taken from the union of everything encountered by tests, plus everything defined here:
// https://github.com/apache/spark/blob/v3.3.0/launcher/src/main/java/org/apache/spark/launcher/JavaModuleOptions.java
'java.base/java.lang=ALL-UNNAMED',
'java.base/java.lang.invoke=ALL-UNNAMED',
'java.base/java.lang.reflect=ALL-UNNAMED',
'java.base/java.io=ALL-UNNAMED',
'java.base/java.net=ALL-UNNAMED',
'java.base/java.nio=ALL-UNNAMED',
'java.base/java.util=ALL-UNNAMED',
'java.base/java.util.concurrent=ALL-UNNAMED',
'java.base/java.util.concurrent.atomic=ALL-UNNAMED',
'java.base/sun.nio.ch=ALL-UNNAMED',
'java.base/sun.nio.cs=ALL-UNNAMED',
'java.base/sun.security.action=ALL-UNNAMED',
'java.base/sun.util.calendar=ALL-UNNAMED',
'java.base/sun.nio.fs=ALL-UNNAMED',
'java.base/java.nio.channels.spi=ALL-UNNAMED',
'java.base/jdk.internal.ref=ALL-UNNAMED',
'java.base/java.lang.ref=ALL-UNNAMED',
'java.base/java.util.zip=ALL-UNNAMED',
'java.base/java.util.jar=ALL-UNNAMED',
'java.base/java.nio.file.attribute=ALL-UNNAMED',
'java.base/jdk.internal.loader=ALL-UNNAMED',
'java.base/sun.net.www.protocol.jar=ALL-UNNAMED',
'java.base/sun.invoke.util=ALL-UNNAMED',
'java.base/java.util.concurrent.locks=ALL-UNNAMED',
'java.base/java.security=ALL-UNNAMED',
'java.base/sun.reflect.annotation=ALL-UNNAMED',
'java.base/java.text=ALL-UNNAMED',
'java.base/java.nio.charset=ALL-UNNAMED',
'java.base/sun.reflect.generics.reflectiveObjects=ALL-UNNAMED',
'java.management/com.sun.jmx.mbeanserver=ALL-UNNAMED',
'java.management/javax.management=ALL-UNNAMED',
'java.base/java.util.regex=ALL-UNNAMED',
'java.base/sun.util.locale=ALL-UNNAMED',
'java.base/jdk.internal.math=ALL-UNNAMED',
'java.xml/com.sun.xml.internal.stream.util=ALL-UNNAMED',
'java.base/java.time=ALL-UNNAMED',
'java.base/sun.reflect.generics.factory=ALL-UNNAMED',
'java.base/java.nio.channels=ALL-UNNAMED',
'java.base/sun.security.util=ALL-UNNAMED',
'java.base/java.time.zone=ALL-UNNAMED',
'java.base/sun.reflect.generics.scope=ALL-UNNAMED',
'java.base/sun.reflect.generics.tree=ALL-UNNAMED',
'java.management/com.sun.jmx.interceptor=ALL-UNNAMED',
'java.management/javax.management.openmbean=ALL-UNNAMED',
'java.management/sun.management=ALL-UNNAMED',
'jdk.management/com.sun.management.internal=ALL-UNNAMED',
'jdk.management.jfr/jdk.management.jfr=ALL-UNNAMED',
'jdk.jfr/jdk.jfr.internal.management=ALL-UNNAMED',
'java.base/jdk.internal.module=ALL-UNNAMED',
'java.base/java.lang.module=ALL-UNNAMED',
'java.security.jgss/sun.security.krb5=ALL-UNNAMED'
]
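// Each entry above is expanded into a pair of JVM arguments for the test task below, e.g.:
//   --add-opens java.base/java.lang=ALL-UNNAMED
// and (minus the ALL-UNNAMED target) into the manifest Add-Opens attribute.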
final testAddOpens = [
'java.prefs/java.util.prefs=ALL-UNNAMED' // required for jacoco tasks
]
tasks.withType(Jar).configureEach {
// transform the list of --add-opens directives into manifest format, which requires only the source
// package (unlike the command line equivalent, in the manifest the "ALL-UNNAMED" target is implied
// and can't be included in the manifest entry syntax)
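// e.g. 'java.base/java.lang=ALL-UNNAMED' becomes 'java.base/java.lang'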
final manifestAddOpens = runtimeAddOpens.stream()
.map(o -> o.substring(0, (o.length() - "ALL-UNNAMED".length()) - 1))
.collect(Collectors.joining(' '))
manifest {
attributes 'Implementation-Title': 'DISCVR-seq Toolkit',
'Implementation-Version': archiveVersion,
'Main-Class': mainClassName,
'GATK-Version': gatkVersion,
'htsjdk-Version': htsjdkVersion,
'Multi-Release': 'true',
'Add-Opens': manifestAddOpens
}
}
tasks.withType(ShadowJar).configureEach {
from(project.sourceSets.main.output)
archiveBaseName = project.name
mergeServiceFiles()
relocate 'com.google.common', 'org.broadinstitute.hellbender.relocated.com.google.common'
zip64 true
exclude 'log4j.properties' // from adam jar as it clashes with hellbender's log4j2.xml
exclude '**/*.SF' // these are Manifest signature files and
exclude '**/*.RSA' // keys which may accidentally be imported from other signed projects and then fail at runtime
// Suggested by the akka devs to make sure that we do not get the spark configuration error.
// http://doc.akka.io/docs/akka/snapshot/general/configuration.html#When_using_JarJar__OneJar__Assembly_or_any_jar-bundler
transform(com.github.jengelman.gradle.plugins.shadow.transformers.AppendingTransformer) {
resource = 'reference.conf'
}
}
shadowJar {
configurations = [project.configurations.runtimeClasspath]
archiveBaseName = project.name
archiveClassifier = ''
mergeServiceFiles('reference.conf')
dependsOn(distTar, distZip, startScripts)
}
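// Usage sketch: ./gradlew shadowJar writes the bundled JAR under build/libs/,
// named <project>-<version>.jar since the classifier is cleared above.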
tasks.register('localJar') { dependsOn shadowJar }
tasks.register('sourcesJar', Jar) {
from sourceSets.main.allSource
archiveClassifier = 'sources'
}
tasks.register('copyShadowJar', Copy) {
dependsOn shadowJar
from(shadowJar.archiveFile)
into "$buildDir/jars"
doLast {
logger.lifecycle("Created DISCVRseq JAR in ${destinationDir}")
}
}
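// e.g. ./gradlew copyShadowJar drops the shaded JAR into build/jars/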
tasks.withType(Javadoc).configureEach {
// do this for all javadoc tasks, including toolDoc
options.addStringOption('Xdoclint:none')
outputs.upToDateWhen { false }
}
javadoc {
// This is a hack to disable the java 8 default javadoc lint until we fix the html formatting
// We only want to do this for the javadoc task, not toolDoc
options.addStringOption('Xdoclint:none', '-quiet')
source = sourceSets.main.allJava + files(configurations.externalSourceConfiguration.collect { zipTree(it) })
include '**/*.java'
}
// Generate Online Doc
tasks.register('toolDoc', Javadoc) {
dependsOn classes
final File baseDocDir = new File("$buildDir/docs")
final File toolDocDir = new File("$buildDir/docs/toolDoc")
doFirst {
// make sure the output folder exists or we can create it
if (!toolDocDir.exists() && !toolDocDir.mkdirs()) {
throw new GradleException(String.format("Failure creating folder (%s) for HTML doc output in task (%s)",
toolDocDir.getAbsolutePath(),
it.name));
}
copy {
from('src/main/resources/com/github/discvrseq/util/docTemplates')
include 'stylesheet.css'
include 'images/**'
include 'resources/**'
into toolDocDir
}
copy {
from('src/test/resources/com/github/discvrseq/TestData')
include 'SimpleExample.vcf.gz'
include 'SimpleExample.vcf.gz.tbi'
into toolDocDir
}
copy {
from('src/main/resources/com/github/discvrseq/util/docTemplates')
include 'index.md'
include 'README.md'
include '_config.yml'
into baseDocDir
}
}
source = sourceSets.main.allJava + files(configurations.externalSourceConfiguration.collect { zipTree(it) })
include '**com/github/discvrseq/**/*.java'
include '**org/broadinstitute/hellbender/cmdline/argumentcollections/**/*.java'
include '**org/broadinstitute/hellbender/cmdline/**/*.java'
// The doc process instantiates any documented feature classes, so to run it we need the entire
// runtime classpath, as well as jdk javadoc files such as tools.jar, where com.sun.javadoc lives.
classpath = sourceSets.main.runtimeClasspath
options.docletpath = classpath as List
options.doclet = "com.github.discvrseq.util.help.DISCVRSeqHelpDoclet"
outputs.dir(toolDocDir)
options.destinationDirectory(toolDocDir)
options.addStringOption("settings-dir", "src/main/resources/com/github/discvrseq/util/docTemplates");
options.addStringOption("absolute-version", getVersion())
options.addStringOption("build-timestamp", new Date().format("dd-MM-yyyy hh:mm:ss"))
//gradle 6.x+ defaults to setting this true which breaks the barclay doclet
options.noTimestamp(false)
}
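// Usage sketch: ./gradlew toolDoc renders the HTML tool documentation into
// build/docs/toolDoc via the custom DISCVRSeqHelpDoclet configured above.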
test {
// transform the list test configuration --add-opens (which must include both the runtime and test args) into
// command line argument format
final testJVMAddOpens = new ArrayList<String>();
testJVMAddOpens.addAll(runtimeAddOpens);
testJVMAddOpens.addAll(testAddOpens);
final testConfigurationJVMArgs = testJVMAddOpens.stream()
.flatMap(openSpec -> ['--add-opens', openSpec].stream())
.collect(Collectors.toList()) // Collectors.toList() yields a mutable list; Stream.toList() would be unmodifiable
// add in any other required args
testConfigurationJVMArgs.add('-Dio.netty.tryReflectionSetAccessible=true')
jvmArgs = testConfigurationJVMArgs
outputs.upToDateWhen { false } //tests will never be "up to date" so you can always rerun them
final String TEST_VERBOSITY = System.getenv("TEST_VERBOSITY") ?: ""
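// e.g. TEST_VERBOSITY=minimal ./gradlew test logs only a progress line every 10,000 tests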
useTestNG {
}
systemProperty "samjdk.use_async_io_read_samtools", "false"
systemProperty "samjdk.use_async_io_write_samtools", "true"
systemProperty "samjdk.use_async_io_write_tribble", "false"
systemProperty "samjdk.compression_level", "1"
systemProperty "gatk.spark.debug", System.getProperty("gatk.spark.debug")
// set heap size for the test JVM(s)
minHeapSize = "1G"
maxHeapSize = "4G"
if (TEST_VERBOSITY == "minimal") {
int count = 0
// listen to events in the test execution lifecycle
beforeTest { descriptor ->
count++
if (count % 10000 == 0) {
logger.lifecycle("Finished " + Integer.toString(count) + " tests")
}
}
} else {
// show standard out and standard error of the test JVM(s) on the console
testLogging.showStandardStreams = true
beforeTest { descriptor ->
logger.lifecycle("Running Test: " + descriptor)
}
// listen to standard out and standard error of the test JVM(s)
onOutput { descriptor, event ->
logger.lifecycle("Test: " + descriptor + " produced standard out/err: " + event.message )
}
}
testLogging {
events "skipped", "failed"
exceptionFormat = "full"
}
afterSuite { desc, result ->
if (!desc.parent) { // will match the outermost suite
println "Results: ${result.resultType} (${result.testCount} tests, ${result.successfulTestCount} successes, ${result.failedTestCount} failures, ${result.skippedTestCount} skipped)"
}
}
}