// build.sbt for databricks/spark-avro
// (The repository was archived by its owner on Dec 20, 2018 and is read-only.)
name := "spark-avro"
organization := "com.databricks"
scalaVersion := "2.11.8"
crossScalaVersions := Seq("2.10.6", "2.11.8")
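
// Spark Packages coordinates and the target Spark version; the sp* and
// sparkVersion keys below are provided by the sbt-spark-package plugin.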
spName := "databricks/spark-avro"
sparkVersion := "2.1.0"
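
// Versions of the dependencies used only for testing. Because each setting
// reads sys.props, it can be overridden from the command line via a JVM
// system property, e.g. (version shown is only an example):
//   sbt -Dspark.testVersion=2.2.0 test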
val testSparkVersion = settingKey[String]("The version of Spark to test against.")
testSparkVersion := sys.props.getOrElse("spark.testVersion", sparkVersion.value)
val testHadoopVersion = settingKey[String]("The version of Hadoop to test against.")
testHadoopVersion := sys.props.getOrElse("hadoop.testVersion", "2.2.0")
val testAvroVersion = settingKey[String]("The version of Avro to test against.")
testAvroVersion := sys.props.getOrElse("avro.testVersion", "1.7.6")
val testAvroMapredVersion = settingKey[String]("The version of avro-mapred to test against.")
testAvroMapredVersion := sys.props.getOrElse("avroMapred.testVersion", "1.7.7")
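
// Packaging options for the sbt-spark-package plugin (sp* keys).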
spAppendScalaVersion := true
spIncludeMaven := true
spIgnoreProvided := true
sparkComponents := Seq("sql")
libraryDependencies ++= Seq(
"org.slf4j" % "slf4j-api" % "1.7.5",
"org.apache.avro" % "avro" % "1.7.6" exclude("org.mortbay.jetty", "servlet-api"),
"org.apache.avro" % "avro-mapred" % "1.7.7" % "provided" classifier("hadoop2") exclude("org.mortbay.jetty", "servlet-api"),
  // Kryo is provided by Spark, but we need it here to import the @DefaultSerializer annotation:
"com.esotericsoftware" % "kryo-shaded" % "3.0.3" % "provided",
"org.scalatest" %% "scalatest" % "2.2.1" % "test",
"commons-io" % "commons-io" % "2.4" % "test"
)
// Curator pulls in conflicting Guava dependencies, so exclude it from the Hadoop and Spark test dependencies below.
val curatorExclusion = ExclusionRule(organization = "org.apache.curator")
libraryDependencies ++= Seq(
"org.apache.hadoop" % "hadoop-client" % testHadoopVersion.value % "test" excludeAll(curatorExclusion),
"org.apache.spark" %% "spark-core" % testSparkVersion.value % "test" exclude("org.apache.hadoop", "hadoop-client") excludeAll(curatorExclusion),
"org.apache.spark" %% "spark-sql" % testSparkVersion.value % "test" exclude("org.apache.hadoop", "hadoop-client") excludeAll(curatorExclusion),
"org.apache.avro" % "avro" % testAvroVersion.value % "test" exclude("org.mortbay.jetty", "servlet-api"),
"org.apache.avro" % "avro-mapred" % testAvroMapredVersion.value % "test" classifier("hadoop2") exclude("org.mortbay.jetty", "servlet-api")
)
// Display full-length stacktraces from ScalaTest:
testOptions in Test += Tests.Argument("-oF")
scalacOptions ++= Seq("-target:jvm-1.7")
javacOptions ++= Seq("-source", "1.7", "-target", "1.7")
// scoverage statement highlighting is only supported on Scala 2.11+.
coverageHighlighting := scalaBinaryVersion.value != "2.10"
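
// sbteclipse: direct Eclipse's build output to target/eclipse instead of the project root.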
EclipseKeys.eclipseOutput := Some("target/eclipse")
/********************
* Release settings *
********************/
publishMavenStyle := true
releaseCrossBuild := true
licenses += ("Apache-2.0", url("http://www.apache.org/licenses/LICENSE-2.0"))
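
// Publish PGP-signed artifacts (via sbt-pgp) during the release process.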
releasePublishArtifactsAction := PgpKeys.publishSigned.value
pomExtra :=
<url>https://github.com/databricks/spark-avro</url>
<scm>
<url>[email protected]:databricks/spark-avro.git</url>
<connection>scm:git:[email protected]:databricks/spark-avro.git</connection>
</scm>
<developers>
<developer>
<id>marmbrus</id>
<name>Michael Armbrust</name>
<url>https://github.com/marmbrus</url>
</developer>
<developer>
<id>JoshRosen</id>
<name>Josh Rosen</name>
<url>https://github.com/JoshRosen</url>
</developer>
<developer>
<id>vlyubin</id>
<name>Volodymyr Lyubinets</name>
<url>https://github.com/vlyubin</url>
</developer>
</developers>
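
// sbt-bintray: stage artifacts on publish rather than releasing them immediately.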
bintrayReleaseOnPublish in ThisBuild := false
import ReleaseTransformations._
// Add publishing to spark packages as another step.
releaseProcess := Seq[ReleaseStep](
checkSnapshotDependencies,
inquireVersions,
runTest,
setReleaseVersion,
commitReleaseVersion,
tagRelease,
publishArtifacts,
setNextVersion,
commitNextVersion,
pushChanges,
releaseStepTask(spPublish)
)
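
// The whole pipeline above, including the final Spark Packages upload, is run
// with the sbt-release plugin's command: sbt release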