Commit 2fd3469

Ensure build resolution is kept when packaging assemblies with provided dependencies (#2457)
Gedochao authored Oct 11, 2023
1 parent 6ca1cfe commit 2fd3469
Showing 2 changed files with 70 additions and 8 deletions.
@@ -11,15 +11,8 @@ import scala.build.errors.{
   MalformedInputError,
   ModuleFormatError
 }
+import scala.build.options._
 import scala.build.options.packaging.{DockerOptions, NativeImageOptions}
-import scala.build.options.{
-  BuildOptions,
-  JavaOpt,
-  PackageOptions,
-  PackageType,
-  PostBuildOptions,
-  ShadowingSeq
-}
 import scala.build.{Logger, Positioned, options}
 import scala.cli.commands.SpecificationLevel

@@ -100,6 +93,9 @@ final case class Packaging(
     }
 
     BuildOptions(
+      internal = InternalOptions(
+        keepResolution = provided0.nonEmpty || packageTypeOpt.contains(PackageType.Spark)
+      ),
       notForBloopOptions = PostBuildOptions(
         packageOptions = PackageOptions(
           packageTypeOpt = packageTypeOpt,
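In plain terms, the change above makes the packaging directive handler request that the build's dependency resolution be kept whenever at least one dependency is declared as provided, or the Spark package type is selected; the assembly/Spark packaging step presumably relies on that resolution to work out which artifacts correspond to the provided modules. A minimal, self-contained sketch of the added condition follows; the simplified case classes are hypothetical stand-ins that only mirror the shape of Scala CLI's real BuildOptions/InternalOptions and are not its actual API.

// Hypothetical, simplified stand-ins for Scala CLI's option types, for illustration only.
sealed trait PackageType
object PackageType {
  case object Assembly extends PackageType
  case object Spark    extends PackageType
}

final case class InternalOptions(keepResolution: Boolean = false)
final case class BuildOptions(internal: InternalOptions = InternalOptions())

object KeepResolutionSketch {
  // Mirrors the added logic: keep the resolution if there are provided deps
  // or if the Spark package type was requested.
  def buildOptions(
    provided0: Seq[String], // provided module coordinates, e.g. "org.apache.spark::spark-sql"
    packageTypeOpt: Option[PackageType]
  ): BuildOptions =
    BuildOptions(
      internal = InternalOptions(
        keepResolution = provided0.nonEmpty || packageTypeOpt.contains(PackageType.Spark)
      )
    )

  def main(args: Array[String]): Unit = {
    println(buildOptions(Seq("org.apache.spark::spark-sql"), Some(PackageType.Assembly)))
    // BuildOptions(InternalOptions(true))
    println(buildOptions(Nil, Some(PackageType.Assembly)))
    // BuildOptions(InternalOptions(false))
  }
}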
@@ -1057,4 +1057,70 @@ abstract class PackageTestDefinitions(val scalaVersionOpt: Option[String])
       expect(outputAssembly == root.toString)
     }
   }
+
+  if (actualScalaVersion.startsWith("2")) {
+    test("resolution is kept for assemblies with provided spark deps (packaging.provided)") {
+      val msg = "Hello"
+      val inputPath = os.rel / "Hello.scala"
+      TestInputs(
+        inputPath ->
+          s"""//> using lib org.apache.spark::spark-sql:3.3.2
+             |//> using lib org.apache.spark::spark-hive:3.3.2
+             |//> using lib org.apache.spark::spark-sql-kafka-0-10:3.3.2
+             |//> using packaging.packageType assembly
+             |//> using packaging.provided org.apache.spark::spark-sql
+             |//> using packaging.provided org.apache.spark::spark-hive
+             |
+             |object Main extends App {
+             |  println("$msg")
+             |}
+             |""".stripMargin
+      ).fromRoot { root =>
+        val outputJarPath = root / "Hello.jar"
+        val res = os.proc(
+          TestUtil.cli,
+          "--power",
+          "package",
+          inputPath,
+          "-o",
+          outputJarPath,
+          extraOptions
+        ).call(cwd = root, stderr = os.Pipe)
+        expect(os.isFile(outputJarPath))
+        expect(res.err.trim().contains(s"Wrote $outputJarPath"))
+      }
+    }
+
+    test(
+      "resolution is kept for assemblies with provided spark deps (packaging.packageType spark)"
+    ) {
+      val msg = "Hello"
+      val inputPath = os.rel / "Hello.scala"
+      TestInputs(
+        inputPath ->
+          s"""//> using lib org.apache.spark::spark-sql:3.3.2
+             |//> using lib org.apache.spark::spark-hive:3.3.2
+             |//> using lib org.apache.spark::spark-sql-kafka-0-10:3.3.2
+             |//> using packaging.packageType spark
+             |
+             |object Main extends App {
+             |  println("$msg")
+             |}
+             |""".stripMargin
+      ).fromRoot { root =>
+        val outputJarPath = root / "Hello.jar"
+        val res = os.proc(
+          TestUtil.cli,
+          "--power",
+          "package",
+          inputPath,
+          "-o",
+          outputJarPath,
+          extraOptions
+        ).call(cwd = root, stderr = os.Pipe)
+        expect(os.isFile(outputJarPath))
+        expect(res.err.trim().contains(s"Wrote $outputJarPath"))
+      }
+    }
+  }
 }
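For reference, the scenario exercised by the first new test corresponds roughly to the stand-alone source file below, packaged with the equivalent of scala-cli --power package Hello.scala -o Hello.jar; the file name, directives and Spark versions are copied from the test above, and the command shape mirrors the test's os.proc call, so the exact launcher invocation may differ on a given setup.

//> using lib org.apache.spark::spark-sql:3.3.2
//> using lib org.apache.spark::spark-hive:3.3.2
//> using lib org.apache.spark::spark-sql-kafka-0-10:3.3.2
//> using packaging.packageType assembly
//> using packaging.provided org.apache.spark::spark-sql
//> using packaging.provided org.apache.spark::spark-hive

object Main extends App {
  println("Hello")
}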
