Skip to content

Commit 2fd3469

Browse files
authored
Ensure build resolution is kept when packaging assemblies with provided dependencies (#2457)
1 parent 6ca1cfe commit 2fd3469

File tree

2 files changed: +70 −8 lines changed

modules/directives/src/main/scala/scala/build/preprocessing/directives/Packaging.scala

Lines changed: 4 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -11,15 +11,8 @@ import scala.build.errors.{
1111
MalformedInputError,
1212
ModuleFormatError
1313
}
14+
import scala.build.options._
1415
import scala.build.options.packaging.{DockerOptions, NativeImageOptions}
15-
import scala.build.options.{
16-
BuildOptions,
17-
JavaOpt,
18-
PackageOptions,
19-
PackageType,
20-
PostBuildOptions,
21-
ShadowingSeq
22-
}
2316
import scala.build.{Logger, Positioned, options}
2417
import scala.cli.commands.SpecificationLevel
2518

@@ -100,6 +93,9 @@ final case class Packaging(
10093
}
10194

10295
BuildOptions(
96+
internal = InternalOptions(
97+
keepResolution = provided0.nonEmpty || packageTypeOpt.contains(PackageType.Spark)
98+
),
10399
notForBloopOptions = PostBuildOptions(
104100
packageOptions = PackageOptions(
105101
packageTypeOpt = packageTypeOpt,

modules/integration/src/test/scala/scala/cli/integration/PackageTestDefinitions.scala

Lines changed: 66 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1057,4 +1057,70 @@ abstract class PackageTestDefinitions(val scalaVersionOpt: Option[String])
10571057
expect(outputAssembly == root.toString)
10581058
}
10591059
}
1060+
1061+
if (actualScalaVersion.startsWith("2")) {
  // Shared driver for the two Spark "provided deps" packaging tests below.
  // Writes a single-file Spark project whose header is the common spark
  // library directives plus the caller-supplied packaging directives, runs
  // `scala-cli --power package`, and checks that the assembly jar is actually
  // produced. (Regression coverage: packaging an assembly with provided
  // dependencies must keep the build resolution — see commit message.)
  def checkSparkAssemblyPackaging(packagingDirectives: String): Unit = {
    val msg       = "Hello"
    val inputPath = os.rel / "Hello.scala"
    TestInputs(
      inputPath ->
        s"""//> using lib org.apache.spark::spark-sql:3.3.2
           |//> using lib org.apache.spark::spark-hive:3.3.2
           |//> using lib org.apache.spark::spark-sql-kafka-0-10:3.3.2
           |$packagingDirectives
           |
           |object Main extends App {
           |  println("$msg")
           |}
           |""".stripMargin
    ).fromRoot { root =>
      val outputJarPath = root / "Hello.jar"
      val res = os.proc(
        TestUtil.cli,
        "--power",
        "package",
        inputPath,
        "-o",
        outputJarPath,
        extraOptions
      ).call(cwd = root, stderr = os.Pipe)
      // The jar must exist and the CLI must report having written it.
      expect(os.isFile(outputJarPath))
      expect(res.err.trim().contains(s"Wrote $outputJarPath"))
    }
  }

  test("resolution is kept for assemblies with provided spark deps (packaging.provided)") {
    checkSparkAssemblyPackaging(
      """//> using packaging.packageType assembly
        |//> using packaging.provided org.apache.spark::spark-sql
        |//> using packaging.provided org.apache.spark::spark-hive""".stripMargin
    )
  }

  test(
    "resolution is kept for assemblies with provided spark deps (packaging.packageType spark)"
  ) {
    checkSparkAssemblyPackaging("//> using packaging.packageType spark")
  }
}
10601126
}

0 commit comments

Comments
 (0)