Commit ad9f67a

Make Delta able to cross-compile against Spark Latest Release (3.5) and Spark Master (4.0) (delta-io#2877)
#### Which Delta project/connector is this regarding?

- [X] Spark
- [ ] Standalone
- [ ] Flink
- [ ] Kernel
- [ ] Other (fill in here)

## Description

### What DOES this PR do?

- Changes Delta's `build.sbt` to compile `delta-spark` against Spark master. Compilation succeeds; tests pass against Spark 3.5; tests run but fail against Spark master, e.g. `build/sbt -DsparkVersion=master spark/test`.
- The default Spark version for Delta is still Spark 3.5.
- Testing requires building unidoc for (unfortunately) ALL projects in `build.sbt`. That breaks because Spark master requires JDK 17 while delta-iceberg uses JDK 8, so we disable unidoc for delta-spark when compiling against Spark master for now.
- Creates `spark-3.5` and `spark-master` folders. Delta will be able to cross-compile against both. These folders will contain `shims` (code that is selectively pulled in to compile against a single Spark version) as well as Spark-version-only code.

### What does this PR NOT do?

- This PR does not update any build infra (GitHub Actions) to actually compile or test delta-spark against Spark master. That will come later.

## How was this patch tested?

Existing tests.

- `build/sbt -DsparkVersion=3.5 spark/test` ✅
- `build/sbt -DsparkVersion=master spark/compile` ✅
- `build/sbt -DsparkVersion=master spark/test` ❌ (expected; these fixes will come later)

## Does this PR introduce _any_ user-facing changes?

No
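As a reading aid for the diff below, here is a minimal sketch of the shim-selection idea described above. It is illustrative only: it reuses the `getSparkVersion`, `LATEST_RELEASED_SPARK_VERSION`, and `SPARK_MASTER_VERSION` names introduced in `build.sbt` by this commit, while the helper name `versionSpecificSourceDir` is hypothetical.

```scala
// Illustrative sketch (not part of the commit): the build adds exactly one
// version-specific source directory, so delta-spark sources compile against a
// single shim implementation per Spark version.
def versionSpecificSourceDir(baseDir: File): File = getSparkVersion() match {
  case LATEST_RELEASED_SPARK_VERSION => baseDir / "src" / "main" / "scala-spark-3.5"
  case SPARK_MASTER_VERSION          => baseDir / "src" / "main" / "scala-spark-master"
}
```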
1 parent 3dcbbb8 commit ad9f67a

15 files changed: +398 / -97 lines


build.sbt

Lines changed: 126 additions & 45 deletions
@@ -34,8 +34,13 @@ val all_scala_versions = Seq(scala212, scala213)
 val default_scala_version = settingKey[String]("Default Scala version")
 Global / default_scala_version := scala212

+val LATEST_RELEASED_SPARK_VERSION = "3.5.0"
+val SPARK_MASTER_VERSION = "4.0.0-SNAPSHOT"
+val sparkVersion = settingKey[String]("Spark version")
+spark / sparkVersion := getSparkVersion()
+
 // Dependent library versions
-val sparkVersion = "3.5.0"
+val defaultSparkVersion = LATEST_RELEASED_SPARK_VERSION
 val flinkVersion = "1.16.1"
 val hadoopVersion = "3.3.4"
 val scalaTestVersion = "3.2.15"
@@ -62,6 +67,37 @@ crossScalaVersions := Nil
 val targetJvm = settingKey[String]("Target JVM version")
 Global / targetJvm := "1.8"

+/**
+ * Returns the current spark version, which is the same value as `sparkVersion.value`.
+ *
+ * This logic exists in a separate method because some call sites cannot access `sparkVersion.value`
+ * e.g. callers that are not inside tasks or setting macros.
+ */
+def getSparkVersion(): String = {
+  val latestReleasedSparkVersionShort = getMajorMinorPatch(LATEST_RELEASED_SPARK_VERSION) match {
+    case (maj, min, _) => s"$maj.$min"
+  }
+  val allValidSparkVersionInputs = Seq(
+    "master",
+    "latest",
+    SPARK_MASTER_VERSION,
+    LATEST_RELEASED_SPARK_VERSION,
+    latestReleasedSparkVersionShort
+  )
+
+  // e.g. build/sbt -DsparkVersion=master, build/sbt -DsparkVersion=4.0.0-SNAPSHOT
+  val input = sys.props.getOrElse("sparkVersion", LATEST_RELEASED_SPARK_VERSION)
+  input match {
+    case LATEST_RELEASED_SPARK_VERSION | "latest" | `latestReleasedSparkVersionShort` =>
+      LATEST_RELEASED_SPARK_VERSION
+    case SPARK_MASTER_VERSION | "master" =>
+      SPARK_MASTER_VERSION
+    case _ =>
+      throw new IllegalArgumentException(s"Invalid sparkVersion: $input. Must be one of " +
+        s"${allValidSparkVersionInputs.mkString("{", ",", "}")}")
+  }
+}
+
 lazy val commonSettings = Seq(
   organization := "io.delta",
   scalaVersion := default_scala_version.value,
@@ -90,6 +126,58 @@ lazy val commonSettings = Seq(
   unidocSourceFilePatterns := Nil,
 )

+/**
+ * Note: we cannot access sparkVersion.value here, since that can only be used within a task or
+ * setting macro.
+ */
+def crossSparkSettings(): Seq[Setting[_]] = getSparkVersion() match {
+  case LATEST_RELEASED_SPARK_VERSION => Seq(
+    scalaVersion := default_scala_version.value,
+    crossScalaVersions := all_scala_versions,
+    targetJvm := "1.8",
+    // For adding staged Spark RC versions, e.g.:
+    // resolvers += "Apache Spark 3.5.0 (RC1) Staging" at "https://repository.apache.org/content/repositories/orgapachespark-1444/",
+    Compile / unmanagedSourceDirectories += (Compile / baseDirectory).value / "src" / "main" / "scala-spark-3.5",
+    Antlr4 / antlr4Version := "4.9.3",
+
+    // Java-/Scala-/Uni-Doc Settings
+    scalacOptions ++= Seq(
+      "-P:genjavadoc:strictVisibility=true" // hide package private types and methods in javadoc
+    ),
+    unidocSourceFilePatterns := Seq(SourceFilePattern("io/delta/tables/", "io/delta/exceptions/"))
+  )
+
+  case SPARK_MASTER_VERSION => Seq(
+    scalaVersion := scala213,
+    crossScalaVersions := Seq(scala213),
+    targetJvm := "17",
+    resolvers += "Spark master staging" at "https://repository.apache.org/content/groups/snapshots/",
+    Compile / unmanagedSourceDirectories += (Compile / baseDirectory).value / "src" / "main" / "scala-spark-master",
+    Antlr4 / antlr4Version := "4.13.1",
+    Test / javaOptions ++= Seq(
+      // Copied from SparkBuild.scala to support Java 17 for unit tests (see apache/spark#34153)
+      "--add-opens=java.base/java.lang=ALL-UNNAMED",
+      "--add-opens=java.base/java.lang.invoke=ALL-UNNAMED",
+      "--add-opens=java.base/java.io=ALL-UNNAMED",
+      "--add-opens=java.base/java.net=ALL-UNNAMED",
+      "--add-opens=java.base/java.nio=ALL-UNNAMED",
+      "--add-opens=java.base/java.util=ALL-UNNAMED",
+      "--add-opens=java.base/java.util.concurrent=ALL-UNNAMED",
+      "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED",
+      "--add-opens=java.base/sun.nio.cs=ALL-UNNAMED",
+      "--add-opens=java.base/sun.security.action=ALL-UNNAMED",
+      "--add-opens=java.base/sun.util.calendar=ALL-UNNAMED"
+    )
+
+    // Java-/Scala-/Uni-Doc Settings
+    // This isn't working yet against Spark Master.
+    // 1) delta-spark on Spark Master uses JDK 17. delta-iceberg uses JDK 8 or 11. For some reason,
+    //    generating delta-spark unidoc compiles delta-iceberg
+    // 2) delta-spark unidoc fails to compile. spark 3.5 is on its classpath. likely due to iceberg
+    //    issue above.
+  )
+}
+
 lazy val spark = (project in file("spark"))
   .dependsOn(storage)
   .enablePlugins(Antlr4Plugin)
@@ -99,29 +187,26 @@ lazy val spark = (project in file("spark"))
     scalaStyleSettings,
     sparkMimaSettings,
     releaseSettings,
+    crossSparkSettings(),
     libraryDependencies ++= Seq(
       // Adding test classifier seems to break transitive resolution of the core dependencies
-      "org.apache.spark" %% "spark-hive" % sparkVersion % "provided",
-      "org.apache.spark" %% "spark-sql" % sparkVersion % "provided",
-      "org.apache.spark" %% "spark-core" % sparkVersion % "provided",
-      "org.apache.spark" %% "spark-catalyst" % sparkVersion % "provided",
+      "org.apache.spark" %% "spark-hive" % sparkVersion.value % "provided",
+      "org.apache.spark" %% "spark-sql" % sparkVersion.value % "provided",
+      "org.apache.spark" %% "spark-core" % sparkVersion.value % "provided",
+      "org.apache.spark" %% "spark-catalyst" % sparkVersion.value % "provided",

       // Test deps
       "org.scalatest" %% "scalatest" % scalaTestVersion % "test",
       "org.scalatestplus" %% "scalacheck-1-15" % "3.2.9.0" % "test",
       "junit" % "junit" % "4.12" % "test",
       "com.novocode" % "junit-interface" % "0.11" % "test",
-      "org.apache.spark" %% "spark-catalyst" % sparkVersion % "test" classifier "tests",
-      "org.apache.spark" %% "spark-core" % sparkVersion % "test" classifier "tests",
-      "org.apache.spark" %% "spark-sql" % sparkVersion % "test" classifier "tests",
-      "org.apache.spark" %% "spark-hive" % sparkVersion % "test" classifier "tests",
+      "org.apache.spark" %% "spark-catalyst" % sparkVersion.value % "test" classifier "tests",
+      "org.apache.spark" %% "spark-core" % sparkVersion.value % "test" classifier "tests",
+      "org.apache.spark" %% "spark-sql" % sparkVersion.value % "test" classifier "tests",
+      "org.apache.spark" %% "spark-hive" % sparkVersion.value % "test" classifier "tests",
     ),
-    // For adding staged Spark RC versions, Ex:
-    // resolvers += "Apche Spark 3.5.0 (RC1) Staging" at "https://repository.apache.org/content/repositories/orgapachespark-1444/",
     Compile / packageBin / mappings := (Compile / packageBin / mappings).value ++
       listPythonFiles(baseDirectory.value.getParentFile / "python"),
-
-    Antlr4 / antlr4Version:= "4.9.3",
     Antlr4 / antlr4PackageName := Some("io.delta.sql.parser"),
     Antlr4 / antlr4GenListener := true,
     Antlr4 / antlr4GenVisitor := true,
@@ -132,10 +217,6 @@ lazy val spark = (project in file("spark"))
     // Don't execute in parallel since we can't have multiple Sparks in the same JVM
     Test / parallelExecution := false,

-    scalacOptions ++= Seq(
-      "-P:genjavadoc:strictVisibility=true" // hide package private types and methods in javadoc
-    ),
-
     javaOptions += "-Xmx1024m",

     // Configurations to speed up tests and reduce memory footprint
@@ -172,11 +253,11 @@ lazy val spark = (project in file("spark"))
       Seq(file)
     },
     TestParallelization.settings,
-
-    // Unidoc settings
-    unidocSourceFilePatterns := Seq(SourceFilePattern("io/delta/tables/", "io/delta/exceptions/")),
   )
-  .configureUnidoc(generateScalaDoc = true)
+  .configureUnidoc(
+    generatedJavaDoc = getSparkVersion() == LATEST_RELEASED_SPARK_VERSION,
+    generateScalaDoc = getSparkVersion() == LATEST_RELEASED_SPARK_VERSION
+  )

 lazy val contribs = (project in file("contribs"))
   .dependsOn(spark % "compile->compile;test->test;provided->provided")
@@ -225,7 +306,7 @@ lazy val sharing = (project in file("sharing"))
     releaseSettings,
     Test / javaOptions ++= Seq("-ea"),
     libraryDependencies ++= Seq(
-      "org.apache.spark" %% "spark-sql" % sparkVersion % "provided",
+      "org.apache.spark" %% "spark-sql" % defaultSparkVersion % "provided",

       "io.delta" %% "delta-sharing-client" % "1.0.4",

@@ -234,10 +315,10 @@ lazy val sharing = (project in file("sharing"))
       "org.scalatestplus" %% "scalacheck-1-15" % "3.2.9.0" % "test",
       "junit" % "junit" % "4.12" % "test",
       "com.novocode" % "junit-interface" % "0.11" % "test",
-      "org.apache.spark" %% "spark-catalyst" % sparkVersion % "test" classifier "tests",
-      "org.apache.spark" %% "spark-core" % sparkVersion % "test" classifier "tests",
-      "org.apache.spark" %% "spark-sql" % sparkVersion % "test" classifier "tests",
-      "org.apache.spark" %% "spark-hive" % sparkVersion % "test" classifier "tests",
+      "org.apache.spark" %% "spark-catalyst" % defaultSparkVersion % "test" classifier "tests",
+      "org.apache.spark" %% "spark-core" % defaultSparkVersion % "test" classifier "tests",
+      "org.apache.spark" %% "spark-sql" % defaultSparkVersion % "test" classifier "tests",
+      "org.apache.spark" %% "spark-hive" % defaultSparkVersion % "test" classifier "tests",
     )
   ).configureUnidoc()

@@ -290,10 +371,10 @@ lazy val kernelDefaults = (project in file("kernel/kernel-defaults"))
       "org.openjdk.jmh" % "jmh-core" % "1.37" % "test",
       "org.openjdk.jmh" % "jmh-generator-annprocess" % "1.37" % "test",

-      "org.apache.spark" %% "spark-hive" % sparkVersion % "test" classifier "tests",
-      "org.apache.spark" %% "spark-sql" % sparkVersion % "test" classifier "tests",
-      "org.apache.spark" %% "spark-core" % sparkVersion % "test" classifier "tests",
-      "org.apache.spark" %% "spark-catalyst" % sparkVersion % "test" classifier "tests",
+      "org.apache.spark" %% "spark-hive" % defaultSparkVersion % "test" classifier "tests",
+      "org.apache.spark" %% "spark-sql" % defaultSparkVersion % "test" classifier "tests",
+      "org.apache.spark" %% "spark-core" % defaultSparkVersion % "test" classifier "tests",
+      "org.apache.spark" %% "spark-catalyst" % defaultSparkVersion % "test" classifier "tests",
     ),
     javaCheckstyleSettings("kernel/dev/checkstyle.xml"),
     // Unidoc settings
@@ -346,7 +427,7 @@ lazy val storageS3DynamoDB = (project in file("storage-s3-dynamodb"))
 ).configureUnidoc()

 val icebergSparkRuntimeArtifactName = {
-  val (expMaj, expMin, _) = getMajorMinorPatch(sparkVersion)
+  val (expMaj, expMin, _) = getMajorMinorPatch(defaultSparkVersion)
   s"iceberg-spark-runtime-$expMaj.$expMin"
 }

@@ -362,7 +443,7 @@ lazy val testDeltaIcebergJar = (project in file("testDeltaIcebergJar"))
     libraryDependencies ++= Seq(
       "org.apache.hadoop" % "hadoop-client" % hadoopVersion,
       "org.scalatest" %% "scalatest" % scalaTestVersion % "test",
-      "org.apache.spark" %% "spark-core" % sparkVersion % "test"
+      "org.apache.spark" %% "spark-core" % defaultSparkVersion % "test"
     )
   )

@@ -496,7 +577,7 @@ lazy val hudi = (project in file("hudi"))
         ExclusionRule(organization = "org.apache.hadoop"),
         ExclusionRule(organization = "org.apache.zookeeper"),
       ),
-      "org.apache.spark" %% "spark-avro" % sparkVersion % "test" excludeAll ExclusionRule(organization = "org.apache.hadoop"),
+      "org.apache.spark" %% "spark-avro" % defaultSparkVersion % "test" excludeAll ExclusionRule(organization = "org.apache.hadoop"),
       "org.apache.parquet" % "parquet-avro" % "1.12.3" % "compile"
     ),
     assembly / assemblyJarName := s"${name.value}-assembly_${scalaBinaryVersion.value}-${version.value}.jar",
@@ -974,10 +1055,10 @@ lazy val compatibility = (project in file("connectors/oss-compatibility-tests"))
       "io.netty" % "netty-buffer" % "4.1.63.Final" % "test",
       "org.scalatest" %% "scalatest" % "3.1.0" % "test",
       "commons-io" % "commons-io" % "2.8.0" % "test",
-      "org.apache.spark" %% "spark-sql" % sparkVersion % "test",
-      "org.apache.spark" %% "spark-catalyst" % sparkVersion % "test" classifier "tests",
-      "org.apache.spark" %% "spark-core" % sparkVersion % "test" classifier "tests",
-      "org.apache.spark" %% "spark-sql" % sparkVersion % "test" classifier "tests",
+      "org.apache.spark" %% "spark-sql" % defaultSparkVersion % "test",
+      "org.apache.spark" %% "spark-catalyst" % defaultSparkVersion % "test" classifier "tests",
+      "org.apache.spark" %% "spark-core" % defaultSparkVersion % "test" classifier "tests",
+      "org.apache.spark" %% "spark-sql" % defaultSparkVersion % "test" classifier "tests",
     )
   )
 */
@@ -992,10 +1073,10 @@ lazy val goldenTables = (project in file("connectors/golden-tables"))
       // Test Dependencies
       "org.scalatest" %% "scalatest" % scalaTestVersion % "test",
       "commons-io" % "commons-io" % "2.8.0" % "test",
-      "org.apache.spark" %% "spark-sql" % sparkVersion % "test",
-      "org.apache.spark" %% "spark-catalyst" % sparkVersion % "test" classifier "tests",
-      "org.apache.spark" %% "spark-core" % sparkVersion % "test" classifier "tests",
-      "org.apache.spark" %% "spark-sql" % sparkVersion % "test" classifier "tests"
+      "org.apache.spark" %% "spark-sql" % defaultSparkVersion % "test",
+      "org.apache.spark" %% "spark-catalyst" % defaultSparkVersion % "test" classifier "tests",
+      "org.apache.spark" %% "spark-core" % defaultSparkVersion % "test" classifier "tests",
+      "org.apache.spark" %% "spark-sql" % defaultSparkVersion % "test" classifier "tests"
     )
   )

@@ -1018,13 +1099,13 @@ lazy val sqlDeltaImport = (project in file("connectors/sql-delta-import"))
     Test / publishArtifact := false,
     libraryDependencies ++= Seq(
       "io.netty" % "netty-buffer" % "4.1.63.Final" % "test",
-      "org.apache.spark" % ("spark-sql_" + sqlDeltaImportScalaVersion(scalaBinaryVersion.value)) % sparkVersion % "provided",
+      "org.apache.spark" % ("spark-sql_" + sqlDeltaImportScalaVersion(scalaBinaryVersion.value)) % defaultSparkVersion % "provided",
       "org.rogach" %% "scallop" % "3.5.1",
       "org.scalatest" %% "scalatest" % scalaTestVersionForConnectors % "test",
       "com.h2database" % "h2" % "1.4.200" % "test",
-      "org.apache.spark" % ("spark-catalyst_" + sqlDeltaImportScalaVersion(scalaBinaryVersion.value)) % sparkVersion % "test",
-      "org.apache.spark" % ("spark-core_" + sqlDeltaImportScalaVersion(scalaBinaryVersion.value)) % sparkVersion % "test",
-      "org.apache.spark" % ("spark-sql_" + sqlDeltaImportScalaVersion(scalaBinaryVersion.value)) % sparkVersion % "test"
+      "org.apache.spark" % ("spark-catalyst_" + sqlDeltaImportScalaVersion(scalaBinaryVersion.value)) % defaultSparkVersion % "test",
+      "org.apache.spark" % ("spark-core_" + sqlDeltaImportScalaVersion(scalaBinaryVersion.value)) % defaultSparkVersion % "test",
+      "org.apache.spark" % ("spark-sql_" + sqlDeltaImportScalaVersion(scalaBinaryVersion.value)) % defaultSparkVersion % "test"
     )
   )

project/Unidoc.scala

Lines changed: 2 additions & 0 deletions
@@ -45,8 +45,10 @@ object Unidoc {
   implicit class UnidocHelper(val projectToUpdate: Project) {
     def configureUnidoc(
         docTitle: String = null,
+        generatedJavaDoc: Boolean = true,
         generateScalaDoc: Boolean = false
     ): Project = {
+      if (!generatedJavaDoc && !generateScalaDoc) return projectToUpdate

       var updatedProject: Project = projectToUpdate
       if (generateScalaDoc) {
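A hedged usage sketch of the new flag (the project name below is hypothetical; the real call site is the `.configureUnidoc(...)` change to the `spark` project above): when both flags are false, `configureUnidoc` now returns the project untouched, which is how delta-spark skips unidoc when compiling against Spark master.

```scala
// Illustrative only: with both doc flags disabled, configureUnidoc is a no-op.
lazy val exampleProject = (project in file("example"))
  .configureUnidoc(
    generatedJavaDoc = false,  // skip javadoc generation
    generateScalaDoc = false   // skip scaladoc too; the project is returned as-is
  )
```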
Lines changed: 33 additions & 0 deletions
@@ -0,0 +1,33 @@
+/*
+ * Copyright (2024) The Delta Lake Project Authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.delta.shims
+
+import org.apache.spark.sql.catalyst.parser.ParserInterface
+import org.apache.spark.sql.types.{StructField, StructType}
+
+object ColumnDefinitionShim {
+
+  /**
+   * Helps handle a breaking change in [[org.apache.spark.sql.catalyst.plans.logical.CreateTable]]
+   * between Spark 3.5 and Spark 4.0:
+   * - In 3.5, `CreateTable` accepts a `tableSchema: StructType`.
+   * - In 4.0, `CreateTable` accepts a `columns: Seq[ColumnDefinition]`.
+   */
+  def parseColumns(columns: Seq[StructField], sqlParser: ParserInterface): StructType = {
+    StructType(columns.toSeq)
+  }
+}
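A hedged sketch of how a call site might use this shim (everything except `ColumnDefinitionShim.parseColumns` is hypothetical): the caller forwards whatever column representation the active Spark version produces and always gets a `StructType` back, so the same Delta source compiles against both versions.

```scala
import org.apache.spark.sql.catalyst.parser.ParserInterface
import org.apache.spark.sql.delta.shims.ColumnDefinitionShim
import org.apache.spark.sql.types.{StructField, StructType}

object CreateTableSchemaExample {
  // Hypothetical call site: on Spark 3.5 the shim is a pass-through, while the
  // scala-spark-master variant would accept Spark 4.0's ColumnDefinition instead.
  def tableSchema(columns: Seq[StructField], parser: ParserInterface): StructType =
    ColumnDefinitionShim.parseColumns(columns, parser)
}
```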
Lines changed: 46 additions & 0 deletions
@@ -0,0 +1,46 @@
+/*
+ * Copyright (2024) The Delta Lake Project Authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.delta.shims
+
+import org.apache.spark.sql.SparkSession
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+import org.apache.spark.sql.execution.streaming.IncrementalExecution
+
+object IncrementalExecutionShim {
+
+  /**
+   * Handles a breaking change in the [[IncrementalExecution]] constructor between Spark 3.5 and
+   * 4.0:
+   * - Spark 3.5: no `isFirstBatch: Boolean` param
+   * - Spark 4.0: adds `isFirstBatch: Boolean` param
+   */
+  def newInstance(
+      sparkSession: SparkSession,
+      logicalPlan: LogicalPlan,
+      incrementalExecution: IncrementalExecution): IncrementalExecution = new IncrementalExecution(
+    sparkSession,
+    logicalPlan,
+    incrementalExecution.outputMode,
+    incrementalExecution.checkpointLocation,
+    incrementalExecution.queryId,
+    incrementalExecution.runId,
+    incrementalExecution.currentBatchId,
+    incrementalExecution.prevOffsetSeqMetadata,
+    incrementalExecution.offsetSeqMetadata,
+    incrementalExecution.watermarkPropagator
+  )
+}
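A hedged sketch of a caller (names other than `IncrementalExecutionShim.newInstance` are hypothetical): the shim copies every remaining constructor argument from an existing `IncrementalExecution`, so the call site stays agnostic to the extra `isFirstBatch` parameter that Spark 4.0 adds.

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.delta.shims.IncrementalExecutionShim
import org.apache.spark.sql.execution.streaming.IncrementalExecution

object ReplanExample {
  // Hypothetical call site: rebuild an incremental execution around a rewritten plan.
  def withNewPlan(
      spark: SparkSession,
      newPlan: LogicalPlan,
      existing: IncrementalExecution): IncrementalExecution =
    IncrementalExecutionShim.newInstance(spark, newPlan, existing)
}
```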
