Skip to content

Commit bdf601f

Browse files
committed
WIP can't get spark test to run
1 parent 0c4591e commit bdf601f

File tree

1 file changed

+59
-6
lines changed

1 file changed

+59
-6
lines changed

build.sbt

Lines changed: 59 additions & 6 deletions
Original file line number · Diff line number · Diff line change
@@ -84,10 +84,10 @@ lazy val commonSettings = Seq(
84 84
// Can be run explicitly via: build/sbt $module/checkstyle
85 85
// Will automatically be run during compilation (e.g. build/sbt compile)
86 86
// and during tests (e.g. build/sbt test)
87-
checkstyleConfigLocation := CheckstyleConfigLocation.File("dev/checkstyle.xml"),
88-
checkstyleSeverityLevel := Some(CheckstyleSeverityLevel.Error),
89-
(Compile / checkstyle) := (Compile / checkstyle).triggeredBy(Compile / compile).value,
90-
(Test / checkstyle) := (Test / checkstyle).triggeredBy(Test / compile).value
87+
// checkstyleConfigLocation := CheckstyleConfigLocation.File("dev/checkstyle.xml"),
88+
// checkstyleSeverityLevel := Some(CheckstyleSeverityLevel.Error),
89+
// (Compile / checkstyle) := (Compile / checkstyle).triggeredBy(Compile / compile).value,
90+
// (Test / checkstyle) := (Test / checkstyle).triggeredBy(Test / compile).value
91 91
)
92 92

93 93
lazy val releaseSettings = Seq(
@@ -814,7 +814,60 @@ lazy val defaultCore = (project in file("default-core"))
814 814
commonSettings,
815 815
skipReleaseSettings,
816 816
libraryDependencies ++= Seq(
817-
818-
"com.fasterxml.jackson.core" % "jackson-databind" % "2.14.2" // ObjectMapper
817+
"org.apache.hadoop" % "hadoop-client" % "3.1.0", // Configuration, Path
818+
"io.delta" % "delta-storage" % "2.2.0", // LogStore
819+
"com.fasterxml.jackson.core" % "jackson-databind" % "2.14.2", // ObjectMapper
820+
821+
/*
822+
// ---------------------------------------------------------------
823+
// ------ COPIED VERBATIM FROM DELTA LAKE ON SPARK BUILD.SBT -----
824+
// ---------------------------------------------------------------
825+
// IntelliJ output
826+
// A full rebuild may help if 'SparkFunSuite.class' was compiled against an incompatible version of org.scalatest.
827+
// SBT output
828+
// [error] /home/scott.sandre/connectors/default-core/src/test/scala/io/delta/core/TableSuite.scala:23:41: Symbol 'type org.scalatest.FunSuite' is missing from the classpath.
829+
// [error] This symbol is required by 'class org.apache.spark.SparkFunSuite'.
830+
// [error] Make sure that type FunSuite is in your classpath and check for conflicting dependencies with `-Ylog-classpath`.
831+
// [error] A full rebuild may help if 'SparkFunSuite.class' was compiled against an incompatible version of org.scalatest.
832+
// [error] class TableSuite extends QueryTest with SharedSparkSession {
833+
// [error] ^
834+
// [error] /home/scott.sandre/connectors/default-core/src/test/scala/io/delta/core/TableSuite.scala:29:3: package test is not a value
835+
// [error] test("basic read") {
836+
// Adding test classifier seems to break transitive resolution of the core dependencies
837+
"org.apache.spark" %% "spark-hive" % sparkVersion % "provided",
838+
"org.apache.spark" %% "spark-sql" % sparkVersion % "provided",
839+
"org.apache.spark" %% "spark-core" % sparkVersion % "provided",
840+
"org.apache.spark" %% "spark-catalyst" % sparkVersion % "provided",
841+
842+
// Test deps
843+
"org.scalatest" %% "scalatest" % "3.2.9" % "test",
844+
"org.scalatestplus" %% "scalacheck-1-15" % "3.2.9.0" % "test",
845+
"junit" % "junit" % "4.12" % "test",
846+
"com.novocode" % "junit-interface" % "0.11" % "test",
847+
"org.apache.spark" %% "spark-catalyst" % sparkVersion % "test" classifier "tests",
848+
"org.apache.spark" %% "spark-core" % sparkVersion % "test" classifier "tests",
849+
"org.apache.spark" %% "spark-sql" % sparkVersion % "test" classifier "tests",
850+
"org.apache.spark" %% "spark-hive" % sparkVersion % "test" classifier "tests",
851+
*/
852+
853+
/*
854+
// ------------------------------------------------
855+
// ------ COPIED VERBATIM FROM GOLDEN TABLES -----
856+
// ------------------------------------------------
857+
// IntelliJ output (when running test)
858+
// An exception or error caused a run to abort: 'void org.apache.hadoop.security.HadoopKerberosName.setRuleMechanism(java.lang.String)'
859+
// java.lang.NoSuchMethodError: 'void org.apache.hadoop.security.HadoopKerberosName.setRuleMechanism(java.lang.String)'
860+
// SBT output
861+
// Test Dependencies (when running test)
862+
// 23/03/24 22:07:48 WARN SparkSession: Cannot use io.delta.sql.DeltaSparkSessionExtension to configure session extensions.
863+
// java.lang.ClassNotFoundException: io.delta.sql.DeltaSparkSessionExtension
864+
"org.scalatest" %% "scalatest" % "3.1.0" % "test",
865+
"org.apache.spark" % "spark-sql_2.12" % "3.2.0" % "test",
866+
"io.delta" % "delta-core_2.12" % "1.1.0" % "test",
867+
"commons-io" % "commons-io" % "2.8.0" % "test",
868+
"org.apache.spark" % "spark-catalyst_2.12" % "3.2.0" % "test" classifier "tests",
869+
"org.apache.spark" % "spark-core_2.12" % "3.2.0" % "test" classifier "tests",
870+
"org.apache.spark" % "spark-sql_2.12" % "3.2.0" % "test" classifier "tests"
871+
*/
819 872
)
820 873
)

0 commit comments

Comments (0)