@@ -797,91 +797,32 @@ lazy val flink = (project in file("flink"))
797797 (Test / test) := ((Test / test) dependsOn (Compile / unidoc)).value
798798 )
799799
800- lazy val core = (project in file("core"))
800+ lazy val kernel = (project in file("kernel"))
801801 .settings(
802- name := "delta-core",
802+ name := "delta-kernel",
803803 commonSettings,
804804 skipReleaseSettings,
805805 libraryDependencies ++= Seq(
806806
807807 )
808808 )
809809
810- lazy val defaultCore = (project in file("default-core"))
811- .dependsOn(core)
810+ lazy val kernelDefault = (project in file("kernel-default"))
811+ .dependsOn(kernel)
812812 .settings(
813- name := "delta-core-default",
813+ name := "delta-kernel-default",
814814 commonSettings,
815815 skipReleaseSettings,
816816 libraryDependencies ++= Seq(
817817 "org.apache.hadoop" % "hadoop-client-api" % "3.3.1", // Configuration, Path
818818 "io.delta" % "delta-storage" % "2.2.0", // LogStore
819- "com.fasterxml.jackson.core" % "jackson-databind" % "2.14.2", // ObjectMapper
820-
821- /*
822- // ---------------------------------------------------------------
823- // ------ COPIED VERBATIM FROM DELTA LAKE ON SPARK BUILD.SBT -----
824- // ---------------------------------------------------------------
825- // IntelliJ output
826- // A full rebuild may help if 'SparkFunSuite.class' was compiled against an incompatible version of org.scalatest.
827- // SBT output
828- // [error] /home/scott.sandre/connectors/default-core/src/test/scala/io/delta/core/TableSuite.scala:23:41: Symbol 'type org.scalatest.FunSuite' is missing from the classpath.
829- // [error] This symbol is required by 'class org.apache.spark.SparkFunSuite'.
830- // [error] Make sure that type FunSuite is in your classpath and check for conflicting dependencies with `-Ylog-classpath`.
831- // [error] A full rebuild may help if 'SparkFunSuite.class' was compiled against an incompatible version of org.scalatest.
832- // [error] class TableSuite extends QueryTest with SharedSparkSession {
833- // [error] ^
834- // [error] /home/scott.sandre/connectors/default-core/src/test/scala/io/delta/core/TableSuite.scala:29:3: package test is not a value
835- // [error] test("basic read") {
836- // Adding test classifier seems to break transitive resolution of the core dependencies
837- "org.apache.spark" %% "spark-hive" % sparkVersion % "provided",
838- "org.apache.spark" %% "spark-sql" % sparkVersion % "provided",
839- "org.apache.spark" %% "spark-core" % sparkVersion % "provided",
840- "org.apache.spark" %% "spark-catalyst" % sparkVersion % "provided",
841-
842- // Test deps
843- "org.scalatest" %% "scalatest" % "3.2.9" % "test",
844- "org.scalatestplus" %% "scalacheck-1-15" % "3.2.9.0" % "test",
845- "junit" % "junit" % "4.12" % "test",
846- "com.novocode" % "junit-interface" % "0.11" % "test",
847- "org.apache.spark" %% "spark-catalyst" % sparkVersion % "test" classifier "tests",
848- "org.apache.spark" %% "spark-core" % sparkVersion % "test" classifier "tests",
849- "org.apache.spark" %% "spark-sql" % sparkVersion % "test" classifier "tests",
850- "org.apache.spark" %% "spark-hive" % sparkVersion % "test" classifier "tests",
851- */
852-
853- /*
854- // ------------------------------------------------
855- // ------ COPIED VERBATIM FROM GOLDEN TABLES -----
856- // ------------------------------------------------
857- // IntelliJ output (when running test)
858- // An exception or error caused a run to abort: 'void org.apache.hadoop.security.HadoopKerberosName.setRuleMechanism(java.lang.String)'
859- // java.lang.NoSuchMethodError: 'void org.apache.hadoop.security.HadoopKerberosName.setRuleMechanism(java.lang.String)'
860- // SBT output
861- // Test Dependencies (when running test)
862- // 23/03/24 22:07:48 WARN SparkSession: Cannot use io.delta.sql.DeltaSparkSessionExtension to configure session extensions.
863- // java.lang.ClassNotFoundException: io.delta.sql.DeltaSparkSessionExtension
864- "org.scalatest" %% "scalatest" % "3.1.0" % "test",
865- "org.apache.spark" % "spark-sql_2.12" % "3.2.0" % "test",
866- "io.delta" % "delta-core_2.12" % "1.1.0" % "test",
867- "commons-io" % "commons-io" % "2.8.0" % "test",
868- "org.apache.spark" % "spark-catalyst_2.12" % "3.2.0" % "test" classifier "tests",
869- "org.apache.spark" % "spark-core_2.12" % "3.2.0" % "test" classifier "tests",
870- "org.apache.spark" % "spark-sql_2.12" % "3.2.0" % "test" classifier "tests"
871- */
872-
873- // Verbatim from golden tables, trying out different test / classifier tests combinations
874- // test and classifier tests -> Cause: java.lang.ClassNotFoundException: delta.DefaultSource
875- // just test -> object QueryTest is not a member of package org.apache.spark.sql
876- // just classifier tests -> Symbol 'type org.apache.spark.sql.SparkSession' is missing from the classpath.
877- // "io.delta" % "delta-core_2.12" % "1.1.0" % "test",
878- // "org.apache.spark" % "spark-catalyst_2.12" % "3.2.0" % "test",
879- // "org.apache.spark" % "spark-core_2.12" % "3.2.0" % "test",
880- // "org.apache.spark" % "spark-sql_2.12" % "3.2.0" % "test",
881- //
882- // "io.delta" % "delta-core_2.12" % "1.1.0" % "test" classifier "tests",
883- // "org.apache.spark" % "spark-catalyst_2.12" % "3.2.0" % "test" classifier "tests",
884- // "org.apache.spark" % "spark-core_2.12" % "3.2.0" % "test" classifier "tests",
885- // "org.apache.spark" % "spark-sql_2.12" % "3.2.0" % "test" classifier "tests"
819+ "com.fasterxml.jackson.core" % "jackson-databind" % "2.13.5", // ObjectMapper
820+
821+ "org.scalatest" %% "scalatest" % "3.2.15" % "test",
822+ "io.delta" %% "delta-core" % "2.2.0" % "test",
823+ "org.apache.spark" %% "spark-sql" % "3.3.2" % "test", // SparkSession
824+ "org.apache.spark" %% "spark-sql" % "3.3.2" % "test" classifier "tests",
825+ "org.apache.spark" %% "spark-core" % "3.3.2" % "test" classifier "tests",
826+ "org.apache.spark" %% "spark-catalyst" % "3.3.2" % "test" classifier "tests",
886827 )
887828 )
0 commit comments