This repository was archived by the owner on Mar 24, 2025. It is now read-only.

Commit f4d592b

Update dependency versions to match Spark 3.3; update Spark versions; updates for 0.15.0 release (#583)
* Update dependency versions to match Spark 3.3; update Spark versions; updates for 0.15.0 release
* Remove scoverage for compatibility reasons
1 parent: 1e25d7b

File tree: 5 files changed (+17 additions, -49 deletions)

.github/workflows/test_spark_3_2_java_11.yml

Lines changed: 1 addition & 1 deletion

@@ -14,4 +14,4 @@ jobs:
       with:
         java-version: '[email protected]'
     - name: Build and test
-      run: sbt -Dspark.testVersion=3.2.0 ++2.13.5 clean test
+      run: sbt -Dspark.testVersion=3.2.1 ++2.13.8 clean mimaReportBinaryIssues test
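
For reference, the `-Dspark.testVersion` flag in this step feeds the `sparkVersion` lookup in build.sbt (updated later in this commit), while `++2.13.8` selects the Scala cross-build. A minimal sketch of that pattern, using only values that appear in this diff:

```scala
// Sketch of the build.sbt pattern this CI flag relies on: a JVM system
// property set on the sbt command line (-Dspark.testVersion=3.2.1) picks
// the Spark version to compile and test against, with a default when unset.
val sparkVersion = sys.props.get("spark.testVersion").getOrElse("3.2.1")

// Spark artifacts resolve at that version; Provided keeps them out of the jar.
libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core" % sparkVersion % Provided,
  "org.apache.spark" %% "spark-sql" % sparkVersion % Provided
)
```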

.github/workflows/test_spark_3_java_8.yml

Lines changed: 1 addition & 5 deletions

@@ -14,8 +14,4 @@ jobs:
       with:
         java-version: '[email protected]'
     - name: Build and test
-      run: sbt -Dspark.testVersion=3.0.3 ++2.12.10 clean scalastyle test:scalastyle mimaReportBinaryIssues coverage test coverageReport
-    - name: Check code coverage
-      uses: codecov/codecov-action@v2
-      with:
-        fail_ci_if_error: true
+      run: sbt -Dspark.testVersion=3.1.3 ++2.12.15 clean scalastyle test:scalastyle mimaReportBinaryIssues test

README.md

Lines changed: 4 additions & 6 deletions

@@ -1,7 +1,5 @@
 # XML Data Source for Apache Spark
 
-[![codecov](https://codecov.io/gh/databricks/spark-xml/branch/master/graph/badge.svg)](https://codecov.io/gh/databricks/spark-xml)
-
 A library for parsing and querying XML data with [Apache Spark](https://spark.apache.org), for Spark SQL and DataFrames.
 The structure and test tools are mostly copied from [CSV Data Source for Spark](https://github.com/databricks/spark-csv).

@@ -16,15 +14,15 @@ You can link against this library in your program at the following coordinates:
 ```
 groupId: com.databricks
 artifactId: spark-xml_2.12
-version: 0.14.0
+version: 0.15.0
 ```
 
 ## Using with Spark shell
 
 This package can be added to Spark using the `--packages` command line option. For example, to include it when starting the spark shell:
 
 ```
-$SPARK_HOME/bin/spark-shell --packages com.databricks:spark-xml_2.12:0.14.0
+$SPARK_HOME/bin/spark-shell --packages com.databricks:spark-xml_2.12:0.15.0
 ```
 
 ## Features

@@ -399,7 +397,7 @@ Automatically infer schema (data types)
 ```R
 library(SparkR)
 
-sparkR.session("local[4]", sparkPackages = c("com.databricks:spark-xml_2.12:0.14.0"))
+sparkR.session("local[4]", sparkPackages = c("com.databricks:spark-xml_2.12:0.15.0"))
 
 df <- read.df("books.xml", source = "xml", rowTag = "book")

@@ -411,7 +409,7 @@ You can manually specify schema:
 ```R
 library(SparkR)
 
-sparkR.session("local[4]", sparkPackages = c("com.databricks:spark-xml_2.12:0.14.0"))
+sparkR.session("local[4]", sparkPackages = c("com.databricks:spark-xml_2.12:0.15.0"))
 customSchema <- structType(
   structField("_id", "string"),
   structField("author", "string"),
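
For context on what these coordinate bumps affect, here is a minimal Scala sketch of reading XML with the library. The `xml` short format name and the `rowTag` option are part of spark-xml's documented API; the `books.xml` file and `<book>` row tag are the README's own running example:

```scala
import org.apache.spark.sql.SparkSession

// Minimal sketch: start a local session and read XML with spark-xml.
// Assumes the application was launched with
//   --packages com.databricks:spark-xml_2.12:0.15.0
val spark = SparkSession.builder()
  .master("local[4]")
  .appName("spark-xml-example")
  .getOrCreate()

val df = spark.read
  .format("xml")              // short name registered by spark-xml
  .option("rowTag", "book")   // each <book> element becomes one row
  .load("books.xml")

df.printSchema()              // schema is inferred from the data by default
df.show()
```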

build.sbt

Lines changed: 10 additions & 34 deletions

@@ -1,26 +1,26 @@
 name := "spark-xml"
 
-version := "0.14.0"
+version := "0.15.0"
 
 organization := "com.databricks"
 
-scalaVersion := "2.12.10"
+scalaVersion := "2.12.15"
 
-crossScalaVersions := Seq("2.12.10", "2.13.5")
+crossScalaVersions := Seq("2.12.15", "2.13.8")
 
 scalacOptions := Seq("-unchecked", "-deprecation")
 
-val sparkVersion = sys.props.get("spark.testVersion").getOrElse("3.2.0")
+val sparkVersion = sys.props.get("spark.testVersion").getOrElse("3.2.1")
 
 // To avoid packaging it, it's Provided below
 autoScalaLibrary := false
 
 libraryDependencies ++= Seq(
-  "commons-io" % "commons-io" % "2.8.0",
-  "org.glassfish.jaxb" % "txw2" % "2.3.4",
-  "org.apache.ws.xmlschema" % "xmlschema-core" % "2.2.5",
-  "org.slf4j" % "slf4j-api" % "1.7.30" % Provided,
-  "org.scalatest" %% "scalatest" % "3.2.9" % Test,
+  "commons-io" % "commons-io" % "2.11.0",
+  "org.glassfish.jaxb" % "txw2" % "3.0.2",
+  "org.apache.ws.xmlschema" % "xmlschema-core" % "2.3.0",
+  "org.slf4j" % "slf4j-api" % "1.7.36" % Provided,
+  "org.scalatest" %% "scalatest" % "3.2.12" % Test,
   "com.novocode" % "junit-interface" % "0.11" % Test,
   "org.apache.spark" %% "spark-core" % sparkVersion % Provided,
   "org.apache.spark" %% "spark-sql" % sparkVersion % Provided,

@@ -78,35 +78,11 @@ fork := true
 // Prints JUnit tests in output
 testOptions in Test := Seq(Tests.Argument(TestFrameworks.JUnit, "-v"))
 
-mimaPreviousArtifacts := Set("com.databricks" %% "spark-xml" % "0.12.0")
+mimaPreviousArtifacts := Set("com.databricks" %% "spark-xml" % "0.14.0")
 
 mimaBinaryIssueFilters ++= {
   import com.typesafe.tools.mima.core.ProblemFilters.exclude
   import com.typesafe.tools.mima.core.DirectMissingMethodProblem
   Seq(
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.parsers.StaxXmlParser.convertField"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.parseXmlTimestamp"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.supportedXmlTimestampFormatters"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.parseXmlDate"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.supportedXmlDateFormatters"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.supportedXmlDateFormatters"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.parseXmlDate"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.supportedXmlTimestampFormatters"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.parseXmlTimestamp"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.isTimestamp"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.castTo"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.castTo$default$4")
   )
 }
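
The emptied `mimaBinaryIssueFilters` list above is where intentional binary-compatibility breaks get acknowledged after `mimaPreviousArtifacts` is bumped: when `sbt mimaReportBinaryIssues` (now run in CI, per the workflow changes in this commit) flags a deliberate removal, an exclusion is added back in the same form as the entries deleted here. A sketch of that pattern, with a hypothetical method name:

```scala
// Sketch: acknowledging a deliberate API removal so MiMa stops reporting it.
// SomeUtil.removedMethod is hypothetical, not part of this commit.
mimaBinaryIssueFilters ++= {
  import com.typesafe.tools.mima.core.ProblemFilters.exclude
  import com.typesafe.tools.mima.core.DirectMissingMethodProblem
  Seq(
    exclude[DirectMissingMethodProblem](
      "com.databricks.spark.xml.util.SomeUtil.removedMethod")
  )
}
```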

project/plugins.sbt

Lines changed: 1 addition & 3 deletions

@@ -8,6 +8,4 @@ addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3")
 
 addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.1.1")
 
-addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1")
-
-addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1")
+addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0")