Update dependency versions to match Spark 3.3; update Spark versions; updates for 0.15.0 release (#583)

* Update dependency versions to match Spark 3.3; update Spark versions; updates for 0.15.0 release
* Remove scoverage for compatibility reasons
srowen authored Jun 3, 2022
1 parent 1e25d7b commit f4d592b
Showing 5 changed files with 17 additions and 49 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/test_spark_3_2_java_11.yml
@@ -14,4 +14,4 @@ jobs:
with:
java-version: 'adopt@1.11'
- name: Build and test
-run: sbt -Dspark.testVersion=3.2.0 ++2.13.5 clean test
+run: sbt -Dspark.testVersion=3.2.1 ++2.13.8 clean mimaReportBinaryIssues test
6 changes: 1 addition & 5 deletions .github/workflows/test_spark_3_java_8.yml
@@ -14,8 +14,4 @@ jobs:
with:
java-version: 'adopt@1.8'
- name: Build and test
-run: sbt -Dspark.testVersion=3.0.3 ++2.12.10 clean scalastyle test:scalastyle mimaReportBinaryIssues coverage test coverageReport
-- name: Check code coverage
-  uses: codecov/codecov-action@v2
-  with:
-    fail_ci_if_error: true
+run: sbt -Dspark.testVersion=3.1.3 ++2.12.15 clean scalastyle test:scalastyle mimaReportBinaryIssues test
10 changes: 4 additions & 6 deletions README.md
@@ -1,7 +1,5 @@
# XML Data Source for Apache Spark

-[![codecov](https://codecov.io/gh/databricks/spark-xml/branch/master/graph/badge.svg)](https://codecov.io/gh/databricks/spark-xml)
-
A library for parsing and querying XML data with [Apache Spark](https://spark.apache.org), for Spark SQL and DataFrames.
The structure and test tools are mostly copied from [CSV Data Source for Spark](https://github.com/databricks/spark-csv).

@@ -16,15 +14,15 @@ You can link against this library in your program at the following coordinates:
```
groupId: com.databricks
artifactId: spark-xml_2.12
-version: 0.14.0
+version: 0.15.0
```
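
For sbt builds, these coordinates translate to a single dependency line; a minimal sketch (not part of this diff), where `%%` appends the build's Scala binary version (`_2.12` or `_2.13`):

```scala
// Sketch: depend on the 0.15.0 release that this commit prepares.
libraryDependencies += "com.databricks" %% "spark-xml" % "0.15.0"
```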

## Using with Spark shell

This package can be added to Spark using the `--packages` command line option. For example, to include it when starting the spark shell:

```
-$SPARK_HOME/bin/spark-shell --packages com.databricks:spark-xml_2.12:0.14.0
+$SPARK_HOME/bin/spark-shell --packages com.databricks:spark-xml_2.12:0.15.0
```
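
With the package on the classpath, reading XML from the shell goes through the usual `DataFrameReader`; a minimal sketch, assuming a local `books.xml` whose rows are `<book>` elements:

```scala
// Sketch: treat each <book> element as one DataFrame row.
val df = spark.read
  .format("xml")
  .option("rowTag", "book")
  .load("books.xml")

df.printSchema()
```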

## Features
@@ -399,7 +397,7 @@ Automatically infer schema (data types)
```R
library(SparkR)

-sparkR.session("local[4]", sparkPackages = c("com.databricks:spark-xml_2.12:0.14.0"))
+sparkR.session("local[4]", sparkPackages = c("com.databricks:spark-xml_2.12:0.15.0"))

df <- read.df("books.xml", source = "xml", rowTag = "book")

@@ -411,7 +409,7 @@ You can manually specify schema:
```R
library(SparkR)

-sparkR.session("local[4]", sparkPackages = c("com.databricks:spark-xml_2.12:0.14.0"))
+sparkR.session("local[4]", sparkPackages = c("com.databricks:spark-xml_2.12:0.15.0"))
customSchema <- structType(
structField("_id", "string"),
structField("author", "string"),
44 changes: 10 additions & 34 deletions build.sbt
@@ -1,26 +1,26 @@
name := "spark-xml"

version := "0.14.0"
version := "0.15.0"

organization := "com.databricks"

scalaVersion := "2.12.10"
scalaVersion := "2.12.15"

-crossScalaVersions := Seq("2.12.10", "2.13.5")
+crossScalaVersions := Seq("2.12.15", "2.13.8")

scalacOptions := Seq("-unchecked", "-deprecation")

-val sparkVersion = sys.props.get("spark.testVersion").getOrElse("3.2.0")
+val sparkVersion = sys.props.get("spark.testVersion").getOrElse("3.2.1")

// To avoid packaging it, it's Provided below
autoScalaLibrary := false

libraryDependencies ++= Seq(
"commons-io" % "commons-io" % "2.8.0",
"org.glassfish.jaxb" % "txw2" % "2.3.4",
"org.apache.ws.xmlschema" % "xmlschema-core" % "2.2.5",
"org.slf4j" % "slf4j-api" % "1.7.30" % Provided,
"org.scalatest" %% "scalatest" % "3.2.9" % Test,
"commons-io" % "commons-io" % "2.11.0",
"org.glassfish.jaxb" % "txw2" % "3.0.2",
"org.apache.ws.xmlschema" % "xmlschema-core" % "2.3.0",
"org.slf4j" % "slf4j-api" % "1.7.36" % Provided,
"org.scalatest" %% "scalatest" % "3.2.12" % Test,
"com.novocode" % "junit-interface" % "0.11" % Test,
"org.apache.spark" %% "spark-core" % sparkVersion % Provided,
"org.apache.spark" %% "spark-sql" % sparkVersion % Provided,
@@ -78,35 +78,11 @@ fork := true
// Prints JUnit tests in output
testOptions in Test := Seq(Tests.Argument(TestFrameworks.JUnit, "-v"))

-mimaPreviousArtifacts := Set("com.databricks" %% "spark-xml" % "0.12.0")
+mimaPreviousArtifacts := Set("com.databricks" %% "spark-xml" % "0.14.0")

-mimaBinaryIssueFilters ++= {
-  import com.typesafe.tools.mima.core.ProblemFilters.exclude
-  import com.typesafe.tools.mima.core.DirectMissingMethodProblem
-  Seq(
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.parsers.StaxXmlParser.convertField"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.parseXmlTimestamp"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.supportedXmlTimestampFormatters"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.parseXmlDate"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.supportedXmlDateFormatters"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.supportedXmlDateFormatters"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.parseXmlDate"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.supportedXmlTimestampFormatters"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.parseXmlTimestamp"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.isTimestamp"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.castTo"),
-    exclude[DirectMissingMethodProblem](
-      "com.databricks.spark.xml.util.TypeCast.castTo$default$4")
-  )
-}
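
With the stale exclusions gone, MiMa now enforces full binary compatibility against 0.14.0. If a deliberate break is needed in a future release, a filter can be reintroduced; a sketch using the same API as the block deleted above (the `castTo` target is only an example):

```scala
// Sketch: re-adding a single MiMa exclusion for an intentional removal.
import com.typesafe.tools.mima.core.ProblemFilters.exclude
import com.typesafe.tools.mima.core.DirectMissingMethodProblem

mimaBinaryIssueFilters ++= Seq(
  exclude[DirectMissingMethodProblem]("com.databricks.spark.xml.util.TypeCast.castTo")
)
```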
4 changes: 1 addition & 3 deletions project/plugins.sbt
@@ -8,6 +8,4 @@ addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3")

addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.1.1")

addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1")

addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1")
addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.0")
