From 74b98022b320985a5913cd8aaa74c28e6ce54240 Mon Sep 17 00:00:00 2001
From: Sean Owen
Date: Mon, 7 Dec 2020 10:52:00 -0600
Subject: [PATCH] Update README for 0.11.0 (#507)

---
 README.md | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/README.md b/README.md
index 85d6a8ec..7d03ffd2 100644
--- a/README.md
+++ b/README.md
@@ -26,7 +26,7 @@ You can link against this library in your program at the following coordinates:
 ```
 groupId: com.databricks
 artifactId: spark-xml_2.11
-version: 0.10.0
+version: 0.11.0
 ```
 
 ### Scala 2.12
@@ -34,7 +34,7 @@ version: 0.10.0
 ```
 groupId: com.databricks
 artifactId: spark-xml_2.12
-version: 0.10.0
+version: 0.11.0
 ```
 
 ## Using with Spark shell
@@ -43,12 +43,12 @@ This package can be added to Spark using the `--packages` command line option. F
 
 ### Spark compiled with Scala 2.11
 ```
-$SPARK_HOME/bin/spark-shell --packages com.databricks:spark-xml_2.11:0.10.0
+$SPARK_HOME/bin/spark-shell --packages com.databricks:spark-xml_2.11:0.11.0
 ```
 
 ### Spark compiled with Scala 2.12
 ```
-$SPARK_HOME/bin/spark-shell --packages com.databricks:spark-xml_2.12:0.10.0
+$SPARK_HOME/bin/spark-shell --packages com.databricks:spark-xml_2.12:0.11.0
 ```
 
 ## Features
@@ -409,7 +409,7 @@ Automatically infer schema (data types)
 
 ```R
 library(SparkR)
-sparkR.session("local[4]", sparkPackages = c("com.databricks:spark-xml_2.11:0.10.0"))
+sparkR.session("local[4]", sparkPackages = c("com.databricks:spark-xml_2.11:0.11.0"))
 
 df <- read.df("books.xml", source = "xml", rowTag = "book")
 
@@ -421,7 +421,7 @@ You can manually specify schema:
 
 ```R
 library(SparkR)
-sparkR.session("local[4]", sparkPackages = c("com.databricks:spark-xml_2.11:0.10.0"))
+sparkR.session("local[4]", sparkPackages = c("com.databricks:spark-xml_2.11:0.11.0"))
 customSchema <- structType(
   structField("_id", "string"),
   structField("author", "string"),
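
For reference, a minimal end-to-end SparkR sketch of the manual-schema usage the last hunk touches. It assumes the `books.xml` sample referenced in the README and uses only the two fields visible in the hunk (the README's full example lists more); the coordinates are the 0.11.0 ones this patch introduces.

```R
library(SparkR)

# Local session pulling the Scala 2.11 build at the new 0.11.0 version.
sparkR.session("local[4]", sparkPackages = c("com.databricks:spark-xml_2.11:0.11.0"))

# Partial schema: supplying a schema skips inference, and only the
# columns named here are read.
customSchema <- structType(
  structField("_id", "string"),
  structField("author", "string"))

# rowTag selects the XML element that becomes one DataFrame row.
df <- read.df("books.xml", source = "xml", schema = customSchema, rowTag = "book")
head(df)
```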