Skip to content

Commit

Permalink
Update code examples on Spark installation page
Browse files Browse the repository at this point in the history
  • Loading branch information
johngrimes committed Jan 11, 2024
1 parent 30258ab commit 76e21ab
Showing 1 changed file with 36 additions and 10 deletions.
46 changes: 36 additions & 10 deletions site/docs/libraries/installation/spark.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,30 +28,54 @@ import TabItem from "@theme/TabItem";
<TabItem value="python" label="Python">

```python
from pathling import PathlingContext, find_jar
from pathling import PathlingContext
from pyspark.sql import SparkSession

spark = (
SparkSession.builder
.config("spark.jars", find_jar())
.config("spark.jars.packages", "io.delta:delta-core_2.12:2.2.0")
.config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
.config("spark.sql.catalog.spark_catalog",
"org.apache.spark.sql.delta.catalog.DeltaCatalog")
.getOrCreate()
SparkSession.builder.config(
    "spark.jars.packages",
    "au.csiro.pathling:library-runtime:6.4.2,"
    "io.delta:delta-core_2.12:2.4.0"
)
.config(
    "spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension"
)
.config(
    "spark.sql.catalog.spark_catalog",
    "org.apache.spark.sql.delta.catalog.DeltaCatalog",
)
.getOrCreate()
)

pc = PathlingContext.create(spark)
```

</TabItem>
<TabItem value="r" label="R">

```r
library(sparklyr)
library(pathling)

sc <- spark_connect(master = "local",
packages = c(paste0("au.csiro.pathling:library-runtime:", pathling_version()),
"io.delta:delta-core_2.12:2.4.0"),
config = list("sparklyr.shell.conf" = c(
"spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension",
"spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog"
)), version = "3.4.0")

pc <- pathling_connect(sc)
```

</TabItem>
<TabItem value="scala" label="Scala">

```scala
import au.csiro.pathling.library.PathlingContext

val spark = SparkSession.builder
.config("spark.jars.packages", "io.delta:delta-core_2.12:2.2.0")
.config("spark.jars.packages", "au.csiro.pathling:library-runtime:6.4.2," +
"io.delta:delta-core_2.12:2.4.0")
.config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
.config("spark.sql.catalog.spark_catalog",
"org.apache.spark.sql.delta.catalog.DeltaCatalog")
Expand All @@ -71,7 +95,9 @@ class MyApp {

public static void main(String[] args) {
SparkSession spark = SparkSession.builder()
.config("spark.jars.packages", "io.delta:delta-core_2.12:2.2.0")
.config("spark.jars.packages",
"au.csiro.pathling:library-runtime:6.4.2," +
"io.delta:delta-core_2.12:2.4.0")
.config("spark.sql.extensions",
"io.delta.sql.DeltaSparkSessionExtension")
.config("spark.sql.catalog.spark_catalog",
Expand Down

0 comments on commit 76e21ab

Please sign in to comment.