[SPARK-51219][SQL] Fix ShowTablesExec.isTempView to work with non-`V2SessionCatalog` catalogs

### What changes were proposed in this pull request?

When a non-built-in catalog is configured as the session catalog (for example, `org.apache.spark.sql.delta.catalog.DeltaCatalog`) and a temp view exists, running `spark.catalog.listTables()` fails:
```scala
spark.conf.set("spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog")

spark.range(0,2).createOrReplaceTempView("abc")
spark.catalog.listTables().show()

// org.apache.spark.sql.catalyst.parser.ParseException:
// [PARSE_EMPTY_STATEMENT] Syntax error, unexpected empty statement. SQLSTATE: 42617 (line 1, pos 0)
//
// == SQL ==
//
// ^^^
```

If the default `V2SessionCatalog` is in use, or there are no temp views, the same command runs without issues.

This behavior is caused by the `ShowTablesExec.isTempView` method, where the dedicated `isTempView` check was executed only for `V2SessionCatalog` catalogs, and `false` was returned for all other catalogs. This PR fixes that by using `session.sessionState.catalog.isTempView` for any session catalog instead.
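
With the fix in place, the repro above succeeds and the temp view shows up in the listing instead of triggering the parse error (an illustrative sketch; the exact `show()` columns depend on the Spark version):
```scala
spark.conf.set("spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog")

spark.range(0, 2).createOrReplaceTempView("abc")
spark.catalog.listTables().show()
// `abc` is now reported with isTemporary = true instead of failing with
// PARSE_EMPTY_STATEMENT.
```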

### Why are the changes needed?

To avoid unnecessary failures when a non-built-in v2 catalog is in use.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

New unit tests.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes apache#49959 from ostronaut/features/fix-ShowTablesExec-isTempView.

Authored-by: Dima <dimanowq@gmail.com>
Signed-off-by: Wenchen Fan <wenchen@databricks.com>
ostronaut authored and cloud-fan committed Feb 18, 2025
1 parent ef0685a commit aa37f89
Showing 2 changed files with 19 additions and 8 deletions.

ShowTablesExec.scala:
```diff
@@ -22,9 +22,10 @@ import scala.collection.mutable.ArrayBuffer
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.Attribute
 import org.apache.spark.sql.catalyst.util.StringUtils
-import org.apache.spark.sql.connector.catalog.{Identifier, TableCatalog}
+import org.apache.spark.sql.connector.catalog.{CatalogV2Util, Identifier, TableCatalog}
 import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.NamespaceHelper
 import org.apache.spark.sql.execution.LeafExecNode
+import org.apache.spark.util.ArrayImplicits._
 
 /**
  * Physical plan node for showing tables.
@@ -40,17 +41,17 @@ case class ShowTablesExec(
     val tables = catalog.listTables(namespace.toArray)
     tables.map { table =>
       if (pattern.map(StringUtils.filterPattern(Seq(table.name()), _).nonEmpty).getOrElse(true)) {
-        rows += toCatalystRow(table.namespace().quoted, table.name(), isTempView(table))
+        rows += toCatalystRow(table.namespace().quoted, table.name(), isTempView(table, catalog))
       }
     }
 
     rows.toSeq
   }
 
-  private def isTempView(ident: Identifier): Boolean = {
-    catalog match {
-      case s: V2SessionCatalog => s.isTempView(ident)
-      case _ => false
-    }
+  private def isTempView(ident: Identifier, catalog: TableCatalog): Boolean = {
+    if (CatalogV2Util.isSessionCatalog(catalog)) {
+      session.sessionState.catalog
+        .isTempView((ident.namespace() :+ ident.name()).toImmutableArraySeq)
+    } else false
   }
 }
```
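
For reference, the new code converts the v2 `Identifier` into the name-part sequence that the v1 `SessionCatalog.isTempView` expects. A minimal standalone sketch of that conversion (using plain `.toSeq` in place of Spark's internal `toImmutableArraySeq` helper):
```scala
import org.apache.spark.sql.connector.catalog.Identifier

val ident = Identifier.of(Array("default"), "abc")
// Namespace parts followed by the table name: Seq("default", "abc").
val nameParts = (ident.namespace() :+ ident.name()).toSeq
assert(nameParts == Seq("default", "abc"))
```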

CatalogSuite.scala:
```diff
@@ -31,7 +31,7 @@ import org.apache.spark.sql.catalyst.expressions.Expression
 import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
 import org.apache.spark.sql.catalyst.plans.logical.Range
 import org.apache.spark.sql.classic.Catalog
-import org.apache.spark.sql.connector.FakeV2Provider
+import org.apache.spark.sql.connector.{FakeV2Provider, InMemoryTableSessionCatalog}
 import org.apache.spark.sql.connector.catalog.{CatalogManager, Identifier, InMemoryCatalog}
 import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.CatalogHelper
 import org.apache.spark.sql.connector.catalog.functions._
@@ -271,6 +271,16 @@ class CatalogSuite extends SharedSparkSession with AnalysisTest with BeforeAndAf
       Set("testcat.my_db.my_table2"))
   }
 
+  test("SPARK-51219: list tables with non-builtin V2 catalog") {
+    withSQLConf(SQLConf.V2_SESSION_CATALOG_IMPLEMENTATION.key ->
+      classOf[InMemoryTableSessionCatalog].getName) {
+      createTable("my_table")
+      createTempTable("my_temp_table")
+      assert(spark.catalog.listTables().collect().map(_.name).toSet ==
+        Set("my_table", "my_temp_table"))
+    }
+  }
+
   test("list tables with database") {
     assert(spark.catalog.listTables("default").collect().isEmpty)
     createDatabase("my_db1")
```
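
To run just the new test locally, scalatest's `-z` filter can be used from sbt, e.g. `build/sbt "sql/testOnly *CatalogSuite -- -z SPARK-51219"` (assuming a standard Spark checkout).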
