From 9a9575b766d0cf2b8a1fa795e77fda6cace33600 Mon Sep 17 00:00:00 2001 From: cty123 Date: Fri, 7 Nov 2025 18:00:45 -0500 Subject: [PATCH 1/5] Supports Decimal type data in SparkConnectResultSet. --- .../client/jdbc/SparkConnectResultSet.scala | 12 ++++++++--- .../client/jdbc/util/JdbcTypeUtils.scala | 9 +++++++- .../jdbc/SparkConnectJdbcDataTypeSuite.scala | 21 +++++++++++++++++++ 3 files changed, 38 insertions(+), 4 deletions(-) diff --git a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectResultSet.scala b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectResultSet.scala index 0745ddc099111..23c2315400fff 100644 --- a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectResultSet.scala +++ b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectResultSet.scala @@ -257,11 +257,17 @@ class SparkConnectResultSet( override def getCharacterStream(columnLabel: String): Reader = throw new SQLFeatureNotSupportedException - override def getBigDecimal(columnIndex: Int): java.math.BigDecimal = - throw new SQLFeatureNotSupportedException + override def getBigDecimal(columnIndex: Int): java.math.BigDecimal = { + if (currentRow.isNullAt(columnIndex - 1)) { + _wasNull = true + return null + } + _wasNull = false + currentRow.getDecimal(columnIndex - 1) + } override def getBigDecimal(columnLabel: String): java.math.BigDecimal = - throw new SQLFeatureNotSupportedException + getBigDecimal(findColumn(columnLabel)) override def isBeforeFirst: Boolean = { checkOpen() diff --git a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala index 55e3d29c99a5e..8417375ad77fe 100644 --- 
a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala +++ b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala @@ -18,6 +18,7 @@ package org.apache.spark.sql.connect.client.jdbc.util import java.lang.{Boolean => JBoolean, Byte => JByte, Double => JDouble, Float => JFloat, Long => JLong, Short => JShort} +import java.math.{BigDecimal => JBigDecimal} import java.sql.{Array => _, _} import org.apache.spark.sql.types._ @@ -34,6 +35,7 @@ private[jdbc] object JdbcTypeUtils { case FloatType => Types.FLOAT case DoubleType => Types.DOUBLE case StringType => Types.VARCHAR + case DecimalType.Fixed(_, _) => Types.DECIMAL case other => throw new SQLFeatureNotSupportedException(s"DataType $other is not supported yet.") } @@ -48,12 +50,14 @@ private[jdbc] object JdbcTypeUtils { case FloatType => classOf[JFloat].getName case DoubleType => classOf[JDouble].getName case StringType => classOf[String].getName + case DecimalType.Fixed(_, _) => classOf[JBigDecimal].getName case other => throw new SQLFeatureNotSupportedException(s"DataType $other is not supported yet.") } def isSigned(field: StructField): Boolean = field.dataType match { - case ByteType | ShortType | IntegerType | LongType | FloatType | DoubleType => true + case ByteType | ShortType | IntegerType | LongType | FloatType | DoubleType + | DecimalType.Fixed(_, _) => true case NullType | BooleanType | StringType => false case other => throw new SQLFeatureNotSupportedException(s"DataType $other is not supported yet.") @@ -69,6 +73,7 @@ private[jdbc] object JdbcTypeUtils { case FloatType => 7 case DoubleType => 15 case StringType => 255 + case DecimalType.Fixed(p, _) => p case other => throw new SQLFeatureNotSupportedException(s"DataType $other is not supported yet.") } @@ -77,6 +82,7 @@ private[jdbc] object JdbcTypeUtils { case FloatType => 7 case DoubleType => 15 case NullType | BooleanType | ByteType | ShortType | 
IntegerType | LongType | StringType => 0 + case DecimalType.Fixed(_, s) => s case other => throw new SQLFeatureNotSupportedException(s"DataType $other is not supported yet.") } @@ -90,6 +96,7 @@ private[jdbc] object JdbcTypeUtils { case DoubleType => 24 case StringType => getPrecision(field) + case DecimalType.Fixed(p, s) => p + (if (s == 0) 0 else 1) case other => throw new SQLFeatureNotSupportedException(s"DataType $other is not supported yet.") } diff --git a/sql/connect/client/jdbc/src/test/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectJdbcDataTypeSuite.scala b/sql/connect/client/jdbc/src/test/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectJdbcDataTypeSuite.scala index 619b279310eb3..bc4e1c075a229 100644 --- a/sql/connect/client/jdbc/src/test/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectJdbcDataTypeSuite.scala +++ b/sql/connect/client/jdbc/src/test/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectJdbcDataTypeSuite.scala @@ -215,4 +215,25 @@ class SparkConnectJdbcDataTypeSuite extends ConnectFunSuite with RemoteSparkSess assert(metaData.getColumnDisplaySize(1) === 255) } } + + test("get decimal type") { + withExecuteQuery("SELECT cast('123.45' as DECIMAL(37, 2))") { rs => + assert(rs.next()) + assert(rs.getBigDecimal(1) === new java.math.BigDecimal("123.45")) + assert(!rs.wasNull) + assert(!rs.next()) + + val metaData = rs.getMetaData + assert(metaData.getColumnCount === 1) + assert(metaData.getColumnName(1) === "CAST(123.45 AS DECIMAL(37,2))") + assert(metaData.getColumnLabel(1) === "CAST(123.45 AS DECIMAL(37,2))") + assert(metaData.getColumnType(1) === Types.DECIMAL) + assert(metaData.getColumnTypeName(1) === "DECIMAL(37,2)") + assert(metaData.getColumnClassName(1) === "java.math.BigDecimal") + assert(metaData.isSigned(1) === true) + assert(metaData.getPrecision(1) === 37) + assert(metaData.getScale(1) === 2) + assert(metaData.getColumnDisplaySize(1) === 38) + } + } } From 
59105b454cf1b9c9720f8f089112f8c162c18c2f Mon Sep 17 00:00:00 2001 From: cty Date: Sat, 8 Nov 2025 13:23:37 -0500 Subject: [PATCH 2/5] Update sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala Co-authored-by: Cheng Pan --- .../spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala index 8417375ad77fe..e8281047e8983 100644 --- a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala +++ b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala @@ -56,8 +56,8 @@ private[jdbc] object JdbcTypeUtils { } def isSigned(field: StructField): Boolean = field.dataType match { - case ByteType | ShortType | IntegerType | LongType | FloatType | DoubleType - | DecimalType.Fixed(_, _) => true + case ByteType | ShortType | IntegerType | LongType | FloatType | DoubleType | + _: DecimalType => true case NullType | BooleanType | StringType => false case other => throw new SQLFeatureNotSupportedException(s"DataType $other is not supported yet.") From f80ecf90c10e4ecd979b4bab2012153104d00cfb Mon Sep 17 00:00:00 2001 From: cty123 Date: Sat, 8 Nov 2025 14:48:53 -0500 Subject: [PATCH 3/5] update edge cases of the column display size calculation --- .../client/jdbc/util/JdbcTypeUtils.scala | 7 ++- .../jdbc/SparkConnectJdbcDataTypeSuite.scala | 46 ++++++++++++------- 2 files changed, 35 insertions(+), 18 deletions(-) diff --git a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala index 
e8281047e8983..b0e127cd2488d 100644 --- a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala +++ b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala @@ -96,7 +96,12 @@ private[jdbc] object JdbcTypeUtils { case DoubleType => 24 case StringType => getPrecision(field) - case DecimalType.Fixed(p, s) => p + (if (s == 0) 0 else 1) + // precision + sign(+/-) + leading zero + decimal point, like DECIMAL(5,5) = -0.12345 + case DecimalType.Fixed(p, s) if p == s => p + 3 + // precision + sign(+/-), like DECIMAL(5,0) = -12345 + case DecimalType.Fixed(p, s) if s == 0 => p + 1 + // precision + sign(+/-) + decimal point, like DECIMAL(5,2) = -123.45 + case DecimalType.Fixed(p, s) => p + 2 case other => throw new SQLFeatureNotSupportedException(s"DataType $other is not supported yet.") } diff --git a/sql/connect/client/jdbc/src/test/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectJdbcDataTypeSuite.scala b/sql/connect/client/jdbc/src/test/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectJdbcDataTypeSuite.scala index bc4e1c075a229..089c1d7fdf0d4 100644 --- a/sql/connect/client/jdbc/src/test/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectJdbcDataTypeSuite.scala +++ b/sql/connect/client/jdbc/src/test/scala/org/apache/spark/sql/connect/client/jdbc/SparkConnectJdbcDataTypeSuite.scala @@ -217,23 +217,35 @@ class SparkConnectJdbcDataTypeSuite extends ConnectFunSuite with RemoteSparkSess } test("get decimal type") { - withExecuteQuery("SELECT cast('123.45' as DECIMAL(37, 2))") { rs => - assert(rs.next()) - assert(rs.getBigDecimal(1) === new java.math.BigDecimal("123.45")) - assert(!rs.wasNull) - assert(!rs.next()) - - val metaData = rs.getMetaData - assert(metaData.getColumnCount === 1) - assert(metaData.getColumnName(1) === "CAST(123.45 AS DECIMAL(37,2))") - assert(metaData.getColumnLabel(1) === "CAST(123.45 AS DECIMAL(37,2))") - 
assert(metaData.getColumnType(1) === Types.DECIMAL) - assert(metaData.getColumnTypeName(1) === "DECIMAL(37,2)") - assert(metaData.getColumnClassName(1) === "java.math.BigDecimal") - assert(metaData.isSigned(1) === true) - assert(metaData.getPrecision(1) === 37) - assert(metaData.getScale(1) === 2) - assert(metaData.getColumnDisplaySize(1) === 38) + Seq( + ("123.45", 37, 2, 39), + ("-0.12345", 5, 5, 8), + ("-0.12345", 6, 5, 8), + ("-123.45", 5, 2, 7), + ("12345", 5, 0, 6), + ("-12345", 5, 0, 6) + ).foreach { + case (value, precision, scale, expectedColumnDisplaySize) => + val decimalType = s"DECIMAL($precision,$scale)" + withExecuteQuery(s"SELECT cast('$value' as $decimalType)") { rs => + assert(rs.next()) + assert(rs.getBigDecimal(1) === new java.math.BigDecimal(value)) + assert(!rs.wasNull) + assert(!rs.next()) + + val metaData = rs.getMetaData + assert(metaData.getColumnCount === 1) + assert(metaData.getColumnName(1) === s"CAST($value AS $decimalType)") + assert(metaData.getColumnLabel(1) === s"CAST($value AS $decimalType)") + assert(metaData.getColumnType(1) === Types.DECIMAL) + assert(metaData.getColumnTypeName(1) === decimalType) + assert(metaData.getColumnClassName(1) === "java.math.BigDecimal") + assert(metaData.isSigned(1) === true) + assert(metaData.getPrecision(1) === precision) + assert(metaData.getScale(1) === scale) + assert(metaData.getColumnDisplaySize(1) === expectedColumnDisplaySize) + assert(metaData.getColumnDisplaySize(1) >= value.size) + } } } } From f8b5bd6c5fbdafdba80a0808bfe7f98d8e9f1d34 Mon Sep 17 00:00:00 2001 From: cty123 Date: Sat, 8 Nov 2025 17:07:36 -0500 Subject: [PATCH 4/5] polish code comments and drop an unused variable. 
--- .../sql/connect/client/jdbc/util/JdbcTypeUtils.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala index b0e127cd2488d..d35ebd51e3fe3 100644 --- a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala +++ b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala @@ -96,12 +96,12 @@ private[jdbc] object JdbcTypeUtils { case DoubleType => 24 case StringType => getPrecision(field) - // precision + sign(+/-) + leading zero + decimal point, like DECIMAL(5,5) = -0.12345 + // precision + negative sign + leading zero + decimal point, like DECIMAL(5,5) = -0.12345 case DecimalType.Fixed(p, s) if p == s => p + 3 - // precision + sign(+/-), like DECIMAL(5,0) = -12345 + // precision + negative sign, like DECIMAL(5,0) = -12345 case DecimalType.Fixed(p, s) if s == 0 => p + 1 - // precision + sign(+/-) + decimal point, like DECIMAL(5,2) = -123.45 - case DecimalType.Fixed(p, s) => p + 2 + // precision + negative sign + decimal point, like DECIMAL(5,2) = -123.45 + case DecimalType.Fixed(p, _) => p + 2 case other => throw new SQLFeatureNotSupportedException(s"DataType $other is not supported yet.") } From 39dbf000f3e9729d8c762f6eaa1ebda3086bc02b Mon Sep 17 00:00:00 2001 From: cty123 Date: Sat, 8 Nov 2025 17:18:18 -0500 Subject: [PATCH 5/5] small improvement on decimal type matching --- .../spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala index 
d35ebd51e3fe3..c2b27128caa72 100644 --- a/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala +++ b/sql/connect/client/jdbc/src/main/scala/org/apache/spark/sql/connect/client/jdbc/util/JdbcTypeUtils.scala @@ -35,7 +35,7 @@ private[jdbc] object JdbcTypeUtils { case FloatType => Types.FLOAT case DoubleType => Types.DOUBLE case StringType => Types.VARCHAR - case DecimalType.Fixed(_, _) => Types.DECIMAL + case _: DecimalType => Types.DECIMAL case other => throw new SQLFeatureNotSupportedException(s"DataType $other is not supported yet.") } @@ -50,7 +50,7 @@ private[jdbc] object JdbcTypeUtils { case FloatType => classOf[JFloat].getName case DoubleType => classOf[JDouble].getName case StringType => classOf[String].getName - case DecimalType.Fixed(_, _) => classOf[JBigDecimal].getName + case _: DecimalType => classOf[JBigDecimal].getName case other => throw new SQLFeatureNotSupportedException(s"DataType $other is not supported yet.") }