Commit 6d5549c

fix maptype doesn't use the frameless injection properly (#421)

1 parent: 3eab686

File tree

6 files changed: +24 -4 lines

sparksql-scalapb/src/main/scala/scalapb/spark/FromCatalystHelpers.scala (+1 -1)

@@ -97,7 +97,7 @@ trait FromCatalystHelpers {
         input,
         (in: Expression) => singleFieldValueFromCatalyst(mapEntryCmp, keyDesc, in),
         (in: Expression) => singleFieldValueFromCatalyst(mapEntryCmp, valDesc, in),
-        ProtoSQL.dataTypeFor(fd).asInstanceOf[MapType],
+        protoSql.dataTypeFor(fd).asInstanceOf[MapType],
         classOf[Vector[(Any, Any)]]
       )
       val objs = MyCatalystToExternalMap(urobjs)
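
The one-line change above is the substance of the fix: `ProtoSQL` is the companion object with the default type mappings, while `protoSql` is the instance the helper was configured with, which may carry frameless injections (for example `ProtoSQL.withSparkTimestamps`). A minimal sketch of why the two can disagree for map fields, assuming `dataTypeFor` is accessible from the call site as it is inside the trait; the descriptor lookup is illustrative only:

    import scalapb.spark.ProtoSQL
    import scalapb.spark.test3.customizations.TimestampTypesMap

    // Look up the map field added in customizations.proto below.
    val fd = TimestampTypesMap.scalaDescriptor.findFieldByName("map_field").get

    // The companion object derives the MapType from the default type mappings...
    val defaultType = ProtoSQL.dataTypeFor(fd)

    // ...while a configured instance applies its frameless injections, so the
    // map's value type can differ (e.g. Spark timestamps instead of the default
    // representation of the timestamp field).
    val injectedType = ProtoSQL.withSparkTimestamps.dataTypeFor(fd)

Before this commit the deserializer always used the companion object's `MapType`, which disagreed with the schema produced by a configured instance; the new `TimestampTypesMap` test below exercises exactly that case.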

sparksql-scalapb/src/test/protobuf/customizations.proto (+4)

@@ -22,3 +22,7 @@ message BothTimestampTypes {
   google.protobuf.Timestamp google_ts = 1;
   google.protobuf.Timestamp google_ts_as_sql_ts = 2 [(scalapb.field).type = "java.sql.Timestamp"];
 }
+
+message TimestampTypesMap {
+  map<string, SQLTimestampFromGoogleTimestamp> map_field = 1;
+}

sparksql-scalapb/src/test/scala/PersonSpec.scala (+2 -2)

@@ -310,7 +310,7 @@ class PersonSpec extends AnyFlatSpec with Matchers with BeforeAndAfterAll {
   }

   "UDFs that returns protos" should "work when reading local files" in {
-    val df = spark.read.json("./sparksql-scalapb/src/test/assets/address.json")
+    val df = spark.read.json(getClass.getResource("/address.json").toURI.toString)

     val returnAddress = ProtoSQL.udf { s: String => Address() }

@@ -349,7 +349,7 @@ class PersonSpec extends AnyFlatSpec with Matchers with BeforeAndAfterAll {
   "parsing null repeated from json" should "work" in {
     spark.read
       .schema(ProtoSQL.schemaFor[Person].asInstanceOf[types.StructType])
-      .json("./sparksql-scalapb/src/test/assets/person_null_repeated.json")
+      .json(getClass.getResource("/person_null_repeated.json").toURI.toString)
       .as[Person]
       .collect() must contain theSameElementsAs Seq(
         Person().withTags(Seq("foo", "bar")),

sparksql-scalapb/src/test/scala/TimestampSpec.scala (+17 -1)

@@ -8,7 +8,8 @@ import org.scalatest.matchers.must.Matchers
 import scalapb.spark.test3.customizations.{
   BothTimestampTypes,
   SQLTimestampFromGoogleTimestamp,
-  StructFromGoogleTimestamp
+  StructFromGoogleTimestamp,
+  TimestampTypesMap
 }

 import java.sql.{Timestamp => SQLTimestamp}
@@ -158,6 +159,21 @@ class TimestampSpec extends AnyFlatSpec with Matchers with BeforeAndAfterAll {
     )
   }

+  "spark.createDataset from proto messages with spark timestamp in map" should "be able to convert items with correct timestamp values" in {
+    import ProtoSQL.withSparkTimestamps.implicits._
+
+    val value = TimestampTypesMap(mapField =
+      Map(
+        "a" -> SQLTimestampFromGoogleTimestamp(googleTsAsSqlTs = Some(sqlTimestampMicrosPrecision))
+      )
+    )
+    val ds: Dataset[TimestampTypesMap] = spark.createDataset(Seq(value))
+
+    ds.collect() must contain theSameElementsAs Seq(
+      value
+    )
+  }
+
   "df with case class timestamp as well as both types of google timestamp" should "not have StructType for timestamps" in {
     import ProtoSQL.withSparkTimestamps.implicits._
