@@ -5,6 +5,7 @@
 import io.debezium.testing.testcontainers.ConnectorConfiguration;
 import io.debezium.testing.testcontainers.DebeziumContainer;
 import io.questdb.client.Sender;
+import io.questdb.kafka.domain.SensorReading;
 import io.questdb.kafka.domain.Student;
 import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericData;
@@ -17,9 +18,11 @@
 import org.jetbrains.annotations.NotNull;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.RegisterExtension;
+import org.slf4j.LoggerFactory;
 import org.testcontainers.containers.GenericContainer;
 import org.testcontainers.containers.KafkaContainer;
 import org.testcontainers.containers.Network;
+import org.testcontainers.containers.output.Slf4jLogConsumer;
 import org.testcontainers.containers.wait.strategy.HttpWaitStrategy;
 import org.testcontainers.containers.wait.strategy.Wait;
 import org.testcontainers.junit.jupiter.Container;
@@ -28,6 +31,7 @@
 import org.testcontainers.utility.MountableFile;

 import java.time.Instant;
+import java.util.Arrays;
 import java.util.Properties;

 import static java.time.Duration.ofMinutes;
@@ -52,10 +56,10 @@ public class AvroSchemaRegistryIT {
             .withEnv("KAFKA_CONTROLLER_QUORUM_VOTERS", "0@kafka:9094");

     @Container
-    private final GenericContainer<?> questDBContainer = new GenericContainer<>("questdb/questdb:7.4.0")
+    private final GenericContainer<?> questDBContainer = new GenericContainer<>("questdb/questdb:9.0.1")
             .withNetwork(network)
             .withExposedPorts(QuestDBUtils.QUESTDB_HTTP_PORT)
-            // .withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("questdb")))
+            .withLogConsumer(new Slf4jLogConsumer(LoggerFactory.getLogger("questdb")))
             .withEnv("QDB_CAIRO_COMMIT_LAG", "100")
             .withEnv("JAVA_OPTS", "-Djava.locale.providers=JRE,SPI");
@@ -108,7 +112,7 @@ public void testSmoke() throws Exception {
             producer.send(new ProducerRecord<>(topicName, "foo", student)).get();
         }

-        startConnector(topicName);
+        startConnector(topicName, "birthday");
         QuestDBUtils.assertSqlEventually("\"firstname\",\"lastname\",\"timestamp\"\r\n"
                         + "\"John\",\"Doe\",\"2000-01-01T00:00:00.000000Z\"\r\n",
                 "select * from " + topicName, questDBContainer.getMappedPort(QuestDBUtils.QUESTDB_HTTP_PORT));
@@ -125,7 +129,7 @@ public void testSchemaEvolution() throws Exception {
                     .build();
             producer.send(new ProducerRecord<>(topicName, "foo", student)).get();
         }
-        startConnector(topicName);
+        startConnector(topicName, "birthday");

        QuestDBUtils.assertSqlEventually("\"firstname\",\"lastname\",\"timestamp\"\r\n"
                        + "\"John\",\"Doe\",\"2000-01-01T00:00:00.000000Z\"\r\n",
@@ -146,7 +150,41 @@ public void testSchemaEvolution() throws Exception {
                 "select * from " + topicName, questDBContainer.getMappedPort(QuestDBUtils.QUESTDB_HTTP_PORT));
     }

-    private void startConnector(String topicName) {
+    @Test
+    public void testAvroRecordsWithArrays() throws Exception {
+        String topicName = "sensors";
+
+        // Send a sensor reading with an array of double values
+        try (Producer<String, SensorReading> producer = new KafkaProducer<>(producerProps())) {
+            SensorReading reading = SensorReading.newBuilder()
+                    .setSensorId("sensor-001")
+                    .setTimestamp(Instant.parse("2024-01-01T10:00:00Z"))
+                    .setValues(Arrays.asList(22.5, 23.1, 22.8, 23.3, 22.9))
+                    .setLocation("Building A")
+                    .build();
+            producer.send(new ProducerRecord<>(topicName, "key1", reading)).get();
+
+            // Send a second reading with a null location to cover optional fields
+            SensorReading reading2 = SensorReading.newBuilder()
+                    .setSensorId("sensor-002")
+                    .setTimestamp(Instant.parse("2024-01-01T10:05:00Z"))
+                    .setValues(Arrays.asList(18.2, 18.5, 18.3))
+                    .setLocation(null)
+                    .build();
+            producer.send(new ProducerRecord<>(topicName, "key2", reading2)).get();
+        }
+
+        startConnector(topicName, "timestamp");
+
+        QuestDBUtils.assertSqlEventually(
+                "\"sensorId\",\"values\",\"location\",\"timestamp\"\r\n" +
+                        "\"sensor-001\",\"[22.5,23.1,22.8,23.3,22.9]\",\"Building A\",\"2024-01-01T10:00:00.000000Z\"\r\n" +
+                        "\"sensor-002\",\"[18.2,18.5,18.3]\",,\"2024-01-01T10:05:00.000000Z\"\r\n",
+                "select sensorId, \"values\", location, timestamp from " + topicName + " order by timestamp",
+                questDBContainer.getMappedPort(QuestDBUtils.QUESTDB_HTTP_PORT));
+    }
+
+    private void startConnector(String topicName, String timestampName) {
         String confString = "http::addr=" + questDBContainer.getNetworkAliases().get(0) + ":" + QuestDBUtils.QUESTDB_HTTP_PORT + ";auto_flush_rows=1;";
         ConnectorConfiguration connector = ConnectorConfiguration.create()
                 .with("connector.class", QuestDBSinkConnector.class.getName())
@@ -155,7 +193,7 @@ private void startConnector(String topicName) {
                 .with("value.converter", "io.confluent.connect.avro.AvroConverter")
                 .with("value.converter.schema.registry.url", "http://" + schemaRegistry.getNetworkAliases().get(0) + ":8081")
                 .with("topics", topicName)
-                .with(QuestDBSinkConnectorConfig.DESIGNATED_TIMESTAMP_COLUMN_NAME_CONFIG, "birthday")
+                .with(QuestDBSinkConnectorConfig.DESIGNATED_TIMESTAMP_COLUMN_NAME_CONFIG, timestampName)
                 .with(QuestDBSinkConnectorConfig.INCLUDE_KEY_CONFIG, "false")
                 .with("client.conf.string", confString);
         connectContainer.registerConnector("my-connector", connector);
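
For context, the new testAvroRecordsWithArrays assumes a SensorReading class generated from an Avro schema along these lines. This is only a sketch inferred from the builder calls in the test: the timestamp-millis logical type and the nullability of location are assumptions, and the actual schema in the repo may differ.

    // Hypothetical schema equivalent, built with Avro's SchemaBuilder
    // (requires org.apache.avro.Schema, SchemaBuilder, and LogicalTypes):
    Schema sensorReading = SchemaBuilder.record("SensorReading")
            .namespace("io.questdb.kafka.domain") // matches the import above
            .fields()
            .requiredString("sensorId")
            // timestamp-millis assumed; setTimestamp(Instant) only implies a timestamp logical type
            .name("timestamp").type(LogicalTypes.timestampMillis().addToSchema(Schema.create(Schema.Type.LONG))).noDefault()
            .name("values").type().array().items().doubleType().noDefault()
            .optionalString("location") // nullable: build() accepts setLocation(null)
            .endRecord();

With a schema like this, the generated builder rejects null for the required fields at build() time, while the optional location may be null; the expected CSV in the test shows that null surfacing as an empty cell. Separately, the client.conf.string handed to the connector is a QuestDB client configuration string, where auto_flush_rows=1 flushes after every row so assertSqlEventually can observe data promptly. The same string format also drives the standalone Java client already imported in this file, roughly like the following sketch (host and column are illustrative, not from the test):

    try (Sender sender = Sender.fromConfig("http::addr=localhost:9000;auto_flush_rows=1;")) {
        sender.table("sensors")
                .stringColumn("location", "Building A")
                .atNow(); // server assigns the designated timestamp
    }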