@@ -2250,4 +2250,153 @@ public void testArrayWithSkipUnsupportedTypes() {
22502250 httpPort
22512251 );
22522252 }
2254+ @ ParameterizedTest
2255+ @ ValueSource (booleans = {true , false })
2256+ public void test2DDoubleArraySupport (boolean useHttp ) {
2257+ connect .kafka ().createTopic (topicName , 1 );
2258+ Map <String , String > props = ConnectTestUtils .baseConnectorProps (questDBContainer , topicName , useHttp );
2259+ props .put (VALUE_CONVERTER_CLASS_CONFIG , JsonConverter .class .getName ());
2260+ connect .configureConnector (ConnectTestUtils .CONNECTOR_NAME , props );
2261+ ConnectTestUtils .assertConnectorTaskRunningEventually (connect );
2262+
2263+ // Create schema with 2D double array
2264+ Schema innerArraySchema = SchemaBuilder .array (Schema .FLOAT64_SCHEMA ).build ();
2265+ Schema arraySchema = SchemaBuilder .array (innerArraySchema ).build ();
2266+ Schema schema = SchemaBuilder .struct ()
2267+ .name ("com.example.Matrix" )
2268+ .field ("matrix_id" , Schema .STRING_SCHEMA )
2269+ .field ("data" , arraySchema )
2270+ .build ();
2271+
2272+ // Create 2D array data: [[1.0, 2.0], [3.0, 4.0]]
2273+ Struct struct = new Struct (schema )
2274+ .put ("matrix_id" , "matrix1" )
2275+ .put ("data" , Arrays .asList (
2276+ Arrays .asList (1.0 , 2.0 ),
2277+ Arrays .asList (3.0 , 4.0 )
2278+ ));
2279+
2280+ connect .kafka ().produce (topicName , new String (converter .fromConnectData (topicName , schema , struct )));
2281+
2282+ QuestDBUtils .assertSqlEventually (
2283+ "\" matrix_id\" ,\" data\" \r \n " +
2284+ "\" matrix1\" ,\" [[1.0,2.0],[3.0,4.0]]\" \r \n " ,
2285+ "select matrix_id, data from " + topicName ,
2286+ httpPort
2287+ );
2288+ }
2290+ @ ParameterizedTest
2291+ @ ValueSource (booleans = {true , false })
2292+ public void test3DDoubleArraySupport (boolean useHttp ) {
2293+ connect .kafka ().createTopic (topicName , 1 );
2294+ Map <String , String > props = ConnectTestUtils .baseConnectorProps (questDBContainer , topicName , useHttp );
2295+ props .put (VALUE_CONVERTER_CLASS_CONFIG , JsonConverter .class .getName ());
2296+ connect .configureConnector (ConnectTestUtils .CONNECTOR_NAME , props );
2297+ ConnectTestUtils .assertConnectorTaskRunningEventually (connect );
2298+
2299+ // Create schema with 3D double array
2300+ Schema innerArraySchema = SchemaBuilder .array (Schema .FLOAT64_SCHEMA ).build ();
2301+ Schema middleArraySchema = SchemaBuilder .array (innerArraySchema ).build ();
2302+ Schema arraySchema = SchemaBuilder .array (middleArraySchema ).build ();
2303+ Schema schema = SchemaBuilder .struct ()
2304+ .name ("com.example.Tensor" )
2305+ .field ("tensor_id" , Schema .STRING_SCHEMA )
2306+ .field ("data" , arraySchema )
2307+ .build ();
2308+
2309+ // Create 3D array data: [[[1.0, 2.0]], [[3.0, 4.0]]]
2310+ Struct struct = new Struct (schema )
2311+ .put ("tensor_id" , "tensor1" )
2312+ .put ("data" , Arrays .asList (
2313+ Arrays .asList (Arrays .asList (1.0 , 2.0 )),
2314+ Arrays .asList (Arrays .asList (3.0 , 4.0 ))
2315+ ));
2316+
2317+ connect .kafka ().produce (topicName , new String (converter .fromConnectData (topicName , schema , struct )));
2318+
2319+ QuestDBUtils .assertSqlEventually (
2320+ "\" tensor_id\" ,\" data\" \r \n " +
2321+ "\" tensor1\" ,\" [[[1.0,2.0]],[[3.0,4.0]]]\" \r \n " ,
2322+ "select tensor_id, data from " + topicName ,
2323+ httpPort
2324+ );
2325+ }
2327+ @ Test
2328+ public void testSchemaless2DArraySupport () {
2329+ connect .kafka ().createTopic (topicName , 1 );
2330+ Map <String , String > props = ConnectTestUtils .baseConnectorProps (questDBContainer , topicName , true );
2331+ props .put ("value.converter.schemas.enable" , "false" );
2332+ connect .configureConnector (ConnectTestUtils .CONNECTOR_NAME , props );
2333+ ConnectTestUtils .assertConnectorTaskRunningEventually (connect );
2334+
2335+ // Send JSON with 2D array
2336+ String json = "{\" experiment\" :\" test1\" ,\" results\" :[[1.5,2.5],[3.5,4.5]]}" ;
2337+ connect .kafka ().produce (topicName , json );
2338+
2339+ QuestDBUtils .assertSqlEventually (
2340+ "\" experiment\" ,\" results\" \r \n " +
2341+ "\" test1\" ,\" [[1.5,2.5],[3.5,4.5]]\" \r \n " ,
2342+ "select experiment, results from " + topicName ,
2343+ httpPort
2344+ );
2345+ }
2347+ @ Test
2348+ public void testSchemaless3DArraySupport () {
2349+ connect .kafka ().createTopic (topicName , 1 );
2350+ Map <String , String > props = ConnectTestUtils .baseConnectorProps (questDBContainer , topicName , true );
2351+ props .put ("value.converter.schemas.enable" , "false" );
2352+ connect .configureConnector (ConnectTestUtils .CONNECTOR_NAME , props );
2353+ ConnectTestUtils .assertConnectorTaskRunningEventually (connect );
2354+
2355+ // Send JSON with 3D array
2356+ String json = "{\" model\" :\" cnn1\" ,\" weights\" :[[[0.1,0.2]],[[0.3,0.4]]]}" ;
2357+ connect .kafka ().produce (topicName , json );
2358+
2359+ QuestDBUtils .assertSqlEventually (
2360+ "\" model\" ,\" weights\" \r \n " +
2361+ "\" cnn1\" ,\" [[[0.1,0.2]],[[0.3,0.4]]]\" \r \n " ,
2362+ "select model, weights from " + topicName ,
2363+ httpPort
2364+ );
2365+ }
2367+ @ ParameterizedTest
2368+ @ ValueSource (booleans = {true , false })
2369+ public void test2DFloatArraySupport (boolean useHttp ) {
2370+ connect .kafka ().createTopic (topicName , 1 );
2371+ Map <String , String > props = ConnectTestUtils .baseConnectorProps (questDBContainer , topicName , useHttp );
2372+ props .put (VALUE_CONVERTER_CLASS_CONFIG , JsonConverter .class .getName ());
2373+ connect .configureConnector (ConnectTestUtils .CONNECTOR_NAME , props );
2374+ ConnectTestUtils .assertConnectorTaskRunningEventually (connect );
2375+
2376+ // Create schema with 2D float array
2377+ Schema innerArraySchema = SchemaBuilder .array (Schema .FLOAT32_SCHEMA ).build ();
2378+ Schema arraySchema = SchemaBuilder .array (innerArraySchema ).build ();
2379+ Schema schema = SchemaBuilder .struct ()
2380+ .name ("com.example.FloatMatrix" )
2381+ .field ("id" , Schema .STRING_SCHEMA )
2382+ .field ("values" , arraySchema )
2383+ .build ();
2384+
2385+ // Create 2D array data with float values
2386+ Struct struct = new Struct (schema )
2387+ .put ("id" , "float_matrix1" )
2388+ .put ("values" , Arrays .asList (
2389+ Arrays .asList (1.1f , 2.2f ),
2390+ Arrays .asList (3.3f , 4.4f )
2391+ ));
2392+
2393+ connect .kafka ().produce (topicName , new String (converter .fromConnectData (topicName , schema , struct )));
2394+
2395+ QuestDBUtils .assertSqlEventually (
2396+ "\" id\" ,\" values\" \r \n " +
2397+ "\" float_matrix1\" ,\" [[1.100000023841858,2.200000047683716],[3.299999952316284,4.400000095367432]]\" \r \n " ,
2398+ "select id, \" values\" from " + topicName ,
2399+ httpPort
2400+ );
2401+ }
}