From ab6e2a116be6143bea3d2927297e0d5605e8415c Mon Sep 17 00:00:00 2001
From: kunni
Date: Wed, 24 Jul 2024 07:56:27 +0800
Subject: [PATCH 1/4] [FLINK-35888][cdc-connector][paimon] Add e2e test for
 PaimonDataSink.

---
 .../flink-cdc-pipeline-e2e-tests/pom.xml      | 185 +++++++++
 .../tests/MySqlToPaimonE2eITCase.java         | 371 ++++++++++++++++++
 .../tests/utils/PipelineTestEnvironment.java  |   6 +
 .../operators/schema/SchemaOperator.java      |   2 +-
 4 files changed, 563 insertions(+), 1 deletion(-)
 create mode 100644 flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/MySqlToPaimonE2eITCase.java

diff --git a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/pom.xml b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/pom.xml
index 2326240b6d..35dfbd3ffc 100644
--- a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/pom.xml
+++ b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/pom.xml
@@ -33,9 +33,17 @@ limitations under the License.
         <flink.version>1.19.0</flink.version>
         <mysql.driver.version>8.0.27</mysql.driver.version>
         <starrocks.connector.version>1.2.9_flink-${flink.major.version}</starrocks.connector.version>
+        <hadoop.version>2.8.5</hadoop.version>
+        <hive.version>2.3.9</hive.version>
     </properties>
 
     <dependencies>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-table-planner_2.12</artifactId>
+            <version>${flink.version}</version>
+            <scope>test</scope>
+        </dependency>
         <dependency>
             <groupId>org.apache.flink</groupId>
             <artifactId>flink-cdc-e2e-utils</artifactId>
@@ -99,6 +107,19 @@ limitations under the License.
             <type>test-jar</type>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-connector-files</artifactId>
+            <version>${flink.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-cdc-pipeline-connector-paimon</artifactId>
+            <version>${project.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
         <dependency>
             <groupId>org.apache.flink</groupId>
             <artifactId>flink-connector-test-util</artifactId>
@@ -113,6 +134,152 @@ limitations under the License.
             <version>${testcontainers.version}</version>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-common</artifactId>
+            <version>${hadoop.version}</version>
+            <scope>test</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.apache.avro</groupId>
+                    <artifactId>avro</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>jdk.tools</groupId>
+                    <artifactId>jdk.tools</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.google.protobuf</groupId>
+                    <artifactId>protobuf-java</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-hdfs</artifactId>
+            <version>${hadoop.version}</version>
+            <scope>test</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>jdk.tools</groupId>
+                    <artifactId>jdk.tools</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.google.protobuf</groupId>
+                    <artifactId>protobuf-java</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-metastore</artifactId>
+            <version>${hive.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.fasterxml.jackson.core</groupId>
+                    <artifactId>jackson-annotations</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.fasterxml.jackson.core</groupId>
+                    <artifactId>jackson-core</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.fasterxml.jackson.core</groupId>
+                    <artifactId>jackson-databind</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.orc</groupId>
+                    <artifactId>orc-core</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>jdk.tools</groupId>
+                    <artifactId>jdk.tools</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.google.protobuf</groupId>
+                    <artifactId>protobuf-java</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-exec</artifactId>
+            <version>${hive.version}</version>
+            <scope>test</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.fasterxml.jackson.core</groupId>
+                    <artifactId>jackson-annotations</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.fasterxml.jackson.core</groupId>
+                    <artifactId>jackson-core</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.fasterxml.jackson.core</groupId>
+                    <artifactId>jackson-databind</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.orc</groupId>
+                    <artifactId>orc-core</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.pentaho</groupId>
+                    <artifactId>*</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.calcite</groupId>
+                    <artifactId>calcite-core</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.calcite</groupId>
+                    <artifactId>calcite-druid</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.calcite.avatica</groupId>
+                    <artifactId>avatica</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.calcite</groupId>
+                    <artifactId>calcite-avatica</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
     </dependencies>
 
     <build>
@@ -231,6 +398,24 @@ limitations under the License.
                                     <outputDirectory>${project.build.directory}/dependencies
                                     </outputDirectory>
                                 </artifactItem>
+                                <artifactItem>
+                                    <groupId>org.apache.flink</groupId>
+                                    <artifactId>flink-cdc-pipeline-connector-paimon</artifactId>
+                                    <version>${project.version}</version>
+                                    <destFileName>paimon-cdc-pipeline-connector.jar</destFileName>
+                                    <type>jar</type>
+                                    <outputDirectory>${project.build.directory}/dependencies
+                                    </outputDirectory>
+                                </artifactItem>
+                                <artifactItem>
+                                    <groupId>org.apache.flink</groupId>
+                                    <artifactId>flink-shaded-hadoop-2-uber</artifactId>
+                                    <version>2.8.3-10.0</version>
+                                    <destFileName>flink-shade-hadoop.jar</destFileName>
+                                    <type>jar</type>
+                                    <outputDirectory>${project.build.directory}/dependencies
+                                    </outputDirectory>
+                                </artifactItem>
                             </artifactItems>

diff --git a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/MySqlToPaimonE2eITCase.java b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/MySqlToPaimonE2eITCase.java
new file mode 100644
index 0000000000..e00d4ab611
--- /dev/null
+++ b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/MySqlToPaimonE2eITCase.java
@@ -0,0 +1,371 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.cdc.pipeline.tests;

import org.apache.flink.cdc.common.test.utils.TestUtils;
import org.apache.flink.cdc.connectors.mysql.testutils.MySqlContainer;
import org.apache.flink.cdc.connectors.mysql.testutils.MySqlVersion;
import org.apache.flink.cdc.connectors.mysql.testutils.UniqueDatabase;
import org.apache.flink.cdc.pipeline.tests.utils.PipelineTestEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.types.RowKind;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.output.Slf4jLogConsumer;
import org.testcontainers.lifecycle.Startables;

import java.io.IOException;
import java.nio.file.Path;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Stream;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/** End-to-end tests for the mysql cdc to Paimon pipeline job. */
@RunWith(Parameterized.class)
public class MySqlToPaimonE2eITCase extends PipelineTestEnvironment {
    private static final Logger LOG = LoggerFactory.getLogger(MySqlToPaimonE2eITCase.class);

    public static final int TESTCASE_TIMEOUT_SECONDS = 60;

    private TableEnvironment tEnv;

    // ------------------------------------------------------------------------------------------
    // MySQL Variables (we always use MySQL as the data source for easier verification)
    // ------------------------------------------------------------------------------------------
    protected static final String MYSQL_TEST_USER = "mysqluser";
    protected static final String MYSQL_TEST_PASSWORD = "mysqlpw";

    @ClassRule
    public static final MySqlContainer MYSQL =
            (MySqlContainer)
                    new MySqlContainer(
                                    MySqlVersion.V8_0) // v8 supports both ARM and AMD architectures
                            .withConfigurationOverride("docker/mysql/my.cnf")
                            .withSetupSQL("docker/mysql/setup.sql")
                            .withDatabaseName("flink-test")
                            .withUsername("flinkuser")
                            .withPassword("flinkpw")
                            .withNetwork(NETWORK)
                            .withNetworkAliases("mysql")
                            .withLogConsumer(new Slf4jLogConsumer(LOG));

    protected final UniqueDatabase mysqlInventoryDatabase =
            new UniqueDatabase(MYSQL, "mysql_inventory", MYSQL_TEST_USER, MYSQL_TEST_PASSWORD);

    public MySqlToPaimonE2eITCase() throws IOException {}

    @BeforeClass
    public static void initializeContainers() {
        LOG.info("Starting containers...");
        Startables.deepStart(Stream.of(MYSQL)).join();
        LOG.info("Containers are started.");
    }

    @Before
    public void before() throws Exception {
        super.before();
        mysqlInventoryDatabase.createAndInitialize();
        tEnv = TableEnvironment.create(EnvironmentSettings.newInstance().inBatchMode().build());
    }

    @After
    public void after() {
        super.after();
        mysqlInventoryDatabase.dropDatabase();
    }

    @Test
    public void testSyncWholeDatabase() throws Exception {
        String warehouse = temporaryFolder.newFolder("paimon_" + UUID.randomUUID()).toString();
        tEnv.executeSql(
                String.format(
                        "CREATE CATALOG paimon_catalog WITH ('type'='paimon', 'warehouse'='%s')",
                        warehouse));
        tEnv.executeSql("USE CATALOG paimon_catalog");
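        // The job definition below is the same YAML a user would submit through the CDC CLI:
        // capture every table of the test database and write to a filesystem-backed Paimon
        // warehouse with a fixed bucket count.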
        String pipelineJob =
                String.format(
                        "source:\n"
                                + "  type: mysql\n"
                                + "  hostname: mysql\n"
                                + "  port: 3306\n"
                                + "  username: %s\n"
                                + "  password: %s\n"
                                + "  tables: %s.\\.*\n"
                                + "  server-id: 5400-5404\n"
                                + "  server-time-zone: UTC\n"
                                + "\n"
                                + "sink:\n"
                                + "  type: paimon\n"
                                + "  catalog.properties.warehouse: %s\n"
                                + "  catalog.properties.metastore: filesystem\n"
                                + "  table.properties.bucket: 10\n"
                                + "\n"
                                + "pipeline:\n"
                                + "  parallelism: 4",
                        MYSQL_TEST_USER,
                        MYSQL_TEST_PASSWORD,
                        mysqlInventoryDatabase.getDatabaseName(),
                        warehouse);
        Path mysqlCdcJar = TestUtils.getResource("mysql-cdc-pipeline-connector.jar");
        Path paimonCdcConnector = TestUtils.getResource("paimon-cdc-pipeline-connector.jar");
        Path hadoopJar = TestUtils.getResource("flink-shade-hadoop.jar");
        Path mysqlDriverJar = TestUtils.getResource("mysql-driver.jar");
        submitPipelineJob(pipelineJob, mysqlCdcJar, paimonCdcConnector, mysqlDriverJar, hadoopJar);
        waitUntilJobRunning(Duration.ofSeconds(30));
        LOG.info("Pipeline job is running");
        String query =
                String.format(
                        "SELECT * FROM %s.%s",
                        mysqlInventoryDatabase.getDatabaseName(), "products");
        validateSinkResult(
                query,
                Arrays.asList(
                        Row.of(
                                101,
                                "scooter",
                                "Small 2-wheel scooter",
                                3.14f,
                                "red",
                                "{\"key1\": \"value1\"}",
                                "{\"coordinates\":[1,1],\"type\":\"Point\",\"srid\":0}"),
                        Row.of(
                                102,
                                "car battery",
                                "12V car battery",
                                8.1f,
                                "white",
                                "{\"key2\": \"value2\"}",
                                "{\"coordinates\":[2,2],\"type\":\"Point\",\"srid\":0}"),
                        Row.of(
                                103,
                                "12-pack drill bits",
                                "12-pack of drill bits with sizes ranging from #40 to #3",
                                0.8f,
                                "red",
                                "{\"key3\": \"value3\"}",
                                "{\"coordinates\":[3,3],\"type\":\"Point\",\"srid\":0}"),
                        Row.of(
                                104,
                                "hammer",
                                "12oz carpenter's hammer",
                                0.75f,
                                "white",
                                "{\"key4\": \"value4\"}",
                                "{\"coordinates\":[4,4],\"type\":\"Point\",\"srid\":0}"),
                        Row.of(
                                105,
                                "hammer",
                                "14oz carpenter's hammer",
                                0.875f,
                                "red",
                                "{\"k1\": \"v1\", \"k2\": \"v2\"}",
                                "{\"coordinates\":[5,5],\"type\":\"Point\",\"srid\":0}"),
                        Row.of(106, "hammer", "16oz carpenter's hammer", 1.0f, null, null, null),
                        Row.of(107, "rocks", "box of assorted rocks", 5.3f, null, null, null),
                        Row.of(
                                108,
                                "jacket",
                                "water resistent black wind breaker",
                                0.1f,
                                null,
                                null,
                                null),
                        Row.of(109, "spare tire", "24 inch spare tire", 22.2f, null, null, null)));

        query =
                String.format(
                        "SELECT * FROM %s.%s",
                        mysqlInventoryDatabase.getDatabaseName(), "customers");
        validateSinkResult(
                query,
                Arrays.asList(
                        Row.of(101, "user_1", "Shanghai", "123567891234"),
                        Row.of(102, "user_2", "Shanghai", "123567891234"),
                        Row.of(103, "user_3", "Shanghai", "123567891234"),
                        Row.of(104, "user_4", "Shanghai", "123567891234")));

        LOG.info("Begin incremental reading stage.");
        // generate binlogs
        String mysqlJdbcUrl =
                String.format(
                        "jdbc:mysql://%s:%s/%s",
                        MYSQL.getHost(),
                        MYSQL.getDatabasePort(),
                        mysqlInventoryDatabase.getDatabaseName());
        try (Connection conn =
                        DriverManager.getConnection(
                                mysqlJdbcUrl, MYSQL_TEST_USER, MYSQL_TEST_PASSWORD);
                Statement stat = conn.createStatement()) {

            stat.execute(
                    "INSERT INTO products VALUES (default,'jacket','water resistent white wind breaker',0.2, null, null, null);"); // 110
            stat.execute("UPDATE products SET description='18oz carpenter hammer' WHERE id=106;");
            stat.execute("UPDATE products SET weight='5.1' WHERE id=107;");

            // modify table schema
            stat.execute("ALTER TABLE products DROP COLUMN point_c;");
            stat.execute("DELETE FROM products WHERE id=101;");

            stat.execute(
                    "INSERT INTO products VALUES (default,'scooter','Big 2-wheel scooter ',5.18, null, null);"); // 111
            stat.execute(
                    "INSERT INTO products VALUES (default,'finally', null, 2.14, null, null);"); // 112
        } catch (SQLException e) {
            LOG.error("Update table for CDC failed.", e);
            throw e;
        }
        query =
                String.format(
                        "SELECT * FROM %s.%s",
                        mysqlInventoryDatabase.getDatabaseName(), "products");
        validateSinkResult(
                query,
                Arrays.asList(
                        Row.ofKind(
                                RowKind.INSERT,
                                102,
                                "car battery",
                                "12V car battery",
                                8.1f,
                                "white",
                                "{\"key2\": \"value2\"}"),
                        Row.ofKind(
                                RowKind.INSERT,
                                103,
                                "12-pack drill bits",
                                "12-pack of drill bits with sizes ranging from #40 to #3",
                                0.8f,
                                "red",
                                "{\"key3\": \"value3\"}"),
                        Row.ofKind(
                                RowKind.INSERT,
                                104,
                                "hammer",
                                "12oz carpenter's hammer",
                                0.75f,
                                "white",
                                "{\"key4\": \"value4\"}"),
                        Row.ofKind(
                                RowKind.INSERT,
                                105,
                                "hammer",
                                "14oz carpenter's hammer",
                                0.875f,
                                "red",
                                "{\"k1\": \"v1\", \"k2\": \"v2\"}"),
                        Row.ofKind(
                                RowKind.INSERT,
                                106,
                                "hammer",
                                "18oz carpenter hammer",
                                1.0f,
                                null,
                                null),
                        Row.ofKind(
                                RowKind.INSERT,
                                107,
                                "rocks",
                                "box of assorted rocks",
                                5.1f,
                                null,
                                null),
                        Row.ofKind(
                                RowKind.INSERT,
                                108,
                                "jacket",
                                "water resistent black wind breaker",
                                0.1f,
                                null,
                                null),
                        Row.ofKind(
                                RowKind.INSERT,
                                109,
                                "spare tire",
                                "24 inch spare tire",
                                22.2f,
                                null,
                                null),
                        Row.ofKind(
                                RowKind.INSERT,
                                110,
                                "jacket",
                                "water resistent white wind breaker",
                                0.2f,
                                null,
                                null),
                        Row.ofKind(
                                RowKind.INSERT,
                                111,
                                "scooter",
                                "Big 2-wheel scooter ",
                                5.18f,
                                null,
                                null),
                        Row.ofKind(RowKind.INSERT, 112, "finally", null, 2.14f, null, null)));
    }
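
    // Sink results only become visible to the batch query after the Paimon sink commits,
    // so keep re-running the query until the expected rows appear or the timeout elapses.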
    private void validateSinkResult(String query, List<Row> expected) {
        long startWaitingTimestamp = System.currentTimeMillis();
        boolean validateSucceed = false;
        while (!validateSucceed) {
            try {
                List<Row> results = new ArrayList<>();
                tEnv.executeSql(query).collect().forEachRemaining(results::add);
                assertEqualsInAnyOrder(expected, results);
                validateSucceed = true;
            } catch (Throwable e) {
                if (System.currentTimeMillis() - startWaitingTimestamp
                        > TESTCASE_TIMEOUT_SECONDS * 1000_1000L) {
                    throw new RuntimeException("Failed to check result with given query.");
                }
            }
        }
    }

    private static void assertEqualsInAnyOrder(List<Row> expected, List<Row> actual) {
        assertTrue(expected != null && actual != null);
        assertEquals(expected.size(), actual.size());
        Set<Row> expectedSet = new HashSet<>(expected);
        for (Row row : actual) {
            Assert.assertTrue(expectedSet.contains(row));
        }
    }
}

diff --git a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/utils/PipelineTestEnvironment.java b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/utils/PipelineTestEnvironment.java
index 65c0a202e5..7c2d55316d 100644
--- a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/utils/PipelineTestEnvironment.java
+++ b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/utils/PipelineTestEnvironment.java
@@ -106,6 +106,9 @@ public void before() throws Exception {
                         .withNetworkAliases(INTER_CONTAINER_JM_ALIAS)
                         .withExposedPorts(JOB_MANAGER_REST_PORT)
                         .withEnv("FLINK_PROPERTIES", flinkProperties)
+                        .withFileSystemBind(
+                                temporaryFolder.getRoot().getPath(),
+                                temporaryFolder.getRoot().getPath())
                         .withLogConsumer(jobManagerConsumer);
         taskManagerConsumer = new ToStringConsumer();
         taskManager =
@@ -115,6 +118,9 @@ public void before() throws Exception {
                         .withNetwork(NETWORK)
                         .withNetworkAliases(INTER_CONTAINER_TM_ALIAS)
                         .withEnv("FLINK_PROPERTIES", flinkProperties)
+                        .withFileSystemBind(
+                                temporaryFolder.getRoot().getPath(),
+                                temporaryFolder.getRoot().getPath())
                         .dependsOn(jobManager)
                         .withLogConsumer(taskManagerConsumer);

diff --git a/flink-cdc-runtime/src/main/java/org/apache/flink/cdc/runtime/operators/schema/SchemaOperator.java b/flink-cdc-runtime/src/main/java/org/apache/flink/cdc/runtime/operators/schema/SchemaOperator.java
index d1f468bfe7..f6eef23f88 100644
--- a/flink-cdc-runtime/src/main/java/org/apache/flink/cdc/runtime/operators/schema/SchemaOperator.java
+++ b/flink-cdc-runtime/src/main/java/org/apache/flink/cdc/runtime/operators/schema/SchemaOperator.java
@@ -315,9 +315,9 @@ private void handleSchemaChangeEvent(TableId tableId, SchemaChangeEvent schemaCh
                     tableId,
                     getRuntimeContext().getIndexOfThisSubtask());
             output.collect(new StreamRecord<>(new FlushEvent(tableId)));
-            response.getSchemaChangeEvents().forEach(e -> output.collect(new StreamRecord<>(e)));
             // The request will block until flushing finished in each sink writer
             requestReleaseUpstream();
+            response.getSchemaChangeEvents().forEach(e -> output.collect(new StreamRecord<>(e)));
         }
     }

From 2219ad97ca0929b2c04cd96b924fd8999a53be41 Mon Sep 17 00:00:00 2001
From: kunni
Date: Wed, 24 Jul 2024 17:10:59 +0800
Subject: [PATCH 2/4] [FLINK-35888][cdc-connector][paimon] Add e2e test for
 PaimonDataSink.

---
 .../flink-cdc-pipeline-e2e-tests/pom.xml |  1 +
 .../tests/MySqlToPaimonE2eITCase.java    | 25 +++++++----------
 .../operators/schema/SchemaOperator.java |  2 +-
 3 files changed, 11 insertions(+), 17 deletions(-)

diff --git a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/pom.xml b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/pom.xml
index 35dfbd3ffc..c5e871ce81 100644
--- a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/pom.xml
+++ b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/pom.xml
@@ -193,6 +193,7 @@ limitations under the License.
             <groupId>org.apache.hive</groupId>
             <artifactId>hive-metastore</artifactId>
             <version>${hive.version}</version>
+            <scope>test</scope>
             <exclusions>
                 <exclusion>
                     <groupId>log4j</groupId>

diff --git a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/MySqlToPaimonE2eITCase.java b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/MySqlToPaimonE2eITCase.java
index e00d4ab611..7485b652d0 100644
--- a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/MySqlToPaimonE2eITCase.java
+++ b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/MySqlToPaimonE2eITCase.java
@@ -28,7 +28,6 @@
 import org.apache.flink.types.RowKind;
 
 import org.junit.After;
-import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
@@ -40,7 +39,6 @@
 import org.testcontainers.containers.output.Slf4jLogConsumer;
 import org.testcontainers.lifecycle.Startables;
 
-import java.io.IOException;
 import java.nio.file.Path;
 import java.sql.Connection;
 import java.sql.DriverManager;
@@ -49,9 +47,7 @@
 import java.time.Duration;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Set;
 import java.util.UUID;
 import java.util.stream.Stream;
 
@@ -63,7 +59,7 @@ public class MySqlToPaimonE2eITCase extends PipelineTestEnvironment {
     private static final Logger LOG = LoggerFactory.getLogger(MySqlToPaimonE2eITCase.class);
 
-    public static final int TESTCASE_TIMEOUT_SECONDS = 60;
+    public static final int TESTCASE_TIMEOUT_MILLIS_SECONDS = 60_000;
 
     private TableEnvironment tEnv;
 
@@ -90,8 +86,6 @@ public class MySqlToPaimonE2eITCase extends PipelineTestEnvironment {
     protected final UniqueDatabase mysqlInventoryDatabase =
             new UniqueDatabase(MYSQL, "mysql_inventory", MYSQL_TEST_USER, MYSQL_TEST_PASSWORD);
 
-    public MySqlToPaimonE2eITCase() throws IOException {}
-
     @BeforeClass
     public static void initializeContainers() {
         LOG.info("Starting containers...");
@@ -153,7 +147,7 @@ public void testSyncWholeDatabase() throws Exception {
         LOG.info("Pipeline job is running");
         String query =
                 String.format(
-                        "SELECT * FROM %s.%s",
+                        "SELECT * FROM %s.%s ORDER BY id",
                         mysqlInventoryDatabase.getDatabaseName(), "products");
         validateSinkResult(
                 query,
@@ -212,7 +206,7 @@ public void testSyncWholeDatabase() throws Exception {
 
         query =
                 String.format(
-                        "SELECT * FROM %s.%s",
+                        "SELECT * FROM %s.%s ORDER BY id",
                         mysqlInventoryDatabase.getDatabaseName(), "customers");
         validateSinkResult(
                 query,
@@ -254,7 +248,7 @@ public void testSyncWholeDatabase() throws Exception {
         }
         query =
                 String.format(
-                        "SELECT * FROM %s.%s",
+                        "SELECT * FROM %s.%s ORDER BY id",
                         mysqlInventoryDatabase.getDatabaseName(), "products");
         validateSinkResult(
                 query,
@@ -349,23 +343,22 @@ private void validateSinkResult(String query, List<Row> expected) {
             try {
                 List<Row> results = new ArrayList<>();
                 tEnv.executeSql(query).collect().forEachRemaining(results::add);
-                assertEqualsInAnyOrder(expected, results);
+                assertEqualsInSameOrder(expected, results);
                 validateSucceed = true;
             } catch (Throwable e) {
                 if (System.currentTimeMillis() - startWaitingTimestamp
-                        > TESTCASE_TIMEOUT_SECONDS * 1000_1000L) {
+                        > TESTCASE_TIMEOUT_MILLIS_SECONDS) {
                     throw new RuntimeException("Failed to check result with given query.");
                 }
             }
         }
     }
 
-    private static void assertEqualsInAnyOrder(List<Row> expected, List<Row> actual) {
+    private static void assertEqualsInSameOrder(List<Row> expected, List<Row> actual) {
         assertTrue(expected != null && actual != null);
         assertEquals(expected.size(), actual.size());
-        Set<Row> expectedSet = new HashSet<>(expected);
-        for (Row row : actual) {
-            Assert.assertTrue(expectedSet.contains(row));
+        for (int i = 0; i < expected.size(); i++) {
+            assertEquals(expected.get(i), actual.get(i));
         }
     }
 }

diff --git a/flink-cdc-runtime/src/main/java/org/apache/flink/cdc/runtime/operators/schema/SchemaOperator.java b/flink-cdc-runtime/src/main/java/org/apache/flink/cdc/runtime/operators/schema/SchemaOperator.java
index f6eef23f88..d1f468bfe7 100644
--- a/flink-cdc-runtime/src/main/java/org/apache/flink/cdc/runtime/operators/schema/SchemaOperator.java
+++ b/flink-cdc-runtime/src/main/java/org/apache/flink/cdc/runtime/operators/schema/SchemaOperator.java
@@ -315,9 +315,9 @@ private void handleSchemaChangeEvent(TableId tableId, SchemaChangeEvent schemaCh
                     tableId,
                     getRuntimeContext().getIndexOfThisSubtask());
             output.collect(new StreamRecord<>(new FlushEvent(tableId)));
+            response.getSchemaChangeEvents().forEach(e -> output.collect(new StreamRecord<>(e)));
             // The request will block until flushing finished in each sink writer
             requestReleaseUpstream();
-            response.getSchemaChangeEvents().forEach(e -> output.collect(new StreamRecord<>(e)));
         }
     }

From ad7da9c33d40a5748e5a3c11727a539380846401 Mon Sep 17 00:00:00 2001
From: kunni
Date: Mon, 29 Jul 2024 14:14:18 +0800
Subject: [PATCH 3/4] [FLINK-35888][cdc-connector][paimon] address comment.

---
 .../flink-cdc-pipeline-connector-paimon/pom.xml | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/pom.xml b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/pom.xml
index 2c80c8aa9c..324d864bbf 100644
--- a/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/pom.xml
+++ b/flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/pom.xml
@@ -278,6 +278,19 @@ limitations under the License.
                     </execution>
                 </executions>
             </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-jar-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>test-jar</id>
+                        <goals>
+                            <goal>test-jar</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
         </plugins>
     </build>

From 22fa9d9178cdec9f9d9ed04a7068d0f3885d75ed Mon Sep 17 00:00:00 2001
From: kunni
Date: Mon, 29 Jul 2024 14:32:35 +0800
Subject: [PATCH 4/4] [FLINK-35888][cdc-connector][paimon] address comment.

---
 flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/pom.xml | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/pom.xml b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/pom.xml
index c5e871ce81..5812046f56 100644
--- a/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/pom.xml
+++ b/flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/pom.xml
@@ -35,6 +35,7 @@ limitations under the License.
         <starrocks.connector.version>1.2.9_flink-${flink.major.version}</starrocks.connector.version>
         <hadoop.version>2.8.5</hadoop.version>
         <hive.version>2.3.9</hive.version>
+        <paimon.version>0.7.0-incubating</paimon.version>
     </properties>
 
     <dependencies>
@@ -113,6 +114,13 @@ limitations under the License.
             <version>${flink.version}</version>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.apache.paimon</groupId>
+            <artifactId>paimon-flink-${flink.major.version}</artifactId>
+            <version>${paimon.version}</version>
+            <scope>test</scope>
+        </dependency>
         <dependency>
             <groupId>org.apache.flink</groupId>
             <artifactId>flink-cdc-pipeline-connector-paimon</artifactId>
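
For local verification, the new test can be run on its own once the connector
artifacts exist in the local repository. A minimal sketch (assuming the repository
has been built once with tests skipped; the exact test-selection flag depends on
how surefire/failsafe is configured for the e2e module):

    $ mvn install -DskipTests
    $ mvn -pl flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests verify -Dtest=MySqlToPaimonE2eITCase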