diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml
index e10d21f28f..68e05c1b3c 100644
--- a/.github/workflows/e2e.yml
+++ b/.github/workflows/e2e.yml
@@ -1,4 +1,4 @@
-# Copyright © 2021 Cask Data, Inc.
+# Copyright © 2021-2023 Cask Data, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
@@ -31,7 +31,7 @@ jobs:
# 3) For PRs that are labeled as build and
# - It's a code change
# - A build label was just added
- # A bit complex, but prevents builds when other labels are manipulated
+ # A bit complex but prevents builds when other labels are manipulated
if: >
github.event_name == 'workflow_dispatch'
|| github.event_name == 'push'
@@ -40,7 +40,7 @@ jobs:
)
strategy:
matrix:
- tests: [bigquery, common, gcs, pubsub, spanner, gcscreate, gcsdelete, gcsmove, bigqueryexecute, gcscopy]
+ tests: [bigquery, common, gcs, pubsub, spanner, gcscreate, gcsdelete, gcsmove, bigqueryexecute, gcscopy, bigquerymultitable]
fail-fast: false
steps:
# Pinned 1.0.0 version
@@ -59,8 +59,9 @@ jobs:
- name: Checkout e2e test repo
uses: actions/checkout@v3
with:
- repository: cdapio/cdap-e2e-tests
+ repository: Vipinofficial11/cdap-e2e-tests # TODO(review): temporary fork — revert to cdapio/cdap-e2e-tests before merge
path: e2e
+ ref: testBQ # TODO(review): temporary test branch — remove before merge
- name: Cache
uses: actions/cache@v3
with:
@@ -68,13 +69,38 @@ jobs:
key: ${{ runner.os }}-maven-${{ github.workflow }}-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-${{ github.workflow }}
+
+ - name: Get Secrets from GCP Secret Manager
+ id: secrets
+ uses: 'google-github-actions/get-secretmanager-secrets@v0' # TODO(review): v0 is deprecated — upgrade to @v1
+ with:
+ secrets: |-
+ MYSQL_HOST:cdapio-github-builds/MYSQL_HOST
+ MYSQL_USERNAME:cdapio-github-builds/MYSQL_USERNAME
+ MYSQL_PASSWORD:cdapio-github-builds/MYSQL_PASSWORD
+ MYSQL_PORT:cdapio-github-builds/MYSQL_PORT
+ BQMT_CONNECTION_STRING:cdapio-github-builds/BQMT_CONNECTION_STRING
+
- name: Run required e2e tests
if: github.event_name != 'workflow_dispatch' && github.event_name != 'push' && steps.filter.outputs.e2e-test == 'false'
run: python3 e2e/src/main/scripts/run_e2e_test.py --testRunner **/${{ matrix.tests }}/**/TestRunnerRequired.java
+ env:
+ MYSQL_HOST: ${{ steps.secrets.outputs.MYSQL_HOST }}
+ MYSQL_USERNAME: ${{ steps.secrets.outputs.MYSQL_USERNAME }}
+ MYSQL_PASSWORD: ${{ steps.secrets.outputs.MYSQL_PASSWORD }}
+ MYSQL_PORT: ${{ steps.secrets.outputs.MYSQL_PORT }}
+ BQMT_CONNECTION_STRING: ${{ steps.secrets.outputs.BQMT_CONNECTION_STRING }}
+
- name: Run all e2e tests
if: github.event_name == 'workflow_dispatch' || github.event_name == 'push' || steps.filter.outputs.e2e-test == 'true'
run: python3 e2e/src/main/scripts/run_e2e_test.py --testRunner **/${{ matrix.tests }}/**/TestRunner.java
- - name: Upload debug files
+ env:
+ MYSQL_HOST: ${{ steps.secrets.outputs.MYSQL_HOST }}
+ MYSQL_USERNAME: ${{ steps.secrets.outputs.MYSQL_USERNAME }}
+ MYSQL_PASSWORD: ${{ steps.secrets.outputs.MYSQL_PASSWORD }}
+ MYSQL_PORT: ${{ steps.secrets.outputs.MYSQL_PORT }}
+ BQMT_CONNECTION_STRING: ${{ steps.secrets.outputs.BQMT_CONNECTION_STRING }}
+ - name: Upload debug files
uses: actions/upload-artifact@v3
if: always()
with:
diff --git a/pom.xml b/pom.xml
index fc710f9c03..cbfb2335c3 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1247,6 +1247,12 @@
       <version>0.4.0-SNAPSHOT</version>
       <scope>test</scope>
+      <dependency>
+        <groupId>mysql</groupId>
+        <artifactId>mysql-connector-java</artifactId>
+        <version>8.0.25</version>
+        <scope>test</scope>
+      </dependency>
       <groupId>ch.qos.logback</groupId>
       <artifactId>logback-classic</artifactId>
diff --git a/src/e2e-test/features/bigquerymultitable/BiqQueryMultiTableSinkError.feature b/src/e2e-test/features/bigquerymultitable/BiqQueryMultiTableSinkError.feature
new file mode 100644
index 0000000000..51db62e09d
--- /dev/null
+++ b/src/e2e-test/features/bigquerymultitable/BiqQueryMultiTableSinkError.feature
@@ -0,0 +1,79 @@
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@BQMT_SINK
+Feature: BigQueryMultiTable sink - Validate BigQueryMultiTable sink plugin error scenarios
+
+ @BQMT_Required
+ Scenario Outline: Verify BigQueryMultiTable Sink properties validation errors for mandatory fields
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "BigQueryMultiTable" from the plugins list as: "Sink"
+ Then Navigate to the properties page of plugin: "BigQuery Multi Table"
+ Then Click on the Validate button
+ Then Validate mandatory property error for "<property>"
+ Examples:
+ | property |
+ | dataset |
+
+ Scenario:Verify BQMT Sink properties validation errors for incorrect value of chunk size
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "BigQueryMultiTable" from the plugins list as: "Sink"
+ Then Navigate to the properties page of plugin: "BigQuery Multi Table"
+ And Enter input plugin property: "referenceName" with value: "Reference"
+ And Replace input plugin property: "project" with value: "projectId"
+ And Enter input plugin property: "dataset" with value: "dataset"
+ Then Override Service account details if set in environment variables
+ Then Enter input plugin property: "gcsChunkSize" with value: "bqmtInvalidChunkSize"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "gcsChunkSize" is displaying an in-line error message: "errorMessageIncorrectBQMTChunkSize"
+
+ @BQMT_Required
+ Scenario:Verify BQMT Sink properties validation errors for incorrect dataset
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "BigQueryMultiTable" from the plugins list as: "Sink"
+ Then Navigate to the properties page of plugin: "BigQuery Multi Table"
+ And Enter input plugin property: "referenceName" with value: "Reference"
+ And Replace input plugin property: "project" with value: "projectId"
+ Then Override Service account details if set in environment variables
+ Then Enter input plugin property: "dataset" with value: "bqmtInvalidSinkDataset"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "dataset" is displaying an in-line error message: "errorMessageIncorrectBQMTDataset"
+
+ Scenario:Verify BQMT Sink properties validation errors for incorrect reference name
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "BigQueryMultiTable" from the plugins list as: "Sink"
+ Then Navigate to the properties page of plugin: "BigQuery Multi Table"
+ And Replace input plugin property: "project" with value: "projectId"
+ And Enter input plugin property: "dataset" with value: "dataset"
+ Then Override Service account details if set in environment variables
+ Then Enter input plugin property: "referenceName" with value: "bqmtInvalidSinkReferenceName"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "errorMessageIncorrectBQMTReferenceName"
+
+ Scenario:Verify BQMT Sink properties validation errors for incorrect value of temporary bucket name
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "BigQueryMultiTable" from the plugins list as: "Sink"
+ Then Navigate to the properties page of plugin: "BigQuery Multi Table"
+ And Enter input plugin property: "referenceName" with value: "Reference"
+ And Replace input plugin property: "project" with value: "projectId"
+ And Enter input plugin property: "dataset" with value: "dataset"
+ Then Override Service account details if set in environment variables
+ Then Enter input plugin property: "bucket" with value: "bqmtInvalidTemporaryBucket"
+ Then Click on the Validate button
+ Then Verify that the Plugin Property: "bucket" is displaying an in-line error message: "errorMessageIncorrectBQMTBucketName"
diff --git a/src/e2e-test/features/bigquerymultitable/MultipleDatabaseTableToBigQueryMultiTableSink_WithMacro.feature b/src/e2e-test/features/bigquerymultitable/MultipleDatabaseTableToBigQueryMultiTableSink_WithMacro.feature
new file mode 100644
index 0000000000..11ff2676ac
--- /dev/null
+++ b/src/e2e-test/features/bigquerymultitable/MultipleDatabaseTableToBigQueryMultiTableSink_WithMacro.feature
@@ -0,0 +1,131 @@
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@BQMT_SINK
+Feature: BigQueryMultiTable sink - Verification of Multiple Database Tables to BigQueryMultiTable successful data transfer using macros
+
+ @MULTIPLEDATABASETABLE_SOURCE_TEST @BQMT_Required
+ Scenario:Verify data is getting transferred from Multiple Database Tables to BQMT sink with all datatypes using macros
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "Multiple Database Tables" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "BigQuery Multi Table" from the plugins list as: "Sink"
+ Then Navigate to the properties page of plugin: "Multiple Database Tables"
+ Then Replace input plugin property: "referenceName" with value: "ref"
+ Then Enter input plugin property: "connectionString" with value: "connectionString" for Credentials and Authorization related fields
+ Then Replace input plugin property: "jdbcPluginName" with value: "mysql"
+ Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "pass" for Credentials and Authorization related fields
+ And Select radio button plugin property: "dataSelectionMode" with value: "sql-statements"
+ Then Click on the Add Button of the property: "sqlStatements" with value:
+ | selectQuery|
+ Then Validate "Multiple Database Tables" plugin properties
+ And Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "BigQuery Multi Table"
+ And Enter input plugin property: "referenceName" with value: "Reference"
+ Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId"
+ Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId"
+ Then Click on the Macro button of Property: "serviceAccountType" and set the value to: "serviceAccountType"
+ Then Click on the Macro button of Property: "serviceAccountFilePath" and set the value to: "serviceAccount"
+ Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset"
+ Then Click plugin property: "truncateTable"
+ Then Click plugin property: "allowSchema"
+ Then Validate "BigQuery Multi Table" plugin properties
+ And Close the Plugin Properties page
+ Then Connect plugins: "Multiple Database Tables" and "BigQuery Multi Table" to establish connection
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Enter runtime argument value "projectId" for key "bqProjectId"
+ Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
+ Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType"
+ Then Enter runtime argument value "serviceAccount" for key "serviceAccount"
+ Then Enter runtime argument value "dataset" for key "bqDataset"
+ Then Run the preview of pipeline with runtime arguments
+ Then Wait till pipeline preview is in running state
+ Then Open and capture pipeline preview logs
+ Then Verify the preview run status of pipeline in the logs is "succeeded"
+ Then Close the pipeline logs
+ Then Close the preview
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Enter runtime argument value "projectId" for key "bqProjectId"
+ Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
+ Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType"
+ Then Enter runtime argument value "serviceAccount" for key "serviceAccount"
+ Then Enter runtime argument value "dataset" for key "bqDataset"
+ Then Run the Pipeline in Runtime with runtime arguments
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Validate the values of records transferred to BQMT sink is equal to the value from source MultiDatabase table
+
+ @MULTIPLEDATABASETABLE_SOURCE_TEST @BQMT_Required
+ Scenario:Verify data is getting transferred from Multiple Database Tables to BQMT sink with split field using macros
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "Multiple Database Tables" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "BigQuery Multi Table" from the plugins list as: "Sink"
+ Then Navigate to the properties page of plugin: "Multiple Database Tables"
+ Then Replace input plugin property: "referenceName" with value: "ref"
+ Then Enter input plugin property: "connectionString" with value: "connectionString" for Credentials and Authorization related fields
+ Then Replace input plugin property: "jdbcPluginName" with value: "mysql"
+ Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "pass" for Credentials and Authorization related fields
+ And Select radio button plugin property: "dataSelectionMode" with value: "sql-statements"
+ Then Click on the Add Button of the property: "sqlStatements" with value:
+ | selectQuery|
+ Then Validate "Multiple Database Tables" plugin properties
+ And Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "BigQuery Multi Table"
+ And Enter input plugin property: "referenceName" with value: "Reference"
+ Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId"
+ Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId"
+ Then Click on the Macro button of Property: "serviceAccountType" and set the value to: "serviceAccountType"
+ Then Click on the Macro button of Property: "serviceAccountFilePath" and set the value to: "serviceAccount"
+ Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset"
+ Then Click on the Macro button of Property: "splitField" and set the value to: "bqmtSplitField"
+ Then Click plugin property: "truncateTable"
+ Then Click plugin property: "allowSchema"
+ Then Validate "BigQuery Multi Table" plugin properties
+ And Close the Plugin Properties page
+ Then Connect plugins: "Multiple Database Tables" and "BigQuery Multi Table" to establish connection
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Enter runtime argument value "projectId" for key "bqProjectId"
+ Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
+ Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType"
+ Then Enter runtime argument value "serviceAccount" for key "serviceAccount"
+ Then Enter runtime argument value "dataset" for key "bqDataset"
+ Then Enter runtime argument value "splitField" for key "bqmtSplitField"
+ Then Run the preview of pipeline with runtime arguments
+ Then Wait till pipeline preview is in running state
+ Then Open and capture pipeline preview logs
+ Then Verify the preview run status of pipeline in the logs is "succeeded"
+ Then Close the pipeline logs
+ Then Close the preview
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Enter runtime argument value "projectId" for key "bqProjectId"
+ Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
+ Then Enter runtime argument value "serviceAccountType" for key "serviceAccountType"
+ Then Enter runtime argument value "serviceAccount" for key "serviceAccount"
+ Then Enter runtime argument value "dataset" for key "bqDataset"
+ Then Enter runtime argument value "splitField" for key "bqmtSplitField"
+ Then Run the Pipeline in Runtime with runtime arguments
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Validate the values of records transferred to BQMT sink is equal to the value from source MultiDatabase table
diff --git a/src/e2e-test/features/bigquerymultitable/MultipleDatabaseTableToBiqQueryMultiTableSink.feature b/src/e2e-test/features/bigquerymultitable/MultipleDatabaseTableToBiqQueryMultiTableSink.feature
new file mode 100644
index 0000000000..5548177e0a
--- /dev/null
+++ b/src/e2e-test/features/bigquerymultitable/MultipleDatabaseTableToBiqQueryMultiTableSink.feature
@@ -0,0 +1,104 @@
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@BQMT_SINK
+Feature: BigQueryMultiTable sink - Verification of MultipleDatabaseTable to BigQueryMultiTable successful data transfer
+
+ @MULTIPLEDATABASETABLE_SOURCE_TEST @BQMT_Required
+ Scenario:Verify data is getting transferred from Multiple Database Tables to BQMT sink with all datatypes
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "Multiple Database Tables" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "BigQuery Multi Table" from the plugins list as: "Sink"
+ Then Navigate to the properties page of plugin: "Multiple Database Tables"
+ Then Replace input plugin property: "referenceName" with value: "ref"
+ Then Enter input plugin property: "connectionString" with value: "connectionString" for Credentials and Authorization related fields
+ Then Replace input plugin property: "jdbcPluginName" with value: "mysql"
+ Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "pass" for Credentials and Authorization related fields
+ And Select radio button plugin property: "dataSelectionMode" with value: "sql-statements"
+ Then Click on the Add Button of the property: "sqlStatements" with value:
+ | selectQuery|
+ Then Validate "Multiple Database Tables" plugin properties
+ And Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "BigQuery Multi Table"
+ And Enter input plugin property: "referenceName" with value: "Reference"
+ And Replace input plugin property: "project" with value: "projectId"
+ And Enter input plugin property: "datasetProject" with value: "projectId"
+ And Enter input plugin property: "dataset" with value: "dataset"
+ Then Override Service account details if set in environment variables
+ Then Click plugin property: "truncateTable"
+ Then Click plugin property: "allowSchema"
+ Then Validate "BigQuery Multi Table" plugin properties
+ And Close the Plugin Properties page
+ Then Connect plugins: "Multiple Database Tables" and "BigQuery Multi Table" to establish connection
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Wait till pipeline preview is in running state
+ Then Open and capture pipeline preview logs
+ Then Verify the preview run status of pipeline in the logs is "succeeded"
+ Then Close the pipeline logs
+ Then Close the preview
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Validate the values of records transferred to BQMT sink is equal to the value from source MultiDatabase table
+
+ @MULTIPLEDATABASETABLE_SOURCE_TEST @BQMT_Required
+ Scenario:Verify data is getting transferred from Multiple Database Tables to BQMT sink with split field
+ Given Open Datafusion Project to configure pipeline
+ When Expand Plugin group in the LHS plugins list: "Source"
+ When Select plugin: "Multiple Database Tables" from the plugins list as: "Source"
+ When Expand Plugin group in the LHS plugins list: "Sink"
+ When Select plugin: "BigQuery Multi Table" from the plugins list as: "Sink"
+ Then Navigate to the properties page of plugin: "Multiple Database Tables"
+ Then Replace input plugin property: "referenceName" with value: "ref"
+ Then Enter input plugin property: "connectionString" with value: "connectionString" for Credentials and Authorization related fields
+ Then Replace input plugin property: "jdbcPluginName" with value: "mysql"
+ Then Replace input plugin property: "user" with value: "user" for Credentials and Authorization related fields
+ Then Replace input plugin property: "password" with value: "pass" for Credentials and Authorization related fields
+ And Select radio button plugin property: "dataSelectionMode" with value: "sql-statements"
+ Then Click on the Add Button of the property: "sqlStatements" with value:
+ | selectQuery|
+ Then Validate "Multiple Database Tables" plugin properties
+ And Close the Plugin Properties page
+ Then Navigate to the properties page of plugin: "BigQuery Multi Table"
+ And Enter input plugin property: "referenceName" with value: "Reference"
+ And Replace input plugin property: "project" with value: "projectId"
+ And Enter input plugin property: "datasetProject" with value: "projectId"
+ And Enter input plugin property: "dataset" with value: "dataset"
+ Then Override Service account details if set in environment variables
+ Then Verify toggle plugin property: "truncateTable" is toggled to: "true"
+ Then Enter input plugin property: "splitField" with value: "bqmtSplitField"
+ Then Click plugin property: "truncateTable"
+ Then Click plugin property: "allowSchema"
+ Then Validate "BigQuery Multi Table" plugin properties
+ And Close the Plugin Properties page
+ Then Connect plugins: "Multiple Database Tables" and "BigQuery Multi Table" to establish connection
+ Then Save the pipeline
+ Then Preview and run the pipeline
+ Then Wait till pipeline preview is in running state
+ Then Open and capture pipeline preview logs
+ Then Verify the preview run status of pipeline in the logs is "succeeded"
+ Then Close the pipeline logs
+ Then Close the preview
+ Then Deploy the pipeline
+ Then Run the Pipeline in Runtime
+ Then Wait till pipeline is in running state
+ Then Open and capture logs
+ Then Verify the pipeline status is "Succeeded"
+ Then Validate the values of records transferred to BQMT sink is equal to the value from source MultiDatabase table
diff --git a/src/e2e-test/java/io/cdap/plugin/bigquerymultitable/runners/TestRunner.java b/src/e2e-test/java/io/cdap/plugin/bigquerymultitable/runners/TestRunner.java
new file mode 100644
index 0000000000..7807102f3d
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/bigquerymultitable/runners/TestRunner.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.bigquerymultitable.runners;
+
+import io.cucumber.junit.Cucumber;
+import io.cucumber.junit.CucumberOptions;
+import org.junit.runner.RunWith;
+
+/**
+ * Test Runner to execute BQMT action cases.
+ */
+@RunWith(Cucumber.class)
+@CucumberOptions(
+ features = {"src/e2e-test/features"},
+ glue = {"io.cdap.plugin.bigquerymultitable.stepsdesign", "io.cdap.plugin.gcs.stepsdesign", "stepsdesign",
+ "io.cdap.plugin.common.stepsdesign"},
+ tags = {"@BQMT_SINK"},
+ monochrome = true,
+ plugin = {"pretty", "html:target/cucumber-html-report/bigquerymultitable",
+ "json:target/cucumber-reports/cucumber-bigquerymultitable.json",
+ "junit:target/cucumber-reports/cucumber-bigquerymultitable.xml"}
+)
+public class TestRunner {
+}
diff --git a/src/e2e-test/java/io/cdap/plugin/bigquerymultitable/runners/TestRunnerRequired.java b/src/e2e-test/java/io/cdap/plugin/bigquerymultitable/runners/TestRunnerRequired.java
new file mode 100644
index 0000000000..b3684f961c
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/bigquerymultitable/runners/TestRunnerRequired.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.bigquerymultitable.runners;
+
+import io.cucumber.junit.Cucumber;
+import io.cucumber.junit.CucumberOptions;
+import org.junit.runner.RunWith;
+
+/**
+ * Test Runner to execute only required BQMT action cases.
+ */
+@RunWith(Cucumber.class)
+@CucumberOptions(
+ features = {"src/e2e-test/features"},
+ glue = {"io.cdap.plugin.bigquerymultitable.stepsdesign", "io.cdap.plugin.gcs.stepsdesign", "stepsdesign",
+ "io.cdap.plugin.common.stepsdesign"},
+ tags = {"@BQMT_Required"},
+ //TODO: Enable test once issue is fixed https://cdap.atlassian.net/browse/PLUGIN-1669
+ monochrome = true,
+ plugin = {"pretty", "html:target/cucumber-html-report/bigquerymultitable-required",
+ "json:target/cucumber-reports/cucumber-bigquerymultitable-required.json",
+ "junit:target/cucumber-reports/cucumber-bigquerymultitable-required.xml"}
+)
+public class TestRunnerRequired {
+}
+
diff --git a/src/e2e-test/java/io/cdap/plugin/bigquerymultitable/runners/package-info.java b/src/e2e-test/java/io/cdap/plugin/bigquerymultitable/runners/package-info.java
new file mode 100644
index 0000000000..e9ef491dc0
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/bigquerymultitable/runners/package-info.java
@@ -0,0 +1,4 @@
+/**
+ * Package contains the runner for the BQMT features.
+ */
+package io.cdap.plugin.bigquerymultitable.runners;
diff --git a/src/e2e-test/java/io/cdap/plugin/bigquerymultitable/stepdesign/BQMultiTableValidation.java b/src/e2e-test/java/io/cdap/plugin/bigquerymultitable/stepdesign/BQMultiTableValidation.java
new file mode 100644
index 0000000000..c98873d070
--- /dev/null
+++ b/src/e2e-test/java/io/cdap/plugin/bigquerymultitable/stepdesign/BQMultiTableValidation.java
@@ -0,0 +1,220 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package io.cdap.plugin.bigquerymultitable.stepdesign;
+
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.FieldValueList;
+import com.google.cloud.bigquery.TableResult;
+import com.google.gson.Gson;
+import com.google.gson.JsonObject;
+import io.cdap.e2e.utils.BigQueryClient;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import org.junit.Assert;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.sql.Types;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
+
+/**
+ * BQMTMultiTable Validation.
+ */
+
+public class BQMultiTableValidation {
+
+ private static final String tabA = "tabA";
+
+ private static final String tabB = "tabB";
+
+ /**
+ * Validates data from a MySQL table against corresponding BigQuery tables.
+ *
+ * @param sourceTable The name of the MySQL table to be validated.
+ * @return true if validation passes for all tables, false otherwise.
+ * @throws IOException If an I/O error occurs.
+ * @throws InterruptedException If the current thread is interrupted while waiting.
+ * @throws SQLException If an SQL error occurs.
+ * @throws ClassNotFoundException If the class specified is not found.
+ */
+
+ public static boolean validateMySqlToBQRecordValues(String sourceTable) throws IOException, InterruptedException,
+ SQLException, ClassNotFoundException {
+ // using MySql database and tables in Multiple Database table plugin
+ List targetTables = getTableNameFromMySQL();
+ List sourceList = new ArrayList<>();
+ sourceList.add(sourceTable);
+ List