diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 51223b8c7..45cca0614 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -40,7 +40,7 @@ jobs: ) strategy: matrix: - module: [mysql-plugin, postgresql-plugin, oracle-plugin, mssql-plugin] + module: [mysql-plugin, postgresql-plugin, oracle-plugin, mssql-plugin, cloudsql-mysql-plugin] fail-fast: false steps: diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTime.feature new file mode 100644 index 000000000..091884452 --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTime.feature @@ -0,0 +1,65 @@ +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. + +@CloudMySql @CloudMySql_Required +Feature: CloudMySql sink - Verify CloudMySql sink plugin design time scenarios + + Scenario: To verify CloudMySql sink plugin validation with mandatory properties + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "TestDatabase" + Then Enter input plugin property: "tableName" with value: "mytable" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + + Scenario: To verify CloudMySql sink plugin validation with connection and basic details for connectivity + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: 
"database" with value: "TestDatabase" + Then Enter input plugin property: "tableName" with value: "mytable" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + + Scenario: To verify CloudMySql source plugin validation setting up connection arguments + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Enter input plugin property: "referenceName" with value: "referencename" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Enter input plugin property: "tableName" with value: "mytable" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeValidation.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeValidation.feature new file mode 100644 index 000000000..adf99440c --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeValidation.feature @@ -0,0 +1,132 @@ +# Copyright © 2023 Cask Data, Inc. +## +## Licensed under the Apache License, Version 2.0 (the "License"); you may not +## use this file except in compliance with the License. You may obtain a copy of +## the License at +## +## http://www.apache.org/licenses/LICENSE-2.0 +## +## Unless required by applicable law or agreed to in writing, software +## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +## License for the specific language governing permissions and limitations under +# the License.. 
+ +@CloudMySql @CloudMySql_Required +Feature: CloudMySql sink - Verify CloudSQL MySQL sink plugin design time validation scenarios + + @CLOUDMYSQL_SOURCE_TEST @CLOUDMYSQL_TARGET_TEST + Scenario: To verify CloudMySql sink plugin validation error message with invalid database + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL MySQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "invalidDatabaseName" + Then Enter input plugin property: "tableName" with value: "mytable" + Then Click on the Validate button + Then Verify that the Plugin is displaying an error message: "errorMessageInvalidSinkDatabase" on the header + + @CLOUDMYSQL_SOURCE_TEST @CLOUDMYSQL_TARGET_TEST + Scenario: To verify CloudMySql sink plugin validation error message with invalid table name + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: 
"connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL MySQL2" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "invalidRefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Enter input plugin property: "tableName" with value: "Invalidtable" + Then Click on the Validate button + Then Verify that the Plugin Property: "tableName" is displaying an in-line error message: "errorMessageInvalidTableName" + + Scenario: To verify CloudMySql sink plugin validation error message with invalid reference Name + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "invalidRef" + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Enter input plugin property: "database" with value: "TestDatabase" + Then Enter input plugin property: "tableName" with value: "mytable" + Then Click on the Validate button + Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "errorMessageCloudMySqlInvalidReferenceName" + + Scenario: To verify CloudMySql sink plugin validation error message with blank username + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select 
dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "TestDatabase" + Then Enter input plugin property: "tableName" with value: "mytable" + Then Click on the Validate button + Then Verify that the Plugin Property: "user" is displaying an in-line error message: "errorMessageBlankUsername" + + Scenario: To verify CloudMySql sink plugin validation error message with invalid connection name + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Enter input plugin property: "connectionName" with value: "cdf-athena:us-central1" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "TestDatabase" + Then Enter input plugin property: "tableName" with value: "mytable" + Then Click on the Validate button + Then Verify that the Plugin Property: "connectionName" is displaying an in-line error message: "errorMessageConnectionName" diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeWithMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeWithMacro.feature new file mode 100644 index 000000000..6919841b0 --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlDesignTimeWithMacro.feature @@ -0,0 +1,51 @@ +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
+ +@CloudMySql @CloudMySql_Required +Feature: CloudMySql sink - Verify CloudMySql sink plugin design time macro scenarios + + Scenario: To verify CloudMySql sink plugin validation with macro enabled fields for connection section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "driverName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Select radio button plugin property: "instanceType" with value: "public" + Then Click on the Macro button of Property: "user" and set the value to: "username" + Then Click on the Macro button of Property: "password" and set the value to: "password" + Then Click on the Macro button of Property: "connectionArguments" and set the value to: "connectionArgumentsList" + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "TestDatabase" + Then Click on the Macro button of Property: "tableName" and set the value to: "mytable" + Then Click on the Validate button + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + + Scenario: To verify CloudMySql sink plugin validation with macro enabled fields for basic section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "TestDatabase" + Then Click on the Macro button of Property: "tableName" and set the value to: "mytable" + Then Click on the Validate button + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page \ No newline at end of file diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature new file mode 100644 index 000000000..e2f1eb1fe --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTime.feature @@ -0,0 +1,150 @@ +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. 
You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. + +@CloudMySql @CloudMySql_Required +Feature: CloudMySql Sink - Run time scenarios + + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE @PLUGIN-1633 + Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink successfully + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + And Navigate to the properties page of plugin: "BigQuery" + And Enter input plugin property: "referenceName" with value: "Reference" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Replace input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema" + Then Validate "BigQuery" plugin properties + And Close the Plugin Properties page + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target CloudSQLMySql table is equal to the values from source BigQuery table + + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE @PLUGIN-1633 + Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink successfully when connection arguments are set + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS 
plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + And Navigate to the properties page of plugin: "BigQuery" + And Enter input plugin property: "referenceName" with value: "Reference" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Replace input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema" + Then Validate "BigQuery" plugin properties + And Close the Plugin Properties page + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target CloudSQLMySql table is equal to the values from source BigQuery table + + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE @PLUGIN-1633 + Scenario: To verify data is getting transferred from BigQuery source to CloudSQL MySQL sink with Advanced property Connection timeout + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + And Navigate to the properties page of plugin: "BigQuery" + And Enter input plugin property: "referenceName" with value: "Reference" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Replace input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: 
"bqOutputMultipleDatatypesSchema" + Then Validate "BigQuery" plugin properties + And Close the Plugin Properties page + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Replace input plugin property: "connectionTimeout" with value: "connectionTimeout" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target CloudSQLMySql table is equal to the values from source BigQuery table diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature new file mode 100644 index 000000000..717f9dcf5 --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/features/sink/CloudMySqlRunTimeMacro.feature @@ -0,0 +1,188 @@ +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
+ +@CloudMySql @CloudMySql_Required +Feature: CloudMySql Sink - Run time scenarios (macro) + + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE + Scenario: To verify data is getting transferred from BigQuery source to CloudMySql sink using macro arguments in connection section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Enter input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLMySQLDriverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLMySQLUsername" + Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLMySQLPassword" + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "driverName" for key "cloudSQLMySQLDriverName" + Then Enter runtime argument value from environment variable "username" for key "cloudSQLMySQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLMySQLPassword" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "driverName" for key "cloudSQLMySQLDriverName" + Then Enter runtime argument value from environment variable "username" for key "cloudSQLMySQLUsername" + Then Enter runtime argument value from environment variable "password" for key "cloudSQLMySQLPassword" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target CloudSQLMySql table is equal to the values from source BigQuery table + + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE @PLUGIN-1633 + Scenario: To verify data is getting transferred from BigQuery 
source to CloudMySql sink using macro arguments in basic section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Enter input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Click on the Macro button of Property: "tableName" and set the value to: "Tablename" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "targetTable" for key "Tablename" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "targetTable" for key "Tablename" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target CloudSQLMySql table is equal to the values from source BigQuery table + + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE + Scenario: Verify pipeline failure message in logs when user provides invalid Table Name of plugin with Macros + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Enter input plugin property: "dataset" with 
value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Click on the Macro button of Property: "tableName" and set the value to: "invalidTablename" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "invalidTablename" for key "invalidTablename" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + And Verify the pipeline status is "Failed" + Then Open Pipeline logs and verify Log entries having below listed Level and Message: + | Level | Message | + | ERROR | errorLogsMessageInvalidTableName | + + @BQ_SOURCE_TEST @CLOUDMYSQL_TEST_TABLE + Scenario: Verify pipeline failure message in logs when user provides invalid credentials of plugin with Macros + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "BigQuery" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "BigQuery" and "CloudSQL MySQL" to establish connection + Then Navigate to the properties page of plugin: "BigQuery" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + And Replace input plugin property: "project" with value: "projectId" + And Enter input plugin property: "datasetProject" with value: "datasetprojectId" + And Enter input plugin property: "dataset" with value: "dataset" + And Enter input plugin property: "table" with value: "bqSourceTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Click on the Macro button of Property: "user" and set the value to: "Username" + Then Click on the Macro button of Property: "password" and set the value to: "Password" + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Save the 
pipeline + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "invalidUserName" for key "Username" + Then Enter runtime argument value "invalidPassword" for key "Password" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + And Verify the pipeline status is "Failed" + Then Open Pipeline logs and verify Log entries having below listed Level and Message: + | Level | Message | + | ERROR | errorLogsMessageInvalidCredentials | + diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature new file mode 100644 index 000000000..414c4d879 --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTime.feature @@ -0,0 +1,74 @@ +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. + +@CloudMySql +Feature: CloudMySql source - Verify CloudMySql source plugin design time scenarios + + @CLOUDMYSQL_SOURCE_TEST + Scenario: To verify CloudMySql source plugin validation with mandatory properties + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Validate "CloudSQL MySQL" plugin properties + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Close the Plugin Properties page + + @CLOUDMYSQL_SOURCE_TEST + Scenario: To verify CloudMySql source plugin validation with connection and basic details for connectivity + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: 
"connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Validate "CloudSQL MySQL" plugin properties + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Close the Plugin Properties page + + @CLOUDMYSQL_SOURCE_TEST + Scenario: To verify CloudMySql source plugin validation setting up connection arguments + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList" + Then Enter input plugin property: "referenceName" with value: "referencename" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Verify the Output Schema matches the Expected Schema: "datatypesSchema" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature new file mode 100644 index 000000000..9e54df419 --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeVaidation.feature @@ -0,0 +1,182 @@ +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations under +# the License. + +@CloudMySql @CloudMySql_Required +Feature: CloudMySql source - Verify CloudMySql source plugin design time validation scenarios + + Scenario: To verify CloudMySql source plugin validation error message with invalid database + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "invalidDatabase" + Then Enter textarea plugin property: "importQuery" with value: "insertQuery" + Then Click on the Validate button + Then Verify that the Plugin is displaying an error message: "errorMessageInvalidSourceDatabase" on the header + + Scenario: To verify CloudMySql source plugin validation error message with invalid import query + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "invalidImportQuery" + Then Replace input plugin property: "numSplits" with value: "numberOfSplits" + Then Click on the Validate button + Then Verify that the Plugin Property: "importQuery" is displaying an in-line error message: "errorMessageInvalidImportQuery" + + Scenario: To verify CloudMySql source plugin validation error message with invalid reference name + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization 
related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "invalidRef" + Then Replace input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "insertQuery" + Then Click on the Validate button + Then Verify that the Plugin Property: "referenceName" is displaying an in-line error message: "errorMessageCloudMySqlInvalidReferenceName" + + Scenario: To verify CloudMySql source plugin validation error message with blank username + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "insertQuery" + Then Click on the Validate button + Then Verify that the Plugin Property: "user" is displaying an in-line error message: "errorMessageBlankUsername" + + Scenario: To verify CloudMySql source plugin validation error message with blank password + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "insertQuery" + Then Click on the Validate button + Then Verify that the Plugin is displaying an error message: "errorMessageBlankPassword" on the header + + Scenario: To verify CloudMySql source plugin validation error message when fetch size is changed to zero + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: 
"connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "insertQuery" + Then Replace input plugin property: "fetchSize" with value: "zeroValue" + Then Click on the Validate button + Then Verify that the Plugin Property: "fetchSize" is displaying an in-line error message: "errorMessageInvalidFetchSize" + + Scenario: To verify CloudMySql source plugin validation error message with number of splits without split by field name + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "insertQuery" + Then Replace input plugin property: "numSplits" with value: "numberOfSplits" + Then Click on the Validate button + Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageBlankSplitBy" + Then Verify that the Plugin Property: "splitBy" is displaying an in-line error message: "errorMessageBlankSplitBy" + + Scenario: To verify CloudMySql source plugin validation error message when number of Split value is changed to zero + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "sourceRef" + Then Replace input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "insertQuery" + Then Replace input plugin property: "numSplits" with value: "zeroValue" + Then Click on the 
+    Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageInvalidNumberOfSplits"
+
+  Scenario: To verify CloudMySql source plugin validation error message when number of splits value is not a number
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
+    Then Replace input plugin property: "numSplits" with value: "zeroSplits"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageNumberOfSplitNotNumber"
+
+  Scenario: To verify CloudMySql source plugin validation error message with blank bounding query
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "insertQuery"
+    Then Replace input plugin property: "numSplits" with value: "numberOfSplits"
+    Then Click on the Validate button
+    Then Verify that the Plugin Property: "boundingQuery" is displaying an in-line error message: "errorMessageBoundingQuery"
+    Then Verify that the Plugin Property: "numSplits" is displaying an in-line error message: "errorMessageNumberOfSplits"
diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature
new file mode 100644
index 000000000..203aa670c
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlDesignTimeWithMacro.feature
@@ -0,0 +1,51 @@
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@CloudMySql @CloudMySql_Required
+Feature: CloudMySql source- Verify CloudMySql source plugin design time macro scenarios
+
+  @CLOUDMYSQL_SOURCE_TEST
+  Scenario: To verify CloudMySql source plugin validation with macro enabled fields for connection section
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "DriverName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Click on the Macro button of Property: "user" and set the value to: "username"
+    Then Click on the Macro button of Property: "password" and set the value to: "password"
+    Then Click on the Macro button of Property: "connectionArguments" and set the value to: "connectionArgumentsList"
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Validate button
+    Then Close the Plugin Properties page
+
+  @CLOUDMYSQL_SOURCE_TEST
+  Scenario: To verify CloudMySql source plugin validation with macro enabled fields for basic section
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "sourceRef"
+    Then Replace input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Validate button
+    Then Close the Plugin Properties page
diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature
new file mode 100644
index 000000000..0f3704097
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTime.feature
@@ -0,0 +1,231 @@
+# Copyright © 2023 Cask Data, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+@CloudMySql @CloudMySql_Required
+Feature: CloudMySql Source - Run time scenarios
+
+  @CLOUDMYSQL_SOURCE_TEST @BQ_SINK_TEST
+  Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Get Schema button
+    Then Verify the Output Schema matches the Expected Schema: "datatypesSchema"
+    Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Validate "BigQuery" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Verify the preview of pipeline is "success"
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Close the pipeline logs
+    Then Validate the values of records transferred to target Big Query table is equal to the values from source table
+
+  @CLOUDMYSQL_SOURCE_DATATYPES_TEST @BQ_SINK_TEST
+  Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully with all datatypes
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
plugin: "CloudSQL MySQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Click on the Get Schema button + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Wait till pipeline preview is in running state + Then Open and capture pipeline preview logs + Then Verify the preview run status of pipeline in the logs is "succeeded" + Then Close the pipeline logs + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target Big Query table is equal to the values from source table + + + @CLOUDMYSQL_SOURCE_DATATYPES_TEST @CLOUDMYSQL_TARGET_DATATYPES_TEST @PLUGIN-20670 + Scenario: To verify data is getting transferred from CloudMySql source to CloudMySql sink successfully + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for 
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Get Schema button
+    Then Verify the Output Schema matches the Expected Schema: "datatypesSchema"
+    Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL2"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Validate "CloudSQL MySQL2" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Verify the preview of pipeline is "success"
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate the values of records transferred to target table is equal to the values from source table
+
+  @CLOUDMYSQL_SOURCE_DATATYPES_TEST @CLOUDMYSQL_TARGET_DATATYPES_TEST @PLUGIN-20670
+  Scenario: To verify data is getting transferred from CloudMySql source to CloudMySql successfully when connection arguments are set
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter key value pairs for plugin property: "connectionArguments" with values from json: "connectionArgumentsList"
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Click on the Get Schema button
+    Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL2"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Validate "CloudSQL MySQL2" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Verify the preview of pipeline is "success"
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Validate the values of records transferred to target table is equal to the values from source table
+
+  @CLOUDMYSQL_SOURCE_DATATYPES_TEST @CLOUDMYSQL_TARGET_DATATYPES_TEST
+  Scenario: Verify user should not be able to deploy and run the pipeline when plugin is configured with invalid bounding query
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "boundingQuery" with value: "invalidboundQuery"
+    Then Enter textarea plugin property: "importQuery" with value: "cloudsqlimportQuery"
+    Then Replace input plugin property: "splitBy" with value: "splitby"
+    Then Replace input plugin property: "numSplits" with value: "numbersplitsgenerate"
+    Then Click on the Get Schema button
+    Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL2"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
"connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + Then Replace input plugin property: "tableName" with value: "targetTable" + Then Validate "CloudSQL MySQL2" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Wait till pipeline is in running state + And Verify the pipeline status is "Failed" + Then Open Pipeline logs and verify Log entries having below listed Level and Message: + | Level | Message | + | ERROR | errorLogsMessageInvalidBoundingQuery | diff --git a/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature new file mode 100644 index 000000000..f1ae4ed52 --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/features/source/CloudMySqlRunTimeMacro.feature @@ -0,0 +1,307 @@ +# Copyright © 2023 Cask Data, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. 
+
+@CloudMySql @CloudMySql_Required
+Feature: CloudMySql - Verify CloudMySql plugin data transfer with macro arguments
+
+  @CLOUDMYSQL_SOURCE_TEST @CLOUDMYSQL_SINK_TEST @PLUGIN-20670
+  Scenario: To verify data is getting transferred from CloudMySql to CloudMySql successfully using macro arguments in connection section
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Click on the Macro button of Property: "user" and set the value to: "Username"
+    Then Click on the Macro button of Property: "password" and set the value to: "Password"
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Replace input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL2"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Validate "CloudSQL MySQL2" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Enter runtime argument value "driverName" for key "driverName"
+    Then Enter runtime argument value "username" for key "Username"
+    Then Enter runtime argument value "password" for key "Password"
+    Then Run the preview of pipeline with runtime arguments
+    Then Wait till pipeline preview is in running state
+    Then Open and capture pipeline preview logs
+    Then Verify the preview run status of pipeline in the logs is "succeeded"
+    Then Close the pipeline logs
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Enter runtime argument value "driverName" for key "driverName"
+    Then Enter runtime argument value "username" for key "Username"
+    Then Enter runtime argument value "password" for key "Password"
+    Then Run the Pipeline in Runtime with runtime arguments
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Close the pipeline logs
+    Then Validate the values of records transferred to target table is equal to the values from source table
+
+  @CLOUDMYSQL_SOURCE_TEST @CLOUDMYSQL_SINK_TEST @PLUGIN-20670
+  Scenario: To verify data is getting transferred from CloudMySql to CloudMySql successfully using macro arguments in basic section
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Replace input plugin property: "database" with value: "DatabaseName"
+    Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "CloudMySqlImportQuery"
+    Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL2"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Validate "CloudSQL MySQL2" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Enter runtime argument value "selectQuery" for key "CloudMySqlImportQuery"
+    Then Run the preview of pipeline with runtime arguments
+    Then Wait till pipeline preview is in running state
+    Then Verify the preview of pipeline is "success"
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Enter runtime argument value "selectQuery" for key "CloudMySqlImportQuery"
+    Then Run the Pipeline in Runtime with runtime arguments
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Close the pipeline logs
+    Then Validate the values of records transferred to target table is equal to the values from source table
+
+  @CLOUDMYSQL_SOURCE_TEST @CLOUDMYSQL_TARGET_TEST @PLUGIN-20670
+  Scenario: To verify data is getting transferred from CloudMySql to CloudMySql successfully using macro arguments in advanced section
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL MySQL" and "CloudSQL MySQL2" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    And Click on the Macro button of Property: "fetchSize" and set the value to: "fetchSize"
+    And Click on the Macro button of Property: "splitBy" and set the value to: "SplitBy"
+    Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL2"
+    Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
+    Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Enter input plugin property: "database" with value: "DatabaseName"
+    Then Replace input plugin property: "tableName" with value: "targetTable"
+    Then Validate "CloudSQL MySQL2" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Enter runtime argument value "fetchSize" for key "fetchSize"
+    Then Enter runtime argument value "splitby" for key "SplitBy"
+    Then Run the preview of pipeline with runtime arguments
+    Then Wait till pipeline preview is in running state
+    Then Verify the preview of pipeline is "success"
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Enter runtime argument value "fetchSize" for key "fetchSize"
+    Then Enter runtime argument value "splitby" for key "SplitBy"
+    Then Run the Pipeline in Runtime with runtime arguments
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Close the pipeline logs
+    Then Validate the values of records transferred to target table is equal to the values from source table
+
+  @CLOUDMYSQL_SOURCE_TEST @BQ_SINK_TEST
+  Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using macro arguments in connection section
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
+    Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection
+    Then Navigate to the properties page of plugin: "CloudSQL MySQL"
+    Then Click on the Macro button of Property: "jdbcPluginName" and set the value to: "cloudSQLMySQLDriverName"
+    Then Select radio button plugin property: "instanceType" with value: "public"
+    Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields
+    Then Click on the Macro button of Property: "user" and set the value to: "cloudSQLMySQLUsername"
+    Then Click on the Macro button of Property: "password" and set the value to: "cloudSQLMySQLPassword"
+    Then Enter input plugin property: "referenceName" with value: "RefName"
+    Then Replace input plugin property: "database" with value: "DatabaseName"
+    Then Enter textarea plugin property: "importQuery" with value: "selectQuery"
+    Then Validate "CloudSQL MySQL" plugin properties
+    Then Close the Plugin Properties page
+    Then Navigate to the properties page of plugin: "BigQuery"
+    Then Replace input plugin property: "project" with value: "projectId"
+    Then Enter input plugin property: "datasetProject" with value: "projectId"
+    Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
+    Then Enter input plugin property: "dataset" with value: "dataset"
+    Then Enter input plugin property: "table" with value: "bqTargetTable"
+    Then Validate "BigQuery" plugin properties
+    Then Close the Plugin Properties page
+    Then Save the pipeline
+    Then Preview and run the pipeline
+    Then Enter runtime argument value "driverName" for key "cloudSQLMySQLDriverName"
+    Then Enter runtime argument value from environment variable "username" for key "cloudSQLMySQLUsername"
+    Then Enter runtime argument value from environment variable "password" for key "cloudSQLMySQLPassword"
+    Then Run the preview of pipeline with runtime arguments
+    Then Wait till pipeline preview is in running state
+    Then Verify the preview of pipeline is "success"
+    Then Close the preview
+    Then Deploy the pipeline
+    Then Run the Pipeline in Runtime
+    Then Enter runtime argument value "driverName" for key "cloudSQLMySQLDriverName"
+    Then Enter runtime argument value from environment variable "username" for key "cloudSQLMySQLUsername"
+    Then Enter runtime argument value from environment variable "password" for key "cloudSQLMySQLPassword"
+    Then Run the Pipeline in Runtime with runtime arguments
+    Then Wait till pipeline is in running state
+    Then Open and capture logs
+    Then Verify the pipeline status is "Succeeded"
+    Then Close the pipeline logs
+    Then Validate the values of records transferred to target Big Query table is equal to the values from source table
+
+  @CLOUDMYSQL_SOURCE_TEST @BQ_SINK_TEST
+  Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using macro arguments in basic section
+    Given Open Datafusion Project to configure pipeline
+    When Expand Plugin group in the LHS plugins list: "Source"
+    When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source"
+    When Expand Plugin group in the LHS plugins list: "Sink"
+    When Select plugin: "BigQuery" from the plugins list as: "Sink"
"BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Replace input plugin property: "database" with value: "DatabaseName" + Then Click on the Macro button of Property: "importQuery" and set the value in textarea: "CloudMySqlImportQuery" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "selectQuery" for key "CloudMySqlImportQuery" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Verify the preview of pipeline is "success" + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "selectQuery" for key "CloudMySqlImportQuery" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target Big Query table is equal to the values from source table + + @CLOUDMYSQL_SOURCE_TEST @BQ_SINK_TEST + Scenario: To verify data is getting transferred from CloudMySql source to BigQuery sink successfully using macro arguments in advance section + Given Open Datafusion Project to configure pipeline + When Expand Plugin group in the LHS plugins list: "Source" + When Select plugin: "CloudSQL MySQL" from the plugins list as: "Source" + When Expand Plugin group in the LHS plugins list: "Sink" + When Select plugin: "BigQuery" from the plugins list as: "Sink" + Then Connect plugins: "CloudSQL MySQL" and "BigQuery" to establish connection + Then Navigate to the properties page of plugin: "CloudSQL MySQL" + Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName" + Then Select radio button plugin property: "instanceType" with value: "public" + Then Replace input plugin property: "connectionName" with value: "connectionName" for Credentials and Authorization related fields + Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields + Then Replace input plugin property: "password" with value: 
"password" for Credentials and Authorization related fields + Then Enter input plugin property: "referenceName" with value: "RefName" + Then Enter input plugin property: "database" with value: "DatabaseName" + And Click on the Macro button of Property: "fetchSize" and set the value to: "fetchSize" + And Click on the Macro button of Property: "splitBy" and set the value to: "SplitBy" + Then Enter textarea plugin property: "importQuery" with value: "selectQuery" + Then Validate "CloudSQL MySQL" plugin properties + Then Close the Plugin Properties page + Then Navigate to the properties page of plugin: "BigQuery" + Then Replace input plugin property: "project" with value: "projectId" + Then Enter input plugin property: "datasetProject" with value: "projectId" + Then Enter input plugin property: "referenceName" with value: "BQReferenceName" + Then Enter input plugin property: "dataset" with value: "dataset" + Then Enter input plugin property: "table" with value: "bqTargetTable" + Then Validate "BigQuery" plugin properties + Then Close the Plugin Properties page + Then Save the pipeline + Then Preview and run the pipeline + Then Enter runtime argument value "fetchSize" for key "fetchSize" + Then Enter runtime argument value "splitBy" for key "SplitBy" + Then Run the preview of pipeline with runtime arguments + Then Wait till pipeline preview is in running state + Then Verify the preview of pipeline is "success" + Then Close the preview + Then Deploy the pipeline + Then Run the Pipeline in Runtime + Then Enter runtime argument value "fetchSize" for key "fetchSize" + Then Enter runtime argument value "splitBy" for key "SplitBy" + Then Run the Pipeline in Runtime with runtime arguments + Then Wait till pipeline is in running state + Then Open and capture logs + Then Verify the pipeline status is "Succeeded" + Then Close the pipeline logs + Then Validate the values of records transferred to target Big Query table is equal to the values from source table diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java new file mode 100644 index 000000000..55f268415 --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/CloudMySqlClient.java @@ -0,0 +1,180 @@ +/* + * Copyright © 2023 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ +package io.cdap.plugin; + +import io.cdap.e2e.utils.PluginPropertyUtils; +import org.junit.Assert; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.TimeZone; + +/** + * CloudSQLMySQL client. 
+ */
+
+public class CloudMySqlClient {
+
+  public static Connection getCloudSqlConnection() throws ClassNotFoundException, SQLException {
+    Class.forName("com.google.cloud.sql.mysql.SocketFactory");
+    String instanceConnectionName = System.getenv("CLOUDSQL_MYSQL_CONNECTION_NAME");
+    String database = PluginPropertyUtils.pluginProp("DatabaseName");
+    String username = System.getenv("CLOUDSQL_MYSQL_USERNAME");
+    String password = System.getenv("CLOUDSQL_MYSQL_PASSWORD");
+    String jdbcUrl = String.format(PluginPropertyUtils.pluginProp("jdbcURL"), database, instanceConnectionName,
+                                   username, password);
+    return DriverManager.getConnection(jdbcUrl);
+  }
+
+  public static int countRecord(String table) throws SQLException, ClassNotFoundException {
+    String countQuery = "SELECT COUNT(*) as total FROM " + table;
+    try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement();
+         ResultSet rs = statement.executeQuery(countQuery)) {
+      int num = 0;
+      while (rs.next()) {
+        num = rs.getInt(1);
+      }
+      return num;
+    }
+  }
+
+  public static boolean validateRecordValues(String sourceTable, String targetTable) throws SQLException,
+    ClassNotFoundException {
+    String getSourceQuery = "SELECT * FROM " + sourceTable;
+    String getTargetQuery = "SELECT * FROM " + targetTable;
+    try (Connection connect = getCloudSqlConnection()) {
+      connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
+                                                     ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      Statement statement2 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
+                                                     ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      ResultSet rsSource = statement1.executeQuery(getSourceQuery);
+      ResultSet rsTarget = statement2.executeQuery(getTargetQuery);
+      return compareResultSetData(rsSource, rsTarget);
+    }
+  }
+
+  /**
+   * Compares the result set data in the source table and sink table.
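+   * <p>TIMESTAMP columns are read through a UTC {@link GregorianCalendar} whose Gregorian change
+   * is pushed back to the earliest representable date, so both sides are compared on the same
+   * proleptic-Gregorian, zone-independent timeline; all other types are compared as strings.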
+   *
+   * @param rsSource result set of the source table data
+   * @param rsTarget result set of the target table data
+   * @return true if rsSource matches rsTarget
+   */
+  public static boolean compareResultSetData(ResultSet rsSource, ResultSet rsTarget) throws SQLException {
+    ResultSetMetaData mdSource = rsSource.getMetaData();
+    ResultSetMetaData mdTarget = rsTarget.getMetaData();
+    int columnCountSource = mdSource.getColumnCount();
+    int columnCountTarget = mdTarget.getColumnCount();
+    Assert.assertEquals("Number of columns in source and target are not equal",
+                        columnCountSource, columnCountTarget);
+    while (rsSource.next() && rsTarget.next()) {
+      int currentColumnCount = 1;
+      while (currentColumnCount <= columnCountSource) {
+        String columnTypeName = mdSource.getColumnTypeName(currentColumnCount);
+        int columnType = mdSource.getColumnType(currentColumnCount);
+        String columnName = mdSource.getColumnName(currentColumnCount);
+        if (columnType == Types.TIMESTAMP) {
+          GregorianCalendar gc = new GregorianCalendar(TimeZone.getTimeZone("UTC"));
+          gc.setGregorianChange(new Date(Long.MIN_VALUE));
+          Timestamp sourceTS = rsSource.getTimestamp(currentColumnCount, gc);
+          Timestamp targetTS = rsTarget.getTimestamp(currentColumnCount, gc);
+          Assert.assertTrue(String.format("Different values found for column : %s", columnName),
+                            sourceTS.equals(targetTS));
+        } else {
+          String sourceString = rsSource.getString(currentColumnCount);
+          String targetString = rsTarget.getString(currentColumnCount);
+          Assert.assertEquals(String.format("Different values found for column : %s", columnName),
+                              String.valueOf(sourceString), String.valueOf(targetString));
+        }
+        currentColumnCount++;
+      }
+    }
+    Assert.assertFalse("Number of rows in Source table is greater than the number of rows in Target table",
+                       rsSource.next());
+    Assert.assertFalse("Number of rows in Target table is greater than the number of rows in Source table",
+                       rsTarget.next());
+    return true;
+  }
+
+  public static void createSourceTable(String sourceTable) throws SQLException, ClassNotFoundException {
+    try (Connection connection = getCloudSqlConnection();
+         Statement statement = connection.createStatement()) {
+      String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns");
+      String createSourceTableQuery = "CREATE TABLE " + sourceTable + " " + datatypesColumns;
+      statement.executeUpdate(createSourceTableQuery);
+      // Insert dummy data.
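+      // The DDL fragment and row data come from pluginParameters.properties. Illustrative shape
+      // (assumed, not the actual values): datatypesColumns = "(ID int, NAME varchar(100), ...)",
+      // datatypesColumnsList = "(ID, NAME, ...)", datatypesValue1 = "VALUES (1, 'abc', ...)".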
+      String datatypesValues = PluginPropertyUtils.pluginProp("datatypesValue1");
+      String datatypesColumnsList = PluginPropertyUtils.pluginProp("datatypesColumnsList");
+      statement.executeUpdate("INSERT INTO " + sourceTable + " " + datatypesColumnsList + " " +
+                                datatypesValues);
+    }
+  }
+
+  public static void createTargetTable(String targetTable) throws SQLException, ClassNotFoundException {
+    try (Connection connection = getCloudSqlConnection();
+         Statement statement = connection.createStatement()) {
+      String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns");
+      String createTargetTableQuery = "CREATE TABLE " + targetTable + " " + datatypesColumns;
+      statement.executeUpdate(createTargetTableQuery);
+    }
+  }
+
+  public static void createSourceDatatypesTable(String sourceTable) throws SQLException, ClassNotFoundException {
+    try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement()) {
+      String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns");
+      String createSourceTableQuery = "CREATE TABLE " + sourceTable + " " + datatypesColumns;
+      statement.executeUpdate(createSourceTableQuery);
+      // Insert dummy data.
+      String datatypesValues = PluginPropertyUtils.pluginProp("datatypesValue1");
+      String datatypesColumnsList = PluginPropertyUtils.pluginProp("datatypesColumnsList");
+      statement.executeUpdate("INSERT INTO " + sourceTable + " " + datatypesColumnsList + " " + datatypesValues);
+    }
+  }
+
+  public static void createTargetDatatypesTable(String targetTable) throws SQLException, ClassNotFoundException {
+    try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement()) {
+      String datatypesColumns = PluginPropertyUtils.pluginProp("datatypesColumns");
+      String createTargetTableQuery = "CREATE TABLE " + targetTable + " " + datatypesColumns;
+      statement.executeUpdate(createTargetTableQuery);
+    }
+  }
+
+  public static void createTargetCloudMysqlTable(String targetTable) throws SQLException, ClassNotFoundException {
+    try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement()) {
+      String datatypesColumns = PluginPropertyUtils.pluginProp("CloudMySqlDatatypesColumns");
+      String createTargetTableQuery = "CREATE TABLE " + targetTable + " " + datatypesColumns;
+      statement.executeUpdate(createTargetTableQuery);
+    }
+  }
+
+  public static void deleteTable(String table)
+    throws SQLException, ClassNotFoundException {
+    try (Connection connect = getCloudSqlConnection(); Statement statement = connect.createStatement()) {
+      String dropTableQuery = "DROP TABLE " + table;
+      statement.execute(dropTableQuery);
+    }
+  }
+}
diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/BQValidation.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/BQValidation.java
new file mode 100644
index 000000000..cd9640e7b
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/BQValidation.java
@@ -0,0 +1,256 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.cloudmysql;
+
+import com.google.cloud.bigquery.TableResult;
+import com.google.gson.Gson;
+import com.google.gson.JsonObject;
+import io.cdap.e2e.utils.BigQueryClient;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cdap.plugin.CloudMySqlClient;
+import org.apache.spark.sql.types.Decimal;
+import org.junit.Assert;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.Date;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.sql.Time;
+import java.sql.Types;
+import java.text.ParseException;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.ArrayList;
+import java.util.Base64;
+import java.util.List;
+
+/**
+ * BQValidation.
+ */
+
+public class BQValidation {
+
+  public static List<JsonObject> bigQueryResponse = new ArrayList<>();
+  public static List<Object> bigQueryRows = new ArrayList<>();
+  public static Gson gson = new Gson();
+
+  /**
+   * Extracts entire data from source and target tables.
+   *
+   * @param sourceTable table at the source side.
+   * @param targetTable table at the sink side.
+   * @return true if the values in source and target side are equal.
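+   * <p>BigQuery rows are fetched as TO_JSON strings via {@code getBigQueryTableData} and then
+   * compared column by column against a JDBC result set read from the Cloud SQL table.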
+   */
+  public static boolean validateBQAndDBRecordValues(String sourceTable, String targetTable)
+    throws SQLException, ClassNotFoundException, IOException, InterruptedException, ParseException {
+    getBigQueryTableData(sourceTable, bigQueryRows);
+    for (Object rows : bigQueryRows) {
+      JsonObject json = gson.fromJson(String.valueOf(rows), JsonObject.class);
+      bigQueryResponse.add(json);
+    }
+    String getTargetQuery = "SELECT * FROM " + targetTable;
+    try (Connection connect = CloudMySqlClient.getCloudSqlConnection()) {
+      connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
+                                                     ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      ResultSet rsTarget = statement1.executeQuery(getTargetQuery);
+      return compareResultSetWithJsonData(rsTarget, bigQueryResponse);
+    }
+  }
+
+  public static boolean validateDBAndBQRecordValues(String sourceTable, String targetTable)
+    throws SQLException, ClassNotFoundException, IOException, InterruptedException, ParseException {
+    getBigQueryTableData(targetTable, bigQueryRows);
+    for (Object rows : bigQueryRows) {
+      JsonObject json = gson.fromJson(String.valueOf(rows), JsonObject.class);
+      bigQueryResponse.add(json);
+    }
+    String getSourceQuery = "SELECT * FROM " + sourceTable;
+    try (Connection connect = CloudMySqlClient.getCloudSqlConnection()) {
+      connect.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      Statement statement1 = connect.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE,
+                                                     ResultSet.HOLD_CURSORS_OVER_COMMIT);
+      ResultSet rsSource = statement1.executeQuery(getSourceQuery);
+      return compareResultSetWithJsonData(rsSource, bigQueryResponse);
+    }
+  }
+
+  /**
+   * Retrieves the data from a specified BigQuery table and populates it into the provided list of objects.
+   *
+   * @param table        The name of the BigQuery table to fetch data from.
+   * @param bigQueryRows The list to store the fetched BigQuery data.
+   */
+  private static void getBigQueryTableData(String table, List<Object> bigQueryRows)
+    throws IOException, InterruptedException {
+    String projectId = PluginPropertyUtils.pluginProp("projectId");
+    String dataset = PluginPropertyUtils.pluginProp("dataset");
+    String selectQuery = "SELECT TO_JSON(t) FROM `" + projectId + "." + dataset + "." + table + "` AS t";
+    TableResult result = BigQueryClient.getQueryResult(selectQuery);
+    result.iterateAll().forEach(value -> bigQueryRows.add(value.get(0).getValue()));
+  }
+
+  /**
+   * Compares the data in the result set obtained from the CloudSqlMySql database with provided BigQuery JSON objects.
+   *
+   * @param rsSource     The result set obtained from the CloudSql MySql database.
+   * @param bigQueryData The list of BigQuery JSON objects to compare with the result set data.
+   * @return True if the result set data matches the BigQuery data, false otherwise.
+   * @throws SQLException If an SQL error occurs during the result set operations.
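+   * <p>Each BigQuery row arrives as a single JSON object keyed by column name, for example an
+   * illustrative row {@code {"ID": 1, "NAME": "abc"}}; values are therefore compared as strings,
+   * except for the type-specific cases handled in the switch below.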
+
+  /**
+   * Compares the data in the result set obtained from the CloudSQL MySQL database with the provided
+   * BigQuery JSON objects.
+   *
+   * @param rsSource     The result set obtained from the CloudSQL MySQL database.
+   * @param bigQueryData The list of BigQuery JSON objects to compare with the result set data.
+   * @return true if the result set data matches the BigQuery data, false otherwise.
+   * @throws SQLException If an SQL error occurs during the result set operations.
+   */
+  public static boolean compareResultSetWithJsonData(ResultSet rsSource, List<JsonObject> bigQueryData)
+    throws SQLException {
+    Assert.assertNotNull("bigQueryData is null", bigQueryData);
+    ResultSetMetaData mdSource = rsSource.getMetaData();
+    int columnCountSource = mdSource.getColumnCount();
+
+    // Get the column count of the first JsonObject in bigQueryData.
+    int columnCountTarget = 0;
+    if (bigQueryData.size() > 0) {
+      columnCountTarget = bigQueryData.get(0).entrySet().size();
+    }
+    // Compare the number of columns in the source and target.
+    Assert.assertEquals(columnCountSource, columnCountTarget);
+
+    // 'jsonObjectIdx' tracks the index of the current JsonObject in the bigQueryData list.
+    int jsonObjectIdx = 0;
+    while (rsSource.next()) {
+      int currentColumnCount = 1;
+      while (currentColumnCount <= columnCountSource) {
+        String columnTypeName = mdSource.getColumnTypeName(currentColumnCount);
+        int columnType = mdSource.getColumnType(currentColumnCount);
+        String columnName = mdSource.getColumnName(currentColumnCount);
+        // Perform different comparisons based on the JDBC column type.
+        switch (columnType) {
+          case Types.BIT:
+            boolean sourceBit = rsSource.getBoolean(currentColumnCount);
+            boolean targetBit = Boolean.parseBoolean(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString());
+            Assert.assertEquals(String.format("Different values found for column : %s", columnName),
+                                String.valueOf(sourceBit), String.valueOf(targetBit));
+            break;
+
+          case Types.SMALLINT:
+          case Types.INTEGER:
+          case Types.TINYINT:
+            int sourceInt = rsSource.getInt(currentColumnCount);
+            int targetInt = Integer.parseInt(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString());
+            Assert.assertEquals(String.format("Different values found for column : %s", columnName),
+                                String.valueOf(sourceInt), String.valueOf(targetInt));
+            break;
+
+          case Types.REAL:
+            float sourceFloat = rsSource.getFloat(currentColumnCount);
+            float targetFloat = Float.parseFloat(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString());
+            Assert.assertEquals(String.format("Different values found for column : %s", columnName),
+                                String.valueOf(sourceFloat), String.valueOf(targetFloat));
+            break;
+
+          case Types.DOUBLE:
+            double sourceDouble = rsSource.getDouble(currentColumnCount);
+            double targetDouble = Double.parseDouble(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString());
+            Assert.assertEquals(String.format("Different values found for column : %s", columnName),
+                                String.valueOf(sourceDouble), String.valueOf(targetDouble));
+            break;
+
+          case Types.DATE:
+            Date sourceDate = rsSource.getDate(currentColumnCount);
+            Date targetDate = Date.valueOf(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString());
+            Assert.assertEquals(String.format("Different values found for column : %s", columnName),
+                                String.valueOf(sourceDate), String.valueOf(targetDate));
+            break;
+
+          case Types.TIME:
+            Time sourceTime = rsSource.getTime(currentColumnCount);
+            Time targetTime = Time.valueOf(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString());
+            Assert.assertEquals(String.format("Different values found for column : %s", columnName),
+                                String.valueOf(sourceTime), String.valueOf(targetTime));
+            break;
+
+          case Types.DECIMAL:
+            Decimal sourceDecimal = Decimal.fromDecimal(rsSource.getBigDecimal(currentColumnCount));
+            Decimal targetDecimal = Decimal.fromDecimal(
+              bigQueryData.get(jsonObjectIdx).get(columnName).getAsBigDecimal());
+            Assert.assertEquals(String.format("Different values found for column : %s", columnName),
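+
+          // BigQuery's TO_JSON renders BYTES columns as base64 strings, so the JDBC bytes are
+          // base64-encoded before comparison in the binary cases below.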
+                                sourceDecimal, targetDecimal);
+            break;
+
+          case Types.BLOB:
+          case Types.VARBINARY:
+          case Types.LONGVARBINARY:
+          case Types.BINARY:
+            String sourceB64String = new String(Base64.getEncoder().encode(rsSource.getBytes(currentColumnCount)));
+            String targetB64String = bigQueryData.get(jsonObjectIdx).get(columnName).getAsString();
+            Assert.assertEquals(String.format("Different values found for column : %s", columnName),
+                                sourceB64String, targetB64String);
+            break;
+
+          case Types.NUMERIC:
+            long sourceVal = rsSource.getLong(currentColumnCount);
+            long targetVal = Long.parseLong(bigQueryData.get(jsonObjectIdx).get(columnName).getAsString());
+            Assert.assertEquals(String.format("Different values found for column : %s", columnName),
+                                String.valueOf(sourceVal), String.valueOf(targetVal));
+            break;
+
+          case Types.TIMESTAMP:
+            // BigQuery returns timestamps in UTC; convert to the system time zone before comparing
+            // with the JDBC timestamp string.
+            String sourceTS = String.valueOf(rsSource.getTimestamp(currentColumnCount));
+            String targetTS = bigQueryData.get(jsonObjectIdx).get(columnName).getAsString();
+            LocalDateTime timestamp = LocalDateTime.parse(targetTS, DateTimeFormatter.ISO_DATE_TIME);
+            ZonedDateTime utcDateTime = ZonedDateTime.of(timestamp, ZoneOffset.UTC);
+            ZoneId systemTimeZone = ZoneId.systemDefault();
+            ZonedDateTime convertedDateTime = utcDateTime.withZoneSameInstant(systemTimeZone);
+            DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.S");
+            String formattedTimestamp = convertedDateTime.format(formatter);
+            Assert.assertEquals(String.format("Different values found for column : %s", columnName),
+                                sourceTS, formattedTimestamp);
+            break;
+
+          default:
+            String sourceString = rsSource.getString(currentColumnCount);
+            String targetString = bigQueryData.get(jsonObjectIdx).get(columnName).getAsString();
+            Assert.assertEquals(String.format("Different %s values found for column : %s", columnTypeName, columnName),
+                                String.valueOf(sourceString), String.valueOf(targetString));
+            break;
+        }
+        currentColumnCount++;
+      }
+      jsonObjectIdx++;
+    }
+    // The loop above exhausts the JDBC result set; make sure BigQuery does not have extra rows.
+    Assert.assertEquals("Number of rows in the source and target tables should be the same",
+                        bigQueryData.size(), jsonObjectIdx);
+    return true;
+  }
+}
diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/package-info.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/package-info.java
new file mode 100644
index 000000000..da5e5b8ad
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+/**
+ * Package contains BQ validation.
+ */
+package io.cdap.plugin.cloudmysql;
diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/runners/TestRunner.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/runners/TestRunner.java
new file mode 100644
index 000000000..f3883a2e9
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/runners/TestRunner.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.cloudmysql.runners;
+
+import io.cucumber.junit.Cucumber;
+import io.cucumber.junit.CucumberOptions;
+import org.junit.runner.RunWith;
+
+/**
+ * Test Runner to execute CloudSQL MySQL plugin test cases.
+ */
+@RunWith(Cucumber.class)
+@CucumberOptions(
+  features = {"src/e2e-test/features"},
+  glue = {"stepsdesign", "io.cdap.plugin.common.stepsdesign", "io.cdap.plugin.cloudmysql.stepsdesign"},
+  tags = {"@CloudMySql and not @PLUGIN-20670 and not @PLUGIN-1633"},
+  /* TODO: Enable these tests once the issues are fixed:
+     https://cdap.atlassian.net/browse/CDAP-20670, https://cdap.atlassian.net/browse/PLUGIN-1633 */
+  plugin = {"pretty", "html:target/cucumber-html-report/CloudMySql",
+    "json:target/cucumber-reports/cucumber-mysql.json",
+    "junit:target/cucumber-reports/cucumber-mysql.xml"}
+)
+public class TestRunner {
+}
diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/runners/TestRunnerRequired.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/runners/TestRunnerRequired.java
new file mode 100644
index 000000000..1f9916f4c
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/runners/TestRunnerRequired.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.cloudmysql.runners;
+
+import io.cucumber.junit.Cucumber;
+import io.cucumber.junit.CucumberOptions;
+import org.junit.runner.RunWith;
+
+/**
+ * Test Runner to execute only the required CloudSQL MySQL plugin test cases.
+ */
+@RunWith(Cucumber.class)
+@CucumberOptions(
+  features = {"src/e2e-test/features"},
+  glue = {"stepsdesign", "io.cdap.plugin.common.stepsdesign", "io.cdap.plugin.cloudmysql.stepsdesign"},
+  tags = {"@CloudMySql_Required and not @PLUGIN-20670"},
+  /* TODO: Enable these tests once the issue is fixed: https://cdap.atlassian.net/browse/CDAP-20670 */
+  monochrome = true,
+  plugin = {"pretty", "html:target/cucumber-html-report/CloudMySql",
+    "json:target/cucumber-reports/cucumber-mysql.json",
+    "junit:target/cucumber-reports/cucumber-mysql.xml"}
+)
+public class TestRunnerRequired {
+}
diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/runners/package-info.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/runners/package-info.java
new file mode 100644
index 000000000..95325299f
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/runners/package-info.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+/**
+ * Package contains the runners for the CloudSQL MySQL plugin.
+ */
+package io.cdap.plugin.cloudmysql.runners;
diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/stepsdesign/CloudMysql.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/stepsdesign/CloudMysql.java
new file mode 100644
index 000000000..0960465d5
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/stepsdesign/CloudMysql.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.cloudmysql.stepsdesign;
+
+import io.cdap.e2e.pages.actions.CdfPipelineRunAction;
+import io.cdap.e2e.utils.BigQueryClient;
+import io.cdap.e2e.utils.CdfHelper;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cdap.plugin.CloudMySqlClient;
+import io.cdap.plugin.cloudmysql.BQValidation;
+import io.cucumber.java.en.Then;
+import org.junit.Assert;
+import stepsdesign.BeforeActions;
+
+import java.io.IOException;
+import java.sql.SQLException;
+import java.text.ParseException;
+
+/**
+ * CloudSQL MySQL plugin related step design.
+ */
+public class CloudMysql implements CdfHelper {
+  @Then("Validate the values of records transferred to target table is equal to the values from source table")
+  public void validateTheValuesOfRecordsTransferredToTargetTableIsEqualToTheValuesFromSourceTable()
+    throws SQLException, ClassNotFoundException {
+    int countRecords = CloudMySqlClient.countRecord(PluginPropertyUtils.pluginProp("targetTable"));
+    Assert.assertEquals("Number of records transferred should be equal to records out",
+                        countRecords, recordOut());
+    BeforeActions.scenario.write(" ******** Number of records transferred ********:" + countRecords);
+    boolean recordsMatched = CloudMySqlClient.validateRecordValues(PluginPropertyUtils.pluginProp("sourceTable"),
+                                                                   PluginPropertyUtils.pluginProp("targetTable"));
+    Assert.assertTrue("Value of records transferred to the target table should be equal to the value " +
+                        "of the records in the source table", recordsMatched);
+  }
+
+  @Then("Validate the values of records transferred to target Big Query table is equal to the values from source table")
+  public void validateTheValuesOfRecordsTransferredToTargetBigQueryTableIsEqualToTheValuesFromSourceTable()
+    throws InterruptedException, IOException, SQLException, ClassNotFoundException, ParseException {
+    int targetBQRecordsCount = BigQueryClient.countBqQuery(PluginPropertyUtils.pluginProp("bqTargetTable"));
+    BeforeActions.scenario.write("No of Records Transferred to BigQuery:" + targetBQRecordsCount);
+    Assert.assertEquals("Out records should match with target BigQuery table records count",
+                        CdfPipelineRunAction.getCountDisplayedOnSourcePluginAsRecordsOut(), targetBQRecordsCount);
+
+    boolean recordsMatched = BQValidation.validateDBAndBQRecordValues(
+      PluginPropertyUtils.pluginProp("sourceTable"),
+      PluginPropertyUtils.pluginProp("bqTargetTable"));
+    Assert.assertTrue("Value of records transferred to the target table should be equal to the value " +
+                        "of the records in the source table", recordsMatched);
+  }
+
+  @Then("Validate the values of records transferred to target CloudSQLMySql table is equal to the values from source " +
+    "BigQuery table")
+  public void validateTheValuesOfRecordsTransferredToTargetCloudSQLMySqlTableIsEqualToTheValuesFromSourceBigQueryTable()
+    throws InterruptedException, IOException, SQLException, ClassNotFoundException, ParseException {
+    int sourceBQRecordsCount = BigQueryClient.countBqQuery(PluginPropertyUtils.pluginProp("bqSourceTable"));
+    BeforeActions.scenario.write("No of Records from source BigQuery table:" + sourceBQRecordsCount);
+    Assert.assertEquals("Out records should match with source BigQuery table records count",
+                        CdfPipelineRunAction.getCountDisplayedOnSourcePluginAsRecordsOut(), sourceBQRecordsCount);
+
+    boolean recordsMatched = BQValidation.validateBQAndDBRecordValues(
+      PluginPropertyUtils.pluginProp("bqSourceTable"),
+      PluginPropertyUtils.pluginProp("targetTable"));
+    Assert.assertTrue("Value of records transferred to the target table should be equal to the value " +
+                        "of the records in the source table", recordsMatched);
+  }
+}
diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/stepsdesign/package-info.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/stepsdesign/package-info.java
new file mode 100644
index 000000000..0bbbf6bd0
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/cloudmysql/stepsdesign/package-info.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+/**
+ * Package contains the step designs for CloudMySql features.
+ */
+package io.cdap.plugin.cloudmysql.stepsdesign;
diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
new file mode 100644
index 000000000..ca33d3fcf
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/TestSetupHooks.java
@@ -0,0 +1,212 @@
+/*
+ * Copyright © 2023 Cask Data, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package io.cdap.plugin.common.stepsdesign;
+
+import com.google.cloud.bigquery.BigQueryException;
+import io.cdap.e2e.utils.BigQueryClient;
+import io.cdap.e2e.utils.PluginPropertyUtils;
+import io.cdap.plugin.CloudMySqlClient;
+import io.cucumber.java.After;
+import io.cucumber.java.Before;
+import org.apache.commons.lang3.RandomStringUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.junit.Assert;
+import stepsdesign.BeforeActions;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.sql.SQLException;
+import java.util.NoSuchElementException;
+import java.util.UUID;
+
+/**
+ * CloudSQL MySQL test hooks.
+ */
+public class TestSetupHooks {
+
+  public static void setTableName() {
+    String randomString = RandomStringUtils.randomAlphabetic(10);
+    String sourceTableName = String.format("SourceTable_%s", randomString);
+    String targetTableName = String.format("TargetTable_%s", randomString);
+    PluginPropertyUtils.addPluginProp("sourceTable", sourceTableName);
+    PluginPropertyUtils.addPluginProp("targetTable", targetTableName);
+    PluginPropertyUtils.addPluginProp("selectQuery", String.format("select * from %s", sourceTableName));
+  }
+
+  @Before(order = 1)
+  public static void initializeDBProperties() {
+    String username = System.getenv("username");
+    if (username != null && !username.isEmpty()) {
+      PluginPropertyUtils.addPluginProp("username", username);
+    }
+    String password = System.getenv("password");
+    if (password != null && !password.isEmpty()) {
+      PluginPropertyUtils.addPluginProp("password", password);
+    }
+    setTableName();
+  }
+
+  @Before(order = 2, value = "@CLOUDMYSQL_SOURCE_TEST")
+  public static void createSourceTable() throws SQLException, ClassNotFoundException {
+    CloudMySqlClient.createSourceTable(PluginPropertyUtils.pluginProp("sourceTable"));
+    BeforeActions.scenario.write("CLOUDMYSQL Source table - " + PluginPropertyUtils.pluginProp("sourceTable") +
+                                   " created successfully");
+  }
+
+  @After(order = 2, value = "@CLOUDMYSQL_SOURCE_TEST")
+  public static void dropSourceTable() throws SQLException, ClassNotFoundException {
+    CloudMySqlClient.deleteTable(PluginPropertyUtils.pluginProp("sourceTable"));
+    BeforeActions.scenario.write("CLOUDMYSQL Source table - " + PluginPropertyUtils.pluginProp("sourceTable") +
+                                   " deleted successfully");
+  }
+
+  @Before(order = 2, value = "@CLOUDMYSQL_TARGET_TEST")
+  public static void createTargetTable() throws SQLException, ClassNotFoundException {
+    CloudMySqlClient.createTargetTable(PluginPropertyUtils.pluginProp("targetTable"));
+    BeforeActions.scenario.write("CLOUDMYSQL Target table - " + PluginPropertyUtils.pluginProp("targetTable") +
+                                   " created successfully");
+  }
+
+  @After(order = 2, value = "@CLOUDMYSQL_TARGET_TEST")
+  public static void dropTargetTable() throws SQLException, ClassNotFoundException {
+    CloudMySqlClient.deleteTable(PluginPropertyUtils.pluginProp("targetTable"));
+    BeforeActions.scenario.write("CLOUDMYSQL Target table - " + PluginPropertyUtils.pluginProp("targetTable") +
+                                   " deleted successfully");
+  }
+
+  @Before(order = 2, value = "@CLOUDMYSQL_SOURCE_DATATYPES_TEST")
+  public static void createDatatypesSourceTable() throws SQLException, ClassNotFoundException {
+    CloudMySqlClient.createSourceDatatypesTable(PluginPropertyUtils.pluginProp("sourceTable"));
+    BeforeActions.scenario.write("CLOUDMYSQL Source DataTypes table - " +
+                                   PluginPropertyUtils.pluginProp("sourceTable") + " created successfully");
+  }
+
+  @After(order = 2, value = "@CLOUDMYSQL_SOURCE_DATATYPES_TEST")
+  public static void dropDataTypesSourceTable() throws SQLException, ClassNotFoundException {
+    CloudMySqlClient.deleteTable(PluginPropertyUtils.pluginProp("sourceTable"));
+    BeforeActions.scenario.write("CLOUDMYSQL Source DataTypes table - " +
+                                   PluginPropertyUtils.pluginProp("sourceTable") + " deleted successfully");
+  }
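+
+  // Note: Cucumber executes @Before hooks in ascending 'order' and @After hooks in descending
+  // 'order'; the tag-scoped hooks above and below only run for scenarios carrying that tag.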
" + + PluginPropertyUtils.pluginProp("targetTable") + " created successfully"); + } + + @After(order = 2, value = "@CLOUDMYSQL_TARGET_DATATYPES_TEST") + public static void dropDataTypesTargetTable() throws SQLException, ClassNotFoundException { + CloudMySqlClient.deleteTable(PluginPropertyUtils.pluginProp("targetTable")); + BeforeActions.scenario.write("CLOUDMYSQL Target DataTypes Table - " + + PluginPropertyUtils.pluginProp("targetTable") + " deleted successfully"); + } + + @Before(order = 2, value = "@CLOUDMYSQL_TEST_TABLE") + public static void createCloudMysqlTestTable() throws SQLException, ClassNotFoundException { + CloudMySqlClient.createTargetCloudMysqlTable(PluginPropertyUtils.pluginProp("targetTable")); + BeforeActions.scenario.write("CLOUDMYSQL Target DataTypes table - " + + PluginPropertyUtils.pluginProp("targetTable") + " created successfully"); + } + @After(order = 2, value = "@CLOUDMYSQL_TEST_TABLE") + public static void dropCloudMysqlTestTable() throws SQLException, ClassNotFoundException { + CloudMySqlClient.deleteTable(PluginPropertyUtils.pluginProp("targetTable")); + BeforeActions.scenario.write("CLOUDMYSQL Target DataTypes Table - " + + PluginPropertyUtils.pluginProp("targetTable") + " deleted successfully"); + } + + @Before(order = 1, value = "@BQ_SINK_TEST") + public static void setTempTargetBQTableName() { + String bqTargetTableName = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_"); + PluginPropertyUtils.addPluginProp("bqTargetTable", bqTargetTableName); + BeforeActions.scenario.write("BQ Target table name - " + bqTargetTableName); + } + + @After(order = 1, value = "@BQ_SINK_TEST") + public static void deleteTempTargetBQTable() throws IOException, InterruptedException { + String bqTargetTableName = PluginPropertyUtils.pluginProp("bqTargetTable"); + try { + BigQueryClient.dropBqQuery(bqTargetTableName); + BeforeActions.scenario.write("BQ Target table - " + bqTargetTableName + " deleted successfully"); + PluginPropertyUtils.removePluginProp("bqTargetTable"); + } catch (BigQueryException e) { + if (e.getMessage().contains("Not found: Table")) { + BeforeActions.scenario.write("BQ Target Table " + bqTargetTableName + " does not exist"); + } else { + Assert.fail(e.getMessage()); + } + } + } + + @Before(order = 2, value = "@BQ_SOURCE_TEST") + public static void createTempSourceBQTable() throws IOException, InterruptedException { + createSourceBQTableWithQueries(PluginPropertyUtils.pluginProp("CreateBQTableQueryFile"), + PluginPropertyUtils.pluginProp("InsertBQDataQueryFile")); + } + + @After(order = 1, value = "@BQ_SOURCE_TEST") + public static void deleteTempSourceBQTable() throws IOException, InterruptedException { + String bqSourceTable = PluginPropertyUtils.pluginProp("bqSourceTable"); + BigQueryClient.dropBqQuery(bqSourceTable); + BeforeActions.scenario.write("BQ source Table " + bqSourceTable + " deleted successfully"); + PluginPropertyUtils.removePluginProp("bqSourceTable"); + } + + private static void createSourceBQTableWithQueries(String bqCreateTableQueryFile, String bqInsertDataQueryFile) throws + IOException, InterruptedException, NullPointerException { + String bqSourceTable = "E2E_SOURCE_" + UUID.randomUUID().toString().substring(0, 5).replaceAll("-", + "_"); + + String createTableQuery = StringUtils.EMPTY; + try { + createTableQuery = new String( + Files.readAllBytes(Paths.get(TestSetupHooks.class.getResource("/" + bqCreateTableQueryFile).toURI())), + StandardCharsets.UTF_8); + createTableQuery = createTableQuery.replace("DATASET", 
PluginPropertyUtils.pluginProp("dataset")) + .replace("TABLE_NAME", bqSourceTable); + } catch (Exception e) { + e.printStackTrace(); + BeforeActions.scenario.write("Exception in reading " + bqCreateTableQueryFile + " - " + e.getMessage()); + Assert.fail( + "Exception in BigQuery testdata prerequisite setup " + "- error in reading create table query file " + + e.getMessage()); + } + + String insertDataQuery = StringUtils.EMPTY; + try { + insertDataQuery = new String( + Files.readAllBytes(Paths.get(TestSetupHooks.class.getResource("/" + bqInsertDataQueryFile).toURI())), + StandardCharsets.UTF_8); + insertDataQuery = insertDataQuery.replace("DATASET", PluginPropertyUtils.pluginProp("dataset")) + .replace("TABLE_NAME", bqSourceTable); + } catch (Exception e) { + BeforeActions.scenario.write("Exception in reading " + bqInsertDataQueryFile + " - " + e.getMessage()); + Assert.fail( + "Exception in BigQuery testdata prerequisite setup " + "- error in reading insert data query file " + + e.getMessage()); + + } + BigQueryClient.getSoleQueryResult(createTableQuery); + try { + BigQueryClient.getSoleQueryResult(insertDataQuery); + } catch (NoSuchElementException e) { + // Insert query does not return any record. + // Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException + } + PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable); + BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " created successfully"); + } +} diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/package-info.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/package-info.java new file mode 100644 index 000000000..63f8efabc --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/common/stepsdesign/package-info.java @@ -0,0 +1,20 @@ +/* + * Copyright © 2023 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + */ + +/** + * Package contains the stepDesign for common features. + */ +package io.cdap.plugin.common.stepsdesign; diff --git a/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/package-info.java b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/package-info.java new file mode 100644 index 000000000..c48152c19 --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/java/io/cdap/plugin/package-info.java @@ -0,0 +1,21 @@ +/* + * Copyright © 2023 Cask Data, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy of + * the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ */
+
+/**
+ * Package contains CloudMySqlClient.
+ */
+package io.cdap.plugin;
diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryCreateTableQuery.txt b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryCreateTableQuery.txt
new file mode 100644
index 000000000..1188d6591
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryCreateTableQuery.txt
@@ -0,0 +1,2 @@
+create table `DATASET.TABLE_NAME` (COL1 BYTES, COL2 STRING, COL3 DATE, COL4 FLOAT64, COL6 TIMESTAMP,
+COL8 BOOL, COL9 INT64, COL10 TIME)
diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryInsertDataQuery.txt b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryInsertDataQuery.txt
new file mode 100644
index 000000000..e35742887
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/resources/BigQuery/BigQueryInsertDataQuery.txt
@@ -0,0 +1,3 @@
+insert into `DATASET.TABLE_NAME` (COL1, COL2, COL3, COL4, COL6, COL8, COL9, COL10) values
+(b'01011011','priya','2021-01-26',1.110,'2019-03-10 04:50:01 UTC',false,92233720,'21:26:00'),
+(b'01011011','surya','2021-01-26',1.110000001,'2018-03-10 04:50:01 UTC',true,92233729,'20:26:00');
diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/errorMessage.properties b/cloudsql-mysql-plugin/src/e2e-test/resources/errorMessage.properties
new file mode 100644
index 000000000..5ff3357f2
--- /dev/null
+++ b/cloudsql-mysql-plugin/src/e2e-test/resources/errorMessage.properties
@@ -0,0 +1,21 @@
+errorMessageInvalidSourceDatabase=SQL error while getting query schema
+errorMessageInvalidImportQuery=Import Query select must contain the string '$CONDITIONS'. if Number of Splits is not set\
+  \ to 1. Include '$CONDITIONS' in the Import Query
+errorMessageCloudMySqlInvalidReferenceName=Invalid reference name
+errorMessageBlankUsername=Username is required when password is given.
+errorMessageBlankPassword=SQL error while getting query schema
+errorMessageInvalidFetchSize=Invalid fetch size. Fetch size must be a positive integer.
+errorMessageBlankSplitBy=Split-By Field Name must be specified if Number of Splits is not set to 1. Specify the Split-by Field Name.
+errorMessageInvalidNumberOfSplits=Invalid value for Number of Splits '0'. Must be at least 1. Specify a Number of Splits no less than 1.
+errorMessageNumberOfSplitNotNumber=Unable to create config for batchsource
+errorMessageNumberOfSplits=Split-By Field Name must be specified if Number of Splits is not set to 1. Specify the Split-by Field Name.
+errorMessageBoundingQuery=Bounding Query must be specified if Number of Splits is not set to 1. Specify the Bounding Query.
+errorMessageInvalidSinkDatabase=Error encountered while configuring the stage: 'URLDecoder: Illegal hex characters in escape (%) pattern - For input string: "$^"'
+errorMessageInvalidTableName=Table 'Invalidtable' does not exist. Ensure table 'Invalidtable' is set correctly and
+errorMessageConnectionName=Connection Name must be in the format :: to connect to a public CloudSQL MySQL instance.
+validationSuccessMessage=No errors found.
+validationErrorMessage=COUNT ERROR found
+errorLogsMessageInvalidTableName=Spark program 'phase-1' failed with error: Errors were encountered during validation. \
+  Table
+errorLogsMessageInvalidCredentials=Spark program 'phase-1' failed with error: Errors were encountered during validation.
+errorLogsMessageInvalidBoundingQuery=Spark program 'phase-1' failed with error: You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to use near 'table' at line 1. Please check the system logs for more details. diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/pluginDataCyAttributes.properties b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginDataCyAttributes.properties new file mode 100644 index 000000000..74a4271fb --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginDataCyAttributes.properties @@ -0,0 +1,12 @@ +referenceName=referenceName +enableQuotedValues=switch-enableQuotedValues +inputsToLoadMemory=inMemoryInputs +projectId=project +datasetProjectId=datasetProject +dataset=dataset +table=table +truncateTable=switch-truncateTable +truncateTableMacroInput=truncateTable +updateTableSchema=switch-allowSchemaRelaxation +updateTableSchemaMacroInput=allowSchemaRelaxation +outputSchemaMacroInput=Output Schema-macro-input diff --git a/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties new file mode 100644 index 000000000..b0d7435b3 --- /dev/null +++ b/cloudsql-mysql-plugin/src/e2e-test/resources/pluginParameters.properties @@ -0,0 +1,88 @@ +driverName=cloudsql-mysql +username=CLOUDSQL_MYSQL_USERNAME +password=CLOUDSQL_MYSQL_PASSWORD +DatabaseName=cdfmysqldb +connectionName=CLOUDSQL_MYSQL_CONNECTION_NAME +connectionArgumentsList=[{"key":"numSplits","value":"1"}] +invalidconnectionArgumentsList=[{"key":"numSplits","value":"%$^&#"}] +invalidImportQuery=select +numberOfSplits=2 +invalidRef=invalidRef&^*&&* +zeroValue=0 +zeroSplits=isha +insertQuery= select * from mytable +CloudMySqlImportQuery=select * from mytable +fetchSize=1000 +NumSplits=1 +SplitBy=ID +jdbcURL=jdbc:mysql:///%s?cloudSqlInstance=%s&socketFactory=com.google.cloud.sql.mysql.SocketFactory&user=%s&password=%s +projectId=cdf-athena +datasetprojectId=cdf-athena +BQReferenceName=reference +targetTable=mytable5 +bqDatasetId=1234 +dataset=test_automation +bqSourceTable=mysql +table=myTable +name=NAME +pass=PASS +invalidUserName=testUser +invalidPassword=testPassword +invalidTable=data +#CloudMySqlDriverName=cloudsql-mysql +bqTruncateTable=truncateTable +bqUpdateTableSchema=updateSchema +invalidDatabaseName=invalidDB%$^%* +invalidboundQuery=SELECT MIN(id),MAX(id) FROM table +cloudsqlimportQuery=where $CONDITIONS; +splitby=ID +numbersplitsgenerate=2 +ConnectionTimeout=100 +invalidTablename=Table123 +dataOutputSchema=[{"key":"fname","value":"string"},{"key":"lname","value":"string"},{"key":"cost","value":"double"},\ + {"key":"zipcode","value":"int"}] +OutputSchema=[{"key":"id","value":"int"},{"key":"lastName","value":"string"}] +outputDatatypesSchema2=[{"key":"ID","value":"string"},{"key":"COL1","value":"string"},{"key":"COL2","value":"bytes"},\ + {"key":"COL3","value":"bytes"},{"key":"COL4","value":"string"},{"key":"COL5","value":"string"},\ + {"key":"COL6","value":"bytes"}] +outputDatatypesSchema3=[{"key":"ID","value":"string"},{"key":"COL1","value":"bytes"}] +outputDatatypesSchema4=[{"key":"ID","value":"string"},{"key":"COL1","value":"string"}] +splitBy=column name +invalidBoundingQuery=SELECT MIN(id),MAX(id) FROM table +outputDatatypesSchema1=[{"key":"COL23","value":"double"},{"key":"COL28","value":"timestamp"},\ + {"key":"COL29","value":"timestamp"},{"key":"COL30","value":"string"},{"key":"COL31","value":"string"},\ + 
{"key":"COL32","value":"string"},{"key":"COL33","value":"timestamp"},{"key":"COL34","value":"float"},\ + {"key":"COL35","value":"double"}] + +datatypesColumns=(ID varchar(100) PRIMARY KEY, COL1 bit(1), COL2 tinyint(20), COL3 boolean, COL4 smallint(10), \ + COL5 blob, COL6 mediumint(10), COL7 int(11), COL8 bigint(1), COL9 float, COL10 date, COL11 datetime, \ + COL12 decimal(10,0), COL13 double, COL14 enum('A','B','C'), COL15 time, COL16 timestamp, COL18 char(1),\ + COL19 binary(1), COL20 tinytext, COL21 varbinary(100), COL22 tinyblob, COL23 mediumblob, \ + COL24 blob, COL25 text, COL26 mediumtext, COL27 longblob, COL28 longtext, COL29 set('X','y','Z')) +datatypesColumnsList=(ID,COL1,COL2,COL3,COL4,COL5,COL6,COL7,COL8,COL9,COL10,COL11,COL12,COL13,COL14,COL15,COL16,\ + COL18,COL19,COL20,COL21,COL22,COL23,COL24,COL25,COL26,COL27,COL28,COL29) +datatypesValue1=VALUES ('User1',1,-1,true,-32768,HEX('27486920546869732069732061206C6F6E6720746578742E27'),0,25,\ +-9223372036854775808,22.0,'2023-01-01','2023-01-01 00:00:00',1234,1234.5678,'A','00:00:00','2023-01-01 00:00:00',\ + 'P',1,'This is a test message',1,HEX('27486920546869732069732061206C6F6E6720746578742E27'),\ + HEX('27486920546869732069732061206C6F6E6720746578742E27'),HEX('27486920546869732069732061206C6F6E6720746578742E27'),\ + 'This is a test message','This is a test message',HEX('27486920546869732069732061206C6F6E6720746578742E27'),\ + 'This is a test message to check ','X') +datatypesSchema=[{"key":"ID","value":"string"},{"key":"COL1","value":"boolean"},{"key":"COL2","value":"int"},\ + {"key":"COL3","value":"boolean"},{"key":"COL4","value":"int"},{"key":"COL5","value":"bytes"},\ + {"key":"COL6","value":"int"},{"key":"COL7","value":"int"},{"key":"COL8","value":"long"},\ + {"key":"COL9","value":"float"},{"key":"COL10","value":"date"},{"key":"COL11","value":"timestamp"},\ + {"key":"COL12","value":"decimal"},{"key":"COL13","value":"double"},{"key":"COL14","value":"string"},\ + {"key":"COL15","value":"time"},{"key":"COL16","value":"timestamp"},\ + {"key":"COL18","value":"string"},{"key":"COL19","value":"bytes"},{"key":"COL20","value":"string"},\ + {"key":"COL21","value":"bytes"},{"key":"COL22","value":"bytes"},{"key":"COL23","value":"bytes"},\ + {"key":"COL24","value":"bytes"},{"key":"COL25","value":"string"},{"key":"COL26","value":"string"},\ + {"key":"COL27","value":"bytes"},{"key":"COL28","value":"string"},{"key":"COL29","value":"string"}] + +bqOutputMultipleDatatypesSchema= [{"key":"COL1","value":"bytes"},{"key":"COL2","value":"string"},\ + {"key":"COL3","value":"date"},{"key":"COL4","value":"double"},{"key":"COL6","value":"timestamp"},\ + {"key":"COL8","value":"boolean"},{"key":"COL9","value":"long"},{"key":"COL10","value":"time"}] +CloudMySqlDatatypesColumns=(COL1 VARBINARY(100) , COL2 VARCHAR(100), COL3 DATE, COL4 DOUBLE,\ + COL6 TIMESTAMP, COL8 BIT, COL9 BIGINT, COL10 TIME) +#bq queries file path +CreateBQTableQueryFile=BigQuery/BigQueryCreateTableQuery.txt +InsertBQDataQueryFile=BigQuery/BigQueryInsertDataQuery.txt