Wrangler plugin e2e tests.
priyabhatnagar25 committed Jul 18, 2023
1 parent 5886a9f commit 43c1334
Showing 20 changed files with 1,155 additions and 1 deletion.
125 changes: 124 additions & 1 deletion pom.xml
@@ -122,6 +122,7 @@
<simplemagic.version>1.11</simplemagic.version>
<slf4j.version>1.7.15</slf4j.version>
<unix4j.version>0.4</unix4j.version>
<testSourceLocation>${project.basedir}/src/test/java/</testSourceLocation>
</properties>

<repositories>
@@ -172,6 +173,7 @@
</dependencies>

<build>
<testSourceDirectory>${testSourceLocation}</testSourceDirectory>
<pluginManagement>
<plugins>
<plugin>
@@ -232,6 +234,7 @@
<exclude>wrangler-demos/**</exclude>
<exclude>**/com/example/**</exclude>
<exclude>/**/icons/**</exclude>

</excludes>
</configuration>
</execution>
@@ -397,7 +400,6 @@
<releaseProfiles>releases</releaseProfiles>
</configuration>
</plugin>

<plugin>
<groupId>org.sonatype.plugins</groupId>
<artifactId>nexus-staging-maven-plugin</artifactId>
@@ -429,6 +431,127 @@
</plugins>
</build>
</profile>
<profile>
<id>e2e-tests</id>
<properties>
<testSourceLocation>src/e2e-test/java</testSourceLocation>
<TEST_RUNNER>TestRunner.java</TEST_RUNNER>
</properties>
<build>
<testResources>
<testResource>
<directory>src/e2e-test/resources</directory>
</testResource>
</testResources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.18.1</version>
<configuration>
<skipTests>true</skipTests>
</configuration>
</plugin>

<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<version>3.0.0-M5</version>
<configuration>
<includes>
<include>${TEST_RUNNER}</include>
</includes>
<!--Start configuration to run TestRunners in parallel-->
<parallel>classes</parallel> <!--Running TestRunner classes in parallel-->
<threadCount>2</threadCount> <!--Number of classes to run in parallel-->
<forkCount>2</forkCount> <!--Number of JVM processes -->
<reuseForks>true</reuseForks>
<!--End configuration to run TestRunners in parallel-->
<environmentVariables>
<GOOGLE_APPLICATION_CREDENTIALS>
${GOOGLE_APPLICATION_CREDENTIALS}
</GOOGLE_APPLICATION_CREDENTIALS>
<SERVICE_ACCOUNT_TYPE>
${SERVICE_ACCOUNT_TYPE}
</SERVICE_ACCOUNT_TYPE>
<SERVICE_ACCOUNT_FILE_PATH>
${SERVICE_ACCOUNT_FILE_PATH}
</SERVICE_ACCOUNT_FILE_PATH>
<SERVICE_ACCOUNT_JSON>
${SERVICE_ACCOUNT_JSON}
</SERVICE_ACCOUNT_JSON>
</environmentVariables>
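<!--failsafe sets these variables on each forked test JVM, so credentials resolve inside every parallel fork-->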
</configuration>
<executions>
<execution>
<goals>
<goal>integration-test</goal>
</goals>
</execution>
</executions>
</plugin>

<plugin>
<groupId>net.masterthought</groupId>
<artifactId>maven-cucumber-reporting</artifactId>
<version>5.5.0</version>

<executions>
<execution>
<id>execution</id>
<phase>verify</phase>
<goals>
<goal>generate</goal>
</goals>
<configuration>
<projectName>Cucumber Reports</projectName> <!-- Replace with project name -->
<outputDirectory>target/cucumber-reports/advanced-reports</outputDirectory>
<buildNumber>1</buildNumber>
<skip>false</skip>
<inputDirectory>${project.build.directory}/cucumber-reports</inputDirectory>
<jsonFiles> <!-- supports wildcard or name pattern -->
<param>**/*.json</param>
</jsonFiles> <!-- optional, defaults to outputDirectory if not specified -->
<classificationDirectory>${project.build.directory}/cucumber-reports</classificationDirectory>
<checkBuildResult>true</checkBuildResult>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>31.0.1-jre</version>
</dependency>
</dependencies>
</dependencyManagement>

<dependencies>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.15</version>
</dependency>

<dependency>
<groupId>io.cdap.tests.e2e</groupId>
<artifactId>cdap-e2e-framework</artifactId>
<version>0.3.0-SNAPSHOT</version>
</dependency>

<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.2.8</version>
<scope>runtime</scope>
</dependency>
</dependencies>

</profile>
</profiles>
</project>
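
The e2e-tests profile above repoints the test source root at src/e2e-test/java, skips surefire, and has failsafe pick up only classes matching the TEST_RUNNER property, running them in parallel across two threads and two forked JVMs. As a rough sketch of what such a Cucumber runner typically looks like (the package, feature path, glue packages, and tags below are assumptions; the actual runner class is not shown in this excerpt):

package io.cdap.plugin.wrangler.runners; // hypothetical package

import io.cucumber.junit.Cucumber;
import io.cucumber.junit.CucumberOptions;
import org.junit.runner.RunWith;

/**
 * Illustrative Cucumber entry point matching failsafe's ${TEST_RUNNER} include pattern.
 * All paths and tags here are assumptions, not taken from this commit.
 */
@RunWith(Cucumber.class)
@CucumberOptions(
    features = {"src/e2e-test/features"},
    glue = {"io.cdap.plugin.common.stepsdesign", "stepsdesign"},
    tags = "@Wrangler", // Cucumber 6+ style tag expression (assumed version)
    plugin = {"pretty", "json:target/cucumber-reports/TestRunner.json"}
)
public class TestRunner {
}

With a runner like this in place, the suite would be launched along the lines of mvn clean verify -P e2e-tests with the four credential variables exported in the environment; failsafe executes the runner during integration-test, and maven-cucumber-reporting aggregates the JSON under target/cucumber-reports into the advanced report during verify.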

91 changes: 91 additions & 0 deletions wrangler-core/src/e2e-test/features/Wrangler/RunTime.feature
@@ -0,0 +1,91 @@
# Copyright © 2023 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

@Wrangler
Feature: Wrangler - Run time scenarios

@BQ_SOURCE_TEST @BQ_SINK_TEST
Scenario: To verify User is able to run a pipeline using the copy count and delete directives in the wrangler plugin
Given Open Datafusion Project to configure pipeline
Then Click on the Plus Green Button to import the pipelines
Then Select the json files for importing the pipelines for the plugin "Directive_copy_drop_count_setcolmn"
Then Navigate to the properties page of plugin: "BigQueryTable"
Then Replace input plugin property: "dataset" with value: "dataset"
Then Replace input plugin property: "table" with value: "bqSourceTable"
Then Click on the Get Schema button
Then Click on the Validate button
Then Close the Plugin Properties page
Then Navigate to the properties page of plugin: "BigQuery2"
Then Replace input plugin property: "table" with value: "bqTargetTable"
Then Replace input plugin property: "dataset" with value: "dataset"
Then Click on the Validate button
Then Close the Plugin Properties page
Then Rename the pipeline
Then Deploy the pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs
Then Validate The Data From BQ To BQ With Actual And Expected File for: "ExpectedDirective_copy_drop_count_setcolmn"

@BQ_SOURCE_TEST @BQ_SINK_TEST
Scenario: To verify User is able to run a pipeline using the fill null and send to error directives in the wrangler plugin
Given Open Datafusion Project to configure pipeline
Then Click on the Plus Green Button to import the pipelines
Then Select the json files for importing the pipelines for the plugin "Directive_Fillempty_sendtoerror"
Then Navigate to the properties page of plugin: "BigQueryTable"
Then Replace input plugin property: "dataset" with value: "dataset"
Then Replace input plugin property: "table" with value: "bqSourceTable"
Then Click on the Get Schema button
Then Click on the Validate button
Then Close the Plugin Properties page
Then Navigate to the properties page of plugin: "BigQuery2"
Then Replace input plugin property: "table" with value: "bqTargetTable"
Then Replace input plugin property: "dataset" with value: "dataset"
Then Click on the Validate button
Then Close the Plugin Properties page
Then Rename the pipeline
Then Deploy the pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs
Then Validate The Data From BQ To BQ With Actual And Expected File for: "ExpectedDirective_Fillempty_sendtoerror"

@BQ_SOURCE_TEST @BQ_SINK_TEST
Scenario: To verify User is able to run a pipeline using the Format,concatenate,title case and copy column directives in the wrangler plugin
Given Open Datafusion Project to configure pipeline
Then Click on the Plus Green Button to import the pipelines
Then Select the json files for importing the pipelines for the plugin "Directive_Concatenate_titlecase"
Then Navigate to the properties page of plugin: "BigQueryTable"
Then Replace input plugin property: "dataset" with value: "dataset"
Then Replace input plugin property: "table" with value: "bqSourceTable"
Then Click on the Get Schema button
Then Click on the Validate button
Then Close the Plugin Properties page
Then Navigate to the properties page of plugin: "BigQuery2"
Then Replace input plugin property: "table" with value: "bqTargetTable"
Then Replace input plugin property: "dataset" with value: "dataset"
Then Click on the Validate button
Then Close the Plugin Properties page
Then Rename the pipeline
Then Deploy the pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs
Then Validate The Data From BQ To BQ With Actual And Expected File for: "ExpectedDirective_Concatenate_titlecase"
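
Each Gherkin step above resolves to a Java step definition through the runner's glue packages; most of these ship with the cdap-e2e-framework dependency this commit adds. A minimal sketch of the binding mechanics only (class name, package, fixture path, and assertion are all invented for illustration; the commit's real validation step is not shown here):

package io.cdap.plugin.wrangler.stepsdesign; // hypothetical package

import io.cucumber.java.en.Then;
import org.junit.Assert;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;

/**
 * Illustrative step definition: shows how the quoted Gherkin argument arrives
 * as a method parameter. Fixture location and comparison logic are assumptions.
 */
public class ValidationSteps {

  @Then("Validate The Data From BQ To BQ With Actual And Expected File for: {string}")
  public void validateBqToBq(String expectedFileName) throws IOException {
    // Hypothetical fixture directory; the real expected files live in the test resources.
    Path expected = Paths.get("src/e2e-test/resources/BQValidationExpectedFiles", expectedFileName);
    List<String> expectedRows = Files.readAllLines(expected);
    // A real implementation would export the target BQ table and diff it row by row;
    // here we only assert the fixture exists and is non-empty.
    Assert.assertFalse("Expected file must not be empty", expectedRows.isEmpty());
  }
}

The TestSetupHooks class that follows provisions and tears down the BigQuery fixtures behind the @BQ_SOURCE_TEST and @BQ_SINK_TEST tags used above.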
@@ -0,0 +1,127 @@
/*
* Copyright © 2023 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/

package io.cdap.plugin.common.stepsdesign;

import com.google.cloud.bigquery.BigQueryException;
import io.cdap.e2e.utils.BigQueryClient;
import io.cdap.e2e.utils.PluginPropertyUtils;
import io.cucumber.java.After;
import io.cucumber.java.Before;
import org.apache.commons.lang3.StringUtils;
import org.junit.Assert;
import stepsdesign.BeforeActions;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.NoSuchElementException;
import java.util.UUID;

/**
* BQ test hooks.
*/
public class TestSetupHooks {

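  // Cucumber invokes each hook below only for scenarios tagged with the hook's
  // value expression (e.g. @BQ_SINK_TEST) and sequences competing hooks by their
  // "order" attribute.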
@Before(order = 1, value = "@BQ_SINK_TEST")
public static void setTempTargetBQTableName() {
String bqTargetTableName = "E2E_TARGET_" + UUID.randomUUID().toString().replaceAll("-", "_");
PluginPropertyUtils.addPluginProp("bqTargetTable", bqTargetTableName);
BeforeActions.scenario.write("BQ Target table name - " + bqTargetTableName);
}

@After(order = 1, value = "@BQ_SINK_TEST")
public static void deleteTempTargetBQTable() throws IOException, InterruptedException {
String bqTargetTableName = PluginPropertyUtils.pluginProp("bqTargetTable");
try {
BigQueryClient.dropBqQuery(bqTargetTableName);
BeforeActions.scenario.write("BQ Target table - " + bqTargetTableName + " deleted successfully");
PluginPropertyUtils.removePluginProp("bqTargetTable");
} catch (BigQueryException e) {
if (e.getMessage().contains("Not found: Table")) {
BeforeActions.scenario.write("BQ Target Table " + bqTargetTableName + " does not exist");
} else {
Assert.fail(e.getMessage());
}
}
}

/**
* Create BigQuery table.
*/
@Before(order = 1, value = "@BQ_SOURCE_TEST")
public static void createTempSourceBQTable() throws IOException, InterruptedException {
createSourceBQTableWithQueries(PluginPropertyUtils.pluginProp("CreateBQTableQueryFile"),
PluginPropertyUtils.pluginProp("InsertBQDataQueryFile"));
}

@After(order = 1, value = "@BQ_SOURCE_TEST")
public static void deleteTempSourceBQTable() throws IOException, InterruptedException {
String bqSourceTable = PluginPropertyUtils.pluginProp("bqSourceTable");
BigQueryClient.dropBqQuery(bqSourceTable);
BeforeActions.scenario.write("BQ source Table " + bqSourceTable + " deleted successfully");
PluginPropertyUtils.removePluginProp("bqSourceTable");
}

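  /**
   * Reads the create-table and insert-data queries from test resources, substitutes the
   * DATASET and TABLE_NAME placeholders, and executes both statements; the random suffix
   * keeps source table names unique across parallel forks.
   */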
private static void createSourceBQTableWithQueries(String bqCreateTableQueryFile, String bqInsertDataQueryFile)
throws IOException, InterruptedException {
String bqSourceTable = "E2E_SOURCE_" + UUID.randomUUID().toString().substring(0, 5)
  .replaceAll("-", "_");

String createTableQuery = StringUtils.EMPTY;
try {
createTableQuery = new String(Files.readAllBytes(Paths.get(
  TestSetupHooks.class.getResource("/" + bqCreateTableQueryFile).toURI())), StandardCharsets.UTF_8);
createTableQuery = createTableQuery.replace("DATASET", PluginPropertyUtils.pluginProp("dataset"))
.replace("TABLE_NAME", bqSourceTable);
} catch (Exception e) {
BeforeActions.scenario.write("Exception in reading " + bqCreateTableQueryFile + " - " + e.getMessage());
Assert.fail("Exception in BigQuery testdata prerequisite setup " +
"- error in reading create table query file " + e.getMessage());
}

String insertDataQuery = StringUtils.EMPTY;
try {
insertDataQuery = new String(Files.readAllBytes(Paths.get(
  TestSetupHooks.class.getResource("/" + bqInsertDataQueryFile).toURI())), StandardCharsets.UTF_8);
insertDataQuery = insertDataQuery.replace("DATASET", PluginPropertyUtils.pluginProp("dataset"))
.replace("TABLE_NAME", bqSourceTable);
} catch (Exception e) {
BeforeActions.scenario.write("Exception in reading " + bqInsertDataQueryFile + " - " + e.getMessage());
Assert.fail("Exception in BigQuery testdata prerequisite setup " +
"- error in reading insert data query file " + e.getMessage());
}
BigQueryClient.getSoleQueryResult(createTableQuery);
try {
BigQueryClient.getSoleQueryResult(insertDataQuery);
} catch (NoSuchElementException e) {
// Insert query does not return any record.
// Iterator on TableResult values in getSoleQueryResult method throws NoSuchElementException
}
PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable);
BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " created successfully");
}
}
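
The DATASET and TABLE_NAME tokens are literal placeholders inside the checked-in query files, swapped in before execution. A minimal illustration of that substitution (the template text is an assumption; the actual query files are not part of this excerpt):

// Hypothetical contents of CreateBQTableQueryFile, with the literal placeholder tokens.
String dataset = "my_dataset"; // in the hooks above this comes from PluginPropertyUtils.pluginProp("dataset")
String template = "create table `DATASET.TABLE_NAME` (id INT64, name STRING)";
String query = template
    .replace("DATASET", dataset)
    .replace("TABLE_NAME", "E2E_SOURCE_abc12");
// query -> create table `my_dataset.E2E_SOURCE_abc12` (id INT64, name STRING)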