Use Open Clover for code coverage analysis (#48)
JaCoCo was interfering with Surefire and Failsafe, making the IT tests fail when instrumenting through JaCoCo. OpenClover works just fine and is compatible with Codecov.

---------

Co-authored-by: Jayant Jain <jainjayant@google.com>
Co-authored-by: Jayant Jain <141257304+jayehwhyehentee@users.noreply.github.com>
3 people authored Nov 27, 2023
1 parent 337134d commit 8fd3d5e
Showing 10 changed files with 292 additions and 83 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -22,7 +22,7 @@ Prerequisites:

```
git clone https://github.com/GoogleCloudDataproc/flink-bigquery-connector
cd flink-connector-bigquery
cd flink-bigquery-connector
mvn clean package -DskipTests
```

16 changes: 2 additions & 14 deletions cloudbuild/cloudbuild.yaml
@@ -13,27 +13,15 @@ steps:
env:
- 'CODECOV_TOKEN=${_CODECOV_TOKEN}'

# 3. Run unit tests
# 3. Run unit & integration tests
- name: 'gcr.io/$PROJECT_ID/dataproc-flink-bigquery-connector-presubmit'
id: 'unit-tests'
waitFor: ['init']
entrypoint: 'bash'
args: ['/workspace/cloudbuild/presubmit.sh', 'unittest']
args: ['/workspace/cloudbuild/presubmit.sh', 'tests']
env:
- 'CODECOV_TOKEN=${_CODECOV_TOKEN}'

# 4. Run integration tests concurrently with unit tests
# Commented out until integration tests are ported
# - name: 'gcr.io/$PROJECT_ID/dataproc-flink-bigquery-connector-presubmit'
# id: 'integration-tests'
# waitFor: ['unit-tests']
# entrypoint: 'bash'
# args: ['/workspace/cloudbuild/presubmit.sh', 'integrationtest']
# env:
# - 'GOOGLE_CLOUD_PROJECT=${_GOOGLE_CLOUD_PROJECT}'
# - 'TEMPORARY_GCS_BUCKET=${_TEMPORARY_GCS_BUCKET}'
# - 'CODECOV_TOKEN=${_CODECOV_TOKEN}'

# Tests take around 20 mins in general.
timeout: 1800s

13 changes: 4 additions & 9 deletions cloudbuild/presubmit.sh
@@ -30,18 +30,13 @@ cd /workspace
case $STEP in
# Download maven and all the dependencies
init)
$MVN install -DskipTests
$MVN clean install -DskipTests
exit
;;

# Run unit tests
unittest)
$MVN test jacoco:report jacoco:report-aggregate
;;

# Run integration tests
integrationtest)
$MVN failsafe:integration-test failsafe:verify jacoco:report jacoco:report-aggregate
# Run unit & integration tests
tests)
$MVN clean clover:setup verify clover:aggregate clover:clover -Pclover -pl flink-connector-bigquery
;;

*)
48 changes: 0 additions & 48 deletions flink-connector-bigquery/pom.xml
@@ -162,58 +162,10 @@ under the License.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>test-jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<executions>
<execution>
<id>default-test</id>
<phase>test</phase>
<goals>
<goal>test</goal>
</goals>
<configuration>
<argLine>${argLine} -XX:+UseG1GC -Xms256m -Xmx1024m</argLine>
</configuration>
</execution>
<execution>
<id>integration-tests</id>
<phase>integration-test</phase>
<goals>
<goal>test</goal>
</goals>
<configuration>
<argLine>-XX:+UseG1GC -Xms256m -Xmx2048m</argLine>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<executions>
<execution>
<id>prepare-agent</id>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>report</id>
<phase>install</phase>
<goals>
<goal>report</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
@@ -51,6 +51,8 @@
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.Serializable;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -82,16 +84,18 @@ public StorageReadClient getStorageClient(CredentialsOptions readOptions)
@Override
public QueryDataClient getQueryDataClient(CredentialsOptions readOptions) {
return new QueryDataClient() {

@Override
public List<String> retrieveTablePartitions(
String project, String dataset, String table) {
return Arrays.asList("2023062811");
return Arrays.asList(
LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyyMMddHH")));
}

@Override
public Optional<Tuple2<String, StandardSQLTypeName>> retrievePartitionColumnName(
String project, String dataset, String table) {
return Optional.of(Tuple2.of("number", StandardSQLTypeName.INT64));
return Optional.of(Tuple2.of("ts", StandardSQLTypeName.TIMESTAMP));
}

@Override
@@ -191,8 +195,9 @@ public void close() {}

public static final String SIMPLE_AVRO_SCHEMA_FIELDS_STRING =
" \"fields\": [\n"
+ " {\"name\": \"name\", \"type\": \"string\"},\n"
+ " {\"name\": \"number\", \"type\": \"long\"}\n"
+ " {\"name\": \"name\", \"type\": \"string\"},\n"
+ " {\"name\": \"number\", \"type\": \"long\"},\n"
+ " {\"name\" : \"ts\", \"type\" : {\"type\" : \"long\",\"logicalType\" : \"timestamp-micros\"}}\n"
+ " ]\n";
public static final String SIMPLE_AVRO_SCHEMA_STRING =
"{\"namespace\": \"project.dataset\",\n"
@@ -225,6 +230,10 @@ public void close() {}
new TableFieldSchema()
.setName("number")
.setType("INTEGER")
.setMode("REQUIRED"),
new TableFieldSchema()
.setName("ts")
.setType("TIMESTAMP")
.setMode("REQUIRED")));

/** Represents the parameters needed for the Avro data generation. */
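A note on the faker changes above: the partition id returned by retrieveTablePartitions now follows the yyyyMMddHH layout that BigQuery uses for hourly partition ids, and the new ts field is declared with the Avro timestamp-micros logical type, which is carried as a plain long of microseconds since the Unix epoch. Below is a minimal, self-contained sketch of how a record matching that schema could be populated; the class and record names are illustrative assumptions, not code from the faker.

```java
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;

import java.time.Instant;
import java.time.temporal.ChronoUnit;

public class TimestampMicrosExample {
    public static void main(String[] args) {
        // Schema mirroring the faker's fields: name, number and the new ts column.
        Schema schema = new Schema.Parser().parse(
                "{\"type\": \"record\", \"name\": \"example\", \"fields\": ["
                        + "{\"name\": \"name\", \"type\": \"string\"},"
                        + "{\"name\": \"number\", \"type\": \"long\"},"
                        + "{\"name\": \"ts\", \"type\": {\"type\": \"long\","
                        + " \"logicalType\": \"timestamp-micros\"}}]}");
        GenericRecord record = new GenericData.Record(schema);
        record.put("name", "some-name");
        record.put("number", 42L);
        // timestamp-micros values are stored as microseconds since the epoch.
        record.put("ts", ChronoUnit.MICROS.between(Instant.EPOCH, Instant.now()));
        System.out.println(record);
    }
}
```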
@@ -32,6 +32,8 @@ public void testSplitStateTransformation() {

BigQuerySourceSplitState splitState = new BigQuerySourceSplitState(originalSplit);
assertThat(splitState.toBigQuerySourceSplit()).isEqualTo(originalSplit);
assertThat(splitState)
.isEqualTo(new BigQuerySourceSplitState(splitState.toBigQuerySourceSplit()));
}

@Test
@@ -211,7 +211,12 @@ public void testProject() {

@Test
public void testRestriction() {
String sqlFilter = "id = 0 AND NOT optString IS NULL";
String sqlFilter =
"id = 0"
+ " AND NOT optString IS NULL"
+ " AND optString LIKE 's%'"
+ " AND optDouble > -1"
+ " AND optDouble <= 1.0 ";
tEnv.executeSql(createTestDDl(null));

Iterator<Row> collected =
@@ -0,0 +1,176 @@
/*
* Copyright (C) 2023 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/

package com.google.cloud.flink.bigquery.table.restrictions;

import org.apache.flink.shaded.guava30.com.google.common.collect.Lists;

import com.google.cloud.bigquery.StandardSQLTypeName;
import org.assertj.core.api.Assertions;
import org.junit.Test;

import java.util.List;

/** Tests for the partition-id to SQL literal conversions in {@link BigQueryPartition}. */
public class BigQueryPartitionTest {

@Test
public void testPartitionHour() {
List<String> partitionIds = Lists.newArrayList("2023062822", "2023062823");
// ISO formatted dates as single quote string literals at the beginning of the hour.
List<String> expectedValues =
Lists.newArrayList("'2023-06-28 22:00:00'", "'2023-06-28 23:00:00'");
List<String> values =
BigQueryPartition.partitionValuesFromIdAndDataType(
partitionIds, StandardSQLTypeName.TIMESTAMP);

Assertions.assertThat(values).isEqualTo(expectedValues);
}

@Test
public void testPartitionDay() {
List<String> partitionIds = Lists.newArrayList("20230628", "20230628");
// ISO formatted dates as single quote string literals.
List<String> expectedValues = Lists.newArrayList("'2023-06-28'", "'2023-06-28'");
List<String> values =
BigQueryPartition.partitionValuesFromIdAndDataType(
partitionIds, StandardSQLTypeName.DATETIME);

Assertions.assertThat(values).isEqualTo(expectedValues);
}

@Test
public void testPartitionMonth() {
List<String> partitionIds = Lists.newArrayList("202306", "202307");
// ISO formatted dates as single quote string literals
List<String> expectedValues = Lists.newArrayList("'2023-06'", "'2023-07'");
List<String> values =
BigQueryPartition.partitionValuesFromIdAndDataType(
partitionIds, StandardSQLTypeName.DATE);

Assertions.assertThat(values).isEqualTo(expectedValues);
}

@Test
public void testPartitionYear() {
List<String> partitionIds = Lists.newArrayList("2023", "2022");
// ISO formatted dates as single quote string literals
List<String> expectedValues = Lists.newArrayList("'2023'", "'2022'");
List<String> values =
BigQueryPartition.partitionValuesFromIdAndDataType(
partitionIds, StandardSQLTypeName.TIMESTAMP);

Assertions.assertThat(values).isEqualTo(expectedValues);
}

@Test
public void testPartitionInteger() {
List<String> partitionIds = Lists.newArrayList("2023", "2022");
// Integer partition ids are returned as-is, without quoting.
List<String> expectedValues = Lists.newArrayList("2023", "2022");
List<String> values =
BigQueryPartition.partitionValuesFromIdAndDataType(
partitionIds, StandardSQLTypeName.INT64);

Assertions.assertThat(values).isEqualTo(expectedValues);
}

@Test(expected = IllegalArgumentException.class)
public void testWrongTemporalPartition() {
List<String> partitionIds = Lists.newArrayList("202308101112");
BigQueryPartition.partitionValuesFromIdAndDataType(
partitionIds, StandardSQLTypeName.TIMESTAMP);
}

@Test(expected = IllegalArgumentException.class)
public void testWrongArrayPartition() {
List<String> partitionIds = Lists.newArrayList("2023", "2022");
BigQueryPartition.partitionValuesFromIdAndDataType(partitionIds, StandardSQLTypeName.ARRAY);
}

@Test(expected = IllegalArgumentException.class)
public void testWrongStructPartition() {
List<String> partitionIds = Lists.newArrayList("2023", "2022");
BigQueryPartition.partitionValuesFromIdAndDataType(
partitionIds, StandardSQLTypeName.STRUCT);
}

@Test(expected = IllegalArgumentException.class)
public void testWrongJsonPartition() {
List<String> partitionIds = Lists.newArrayList("2023", "2022");
BigQueryPartition.partitionValuesFromIdAndDataType(partitionIds, StandardSQLTypeName.JSON);
}

@Test(expected = IllegalArgumentException.class)
public void testWrongGeoPartition() {
List<String> partitionIds = Lists.newArrayList("2023", "2022");
BigQueryPartition.partitionValuesFromIdAndDataType(
partitionIds, StandardSQLTypeName.GEOGRAPHY);
}

@Test(expected = IllegalArgumentException.class)
public void testWrongBigNumPartition() {
List<String> partitionIds = Lists.newArrayList("2023", "2022");
BigQueryPartition.partitionValuesFromIdAndDataType(
partitionIds, StandardSQLTypeName.BIGNUMERIC);
}

@Test(expected = IllegalArgumentException.class)
public void testWrongBoolPartition() {
List<String> partitionIds = Lists.newArrayList("2023", "2022");
BigQueryPartition.partitionValuesFromIdAndDataType(partitionIds, StandardSQLTypeName.BOOL);
}

@Test(expected = IllegalArgumentException.class)
public void testWrongBytesPartition() {
List<String> partitionIds = Lists.newArrayList("2023", "2022");
BigQueryPartition.partitionValuesFromIdAndDataType(partitionIds, StandardSQLTypeName.BYTES);
}

@Test(expected = IllegalArgumentException.class)
public void testWrongFloatPartition() {
List<String> partitionIds = Lists.newArrayList("2023", "2022");
BigQueryPartition.partitionValuesFromIdAndDataType(
partitionIds, StandardSQLTypeName.FLOAT64);
}

@Test(expected = IllegalArgumentException.class)
public void testWrongStringPartition() {
List<String> partitionIds = Lists.newArrayList("2023", "2022");
BigQueryPartition.partitionValuesFromIdAndDataType(
partitionIds, StandardSQLTypeName.STRING);
}

@Test(expected = IllegalArgumentException.class)
public void testWrongTimePartition() {
List<String> partitionIds = Lists.newArrayList("2023", "2022");
BigQueryPartition.partitionValuesFromIdAndDataType(partitionIds, StandardSQLTypeName.TIME);
}

@Test(expected = IllegalArgumentException.class)
public void testWrongIntervalPartition() {
List<String> partitionIds = Lists.newArrayList("2023", "2022");
BigQueryPartition.partitionValuesFromIdAndDataType(
partitionIds, StandardSQLTypeName.INTERVAL);
}

@Test(expected = IllegalArgumentException.class)
public void testWrongNumericPartition() {
List<String> partitionIds = Lists.newArrayList("2023", "2022");
BigQueryPartition.partitionValuesFromIdAndDataType(
partitionIds, StandardSQLTypeName.NUMERIC);
}
}
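For readers tracing the expected values above: the conversion these tests exercise turns BigQuery partition ids into SQL literals, inferring the granularity from the id length (yyyy, yyyyMM, yyyyMMdd or yyyyMMddHH) for time partitions and passing integer-range ids through unchanged. The actual BigQueryPartition.partitionValuesFromIdAndDataType implementation is not shown in this diff; the sketch below is reconstructed from the test expectations alone and is only illustrative.

```java
import com.google.cloud.bigquery.StandardSQLTypeName;

import java.util.List;
import java.util.stream.Collectors;

/** Illustrative sketch only; not the connector's actual implementation. */
public final class PartitionIdConversionSketch {

    static List<String> partitionValuesFromIdAndDataType(
            List<String> partitionIds, StandardSQLTypeName type) {
        switch (type) {
            case INT64:
                // Integer-range partitions: the partition id already is the column value.
                return partitionIds;
            case DATE:
            case DATETIME:
            case TIMESTAMP:
                // Time partitions: the id length encodes the granularity.
                return partitionIds.stream()
                        .map(PartitionIdConversionSketch::temporalLiteral)
                        .collect(Collectors.toList());
            default:
                throw new IllegalArgumentException(
                        "Column type does not support partitioning: " + type);
        }
    }

    private static String temporalLiteral(String id) {
        switch (id.length()) {
            case 4: // yyyy
                return "'" + id + "'";
            case 6: // yyyyMM
                return "'" + id.substring(0, 4) + "-" + id.substring(4) + "'";
            case 8: // yyyyMMdd
                return "'" + id.substring(0, 4) + "-" + id.substring(4, 6)
                        + "-" + id.substring(6) + "'";
            case 10: // yyyyMMddHH
                return "'" + id.substring(0, 4) + "-" + id.substring(4, 6)
                        + "-" + id.substring(6, 8) + " " + id.substring(8) + ":00:00'";
            default:
                throw new IllegalArgumentException("Unexpected partition id: " + id);
        }
    }
}
```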