From c0495f099cdbbe9787113d002648dc2c9641177e Mon Sep 17 00:00:00 2001
From: James McMullan
Date: Wed, 21 May 2025 09:04:11 -0400
Subject: [PATCH 1/2] wip

---
 .../dfs/client/DFSReadWriteTest.java          | 55 ++++++++++++++++++-
 1 file changed, 53 insertions(+), 2 deletions(-)

diff --git a/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSReadWriteTest.java b/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSReadWriteTest.java
index f0fbe7b0a..2293ccdc3 100644
--- a/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSReadWriteTest.java
+++ b/dfsclient/src/test/java/org/hpccsystems/dfs/client/DFSReadWriteTest.java
@@ -76,26 +76,31 @@ public class DFSReadWriteTest extends BaseRemoteTest
     @Test
     public void readBadlyDistributedFileTest() throws Exception
     {
+        System.out.println("Starting test: readBadlyDistributedFileTest");
         //this file only has data on two nodes
         HPCCFile file = new HPCCFile(datasets[1], connString , hpccUser, hpccPass);
         assertFalse(file.isTlkIndex());
         List<HPCCRecord> records = readFile(file, connTO, false);
         assertEquals("Not all records loaded",expectedCounts[1], records.size());
+        System.out.println("Completed test: readBadlyDistributedFileTest");
     }
 
     @Test
     public void readWithForcedTimeoutTest() throws Exception
     {
+        System.out.println("Starting test: readWithForcedTimeoutTest");
         HPCCFile file = new HPCCFile(datasets[0], connString , hpccUser, hpccPass);
 
         // Set expiration to 15,000ms
         List<HPCCRecord> records = readFile(file, 15000, true);
         assertEquals("Not all records loaded",expectedCounts[0], records.size());
+        System.out.println("Completed test: readWithForcedTimeoutTest");
     }
 
     @Test
     public void nullCharTests() throws Exception
     {
+        System.out.println("Starting test: nullCharTests");
         // Unicode
         boolean unicodePassed = true;
         {
@@ -207,11 +212,13 @@ public void nullCharTests() throws Exception
 
         assertTrue("Unicode EOS character test failed. See mismatches above.", unicodePassed);
         assertTrue("Single byte EOS character test failed. See mismatches above.", sbcPassed);
+        System.out.println("Completed test: nullCharTests");
     }
 
     @Test
     public void longNullTerminatedStringTest() throws Exception
     {
+        System.out.println("Starting test: longNullTerminatedStringTest");
         Object[] fields = new Object[1];
         fields[0] = generateRandomString(4096);
         FieldDef recordDef = new FieldDef("RootRecord", FieldType.RECORD, "rec", 4, false, false, HpccSrcType.LITTLE_ENDIAN, new FieldDef[] {
@@ -233,11 +240,13 @@ public void longNullTerminatedStringTest() throws Exception
 
         HPCCRecord readRecord = (HPCCRecord) reader.getNext();
         assertEquals(record, readRecord);
+        System.out.println("Completed test: longNullTerminatedStringTest");
     }
 
     @Test
     public void integrationReadWriteBackTest() throws Exception
     {
+        System.out.println("Starting test: integrationReadWriteBackTest");
         for (int i = 0; i < datasets.length; i++)
         {
             HPCCFile file = new HPCCFile(datasets[i], connString, hpccUser, hpccPass);
@@ -280,11 +289,13 @@ public void integrationReadWriteBackTest() throws Exception
                 fail("recs did not project correctly");
             }
         }
+        System.out.println("Completed test: integrationReadWriteBackTest");
     }
 
     @Test
     public void readBufferResizeTest() throws Exception
     {
+        System.out.println("Starting test: readBufferResizeTest");
         HPCCFile file = new HPCCFile(datasets[0], connString , hpccUser, hpccPass);
         DataPartition[] fileParts = file.getFileParts();
         if (fileParts == null || fileParts.length == 0)
@@ -328,11 +339,13 @@ public void readBufferResizeTest() throws Exception
         }
 
         assertEquals("Number of records did not match during read.", expectedCounts[0], records.size());
+        System.out.println("Completed test: readBufferResizeTest");
     }
 
     @Test
     public void readResumeTest() throws Exception
     {
+        System.out.println("Starting test: readResumeTest");
         HPCCFile file = new HPCCFile(datasets[0], connString , hpccUser, hpccPass);
 
         DataPartition[] fileParts = file.getFileParts();
@@ -413,11 +426,13 @@ public void readResumeTest() throws Exception
             HPCCRecord resumedRecord = resumedRecords.get(i);
             assertEquals("Record " + i + ": did not match\n" + record + "\n" + resumedRecord, record, resumedRecord);
         }
+        System.out.println("Completed test: readResumeTest");
     }
 
     @Test
     public void nullWriteTest() throws Exception
     {
+        System.out.println("Starting test: nullWriteTest");
         String fname = datasets[1];
         HPCCFile file = new HPCCFile(fname, connString, hpccUser, hpccPass);
         file.setProjectList("");
@@ -470,11 +485,13 @@ else if (field instanceof List)
                     assertEquals(((List) field).size(),0);
             }
         }
+        System.out.println("Completed test: nullWriteTest");
     }
 
     @Test
     public void nullElementTests()
     {
+        System.out.println("Starting test: nullElementTests");
         FieldDef[] stringSetElemFD = new FieldDef[1];
         stringSetElemFD[0] = new FieldDef("strValue", FieldType.STRING, "UTF8", 4, false, false, HpccSrcType.UTF8, new FieldDef[0]);
@@ -535,25 +552,30 @@ public void nullElementTests()
             records.add(record);
         }
         writeFile(records, "null::element::test", recordDef, connTO);
+        System.out.println("Completed test: nullElementTests");
     }
 
     @Test
     public void getMetadataTest() throws Exception
     {
+        System.out.println("Starting test: getMetadataTest");
         String fname = datasets[0];
         HPCCFile file = new HPCCFile(fname, connString, hpccUser, hpccPass);
         DFUFileDetailWrapper meta=file.getOriginalFileMetadata();
         assertNotNull("Meta was null for this file",meta);
         assertNotNull("Record count was null for this file",meta.getRecordCount());
         assertEquals(expectedCounts[0],Long.valueOf(meta.getRecordCountInt64()).intValue());
+        System.out.println("Completed test: getMetadataTest");
     }
 
     @Test
     public void getNullMetadataTest() throws Exception
     {
+        System.out.println("Starting test: getNullMetadataTest");
         HPCCFile file=new HPCCFile("notthere",connString,hpccUser,hpccPass);
         DFUFileDetailWrapper meta=file.getOriginalFileMetadata();
         assertNull("Meta should be null for nonexistent file",meta);
+        System.out.println("Completed test: getNullMetadataTest");
     }
 
     private static final String ALPHABET = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-_";
@@ -578,6 +600,7 @@ public static String generateRandomString(int count)
     @Test
     public void integrationLargeRecordTest() throws Exception
     {
+        System.out.println("Starting test: integrationLargeRecordTest");
         // Create a large record dataset
         FieldDef[] fieldDefs = new FieldDef[3];
         fieldDefs[0] = new FieldDef("key", FieldType.INTEGER, "lNTEGER4", 4, true, false, HpccSrcType.LITTLE_ENDIAN, new FieldDef[0]);
@@ -595,7 +618,7 @@ public void integrationLargeRecordTest() throws Exception
             HPCCRecord record = new HPCCRecord(fields, recordDef);
             records.add(record);
         }
-        writeFile(records, "benchmark::large_record_8MB::10rows", recordDef,connTO);
+        writeFile(records, "benchmark::large_record_8MB::10rows", recordDef, connTO);
 
         HPCCFile file = new HPCCFile("benchmark::large_record_8MB::10rows", connString , hpccUser, hpccPass);
         records = readFile(file, connTO, false);
@@ -613,11 +636,13 @@ public void integrationLargeRecordTest() throws Exception
                 Assert.fail("Record mismatch");
             }
         }
+        System.out.println("Completed test: integrationLargeRecordTest");
     }
 
     @Test
     public void unsigned8ToDecimalTest() throws Exception
     {
+        System.out.println("Starting test: unsigned8ToDecimalTest");
         // Create a large record dataset
         FieldDef[] fieldDefs = new FieldDef[3];
         fieldDefs[0] = new FieldDef("field1", FieldType.INTEGER, "UNSIGNED8", 8, true, true, HpccSrcType.LITTLE_ENDIAN, new FieldDef[0]);
@@ -653,11 +678,13 @@ public void unsigned8ToDecimalTest() throws Exception
 
             assertEquals(readRecord, originalRecord);
         }
+        System.out.println("Completed test: unsigned8ToDecimalTest");
     }
 
     @Test
     public void longStringTest() throws Exception
     {
+        System.out.println("Starting test: longStringTest");
         // Create a large record dataset
         FieldDef[] fieldDefs = new FieldDef[4];
         fieldDefs[0] = new FieldDef("LongVarUnicode", FieldType.VAR_STRING, "", 4, false, false, HpccSrcType.UTF16LE, new FieldDef[0]);
@@ -694,11 +721,13 @@ public void longStringTest() throws Exception
             HPCCRecord readRecord = originalRecords.get(i);
             Assert.assertEquals(originalRecord, readRecord);
         }
+        System.out.println("Completed test: longStringTest");
     }
 
     @Test
     public void numericOverflowTest() throws Exception
     {
+        System.out.println("Starting test: numericOverflowTest");
         // Create a large record dataset
         FieldDef[] fieldDefs = new FieldDef[16];
         fieldDefs[0] = new FieldDef("int1", FieldType.INTEGER, "INTEGER1", 1, true, false, HpccSrcType.LITTLE_ENDIAN, new FieldDef[0]);
@@ -815,12 +844,14 @@ public void numericOverflowTest() throws Exception
                 Assert.fail("Records did not match.");
             }
         }
+        System.out.println("Completed test: numericOverflowTest");
     }
 
     final static String dimdatefilename = "dfsclient::junit::dim_date";
     @Test
     public void filteredDIMDATEJAPI445Test() throws Exception
     {
+        System.out.println("Starting test: filteredDIMDATEJAPI445Test");
         List<HPCCRecord> records = new ArrayList<HPCCRecord>();
         FieldDef[] fieldDefs = new FieldDef[2];
         fieldDefs[0] = new FieldDef("date_sk", FieldType.INTEGER, "unsigned8", 8, true, false, HpccSrcType.LITTLE_ENDIAN, new FieldDef[0]);
@@ -908,11 +939,13 @@ record = new HPCCRecord(fields, recordDef);
         {
             Assert.fail("Date file filter was expected to return 1 record, records returned: " + records.size());
         }
+        System.out.println("Completed test: filteredDIMDATEJAPI445Test");
     }
 
     @Test
     public void filteredTest() throws Exception
     {
+        System.out.println("Starting test: filteredTest");
         // Create a large record dataset
         FieldDef[] fieldDefs = new FieldDef[2];
         fieldDefs[0] = new FieldDef("key", FieldType.INTEGER, "lNTEGER4", 4, true, false, HpccSrcType.LITTLE_ENDIAN, new FieldDef[0]);
@@ -938,11 +971,13 @@ public void filteredTest() throws Exception
         {
             Assert.fail("Failed to read filtered record dataset");
         }
+        System.out.println("Completed test: filteredTest");
     }
 
     @Test
     public void stringProcesingTests() throws Exception
     {
+        System.out.println("Starting test: stringProcesingTests");
         String whiteSpaceStr = " \t\n\r\f" + '\u0009' + '\u000B' + '\u000C'
                              + '\u001C' + '\u001D' + '\u001E' + '\u001F';
 
@@ -1016,11 +1051,13 @@ public void stringProcesingTests() throws Exception
                 assertEquals(records.get(0).getField(i), nonEmptyStrings[i]);
             }
         }
+        System.out.println("Completed test: stringProcesingTests");
     }
 
     @Test
     public void stringEOSTests() throws Exception
     {
+        System.out.println("Starting test: stringEOSTests");
         FieldDef[] fieldDefs = new FieldDef[9];
         fieldDefs[0] = new FieldDef("str1", FieldType.STRING, "UTF8", 4, false, false, HpccSrcType.UTF8, new FieldDef[0]);
         fieldDefs[1] = new FieldDef("str2", FieldType.STRING, "STRING", 4, false, false, HpccSrcType.SINGLE_BYTE_CHAR, new FieldDef[0]);
@@ -1060,11 +1097,13 @@ public void stringEOSTests() throws Exception
             String field = (String) records.get(0).getField(i);
             assertEquals(field.trim(), nonEmptyStrings[i].trim());
         }
+        System.out.println("Completed test: stringEOSTests");
     }
 
     @Test
     public void resumeFileReadTest() throws Exception
     {
+        System.out.println("Starting test: resumeFileReadTest");
         HPCCFile file = new HPCCFile("benchmark::integer::20kb", connString , hpccUser, hpccPass);
 
         DataPartition[] fileParts = file.getFileParts();
@@ -1183,11 +1222,13 @@ public void resumeFileReadTest() throws Exception
                 }
             }
         }
+        System.out.println("Completed test: resumeFileReadTest");
     }
 
     @Test
     public void protocolVersionTest()
     {
+        System.out.println("Starting test: protocolVersionTest");
         HPCCWsDFUClient dfuClient = wsclient.getWsDFUClient();
 
         HpccRemoteFileReader<HPCCRecord> fileReader = null;
@@ -1215,11 +1256,13 @@ public void protocolVersionTest()
         {
             assertFalse("Expected rowservice with version: " + remoteVersion.toString() + " to be using old protocol.", fileReader.getInputStream().isUsingNewProtocol());
         }
+        System.out.println("Completed test: protocolVersionTest");
     }
 
     @Ignore
     @Test
     public void emptyCompressedFileTest()
     {
+        System.out.println("Starting test: emptyCompressedFileTest");
         HPCCWsDFUClient dfuClient = wsclient.getWsDFUClient();
         Version remoteVersion = dfuClient.getTargetHPCCBuildVersion();
@@ -1247,11 +1290,13 @@ public void emptyCompressedFileTest()
 
             Assert.fail("Expected an exception when the file was closed without having written any data with this version of the protocol.");
         }
+        System.out.println("Completed test: emptyCompressedFileTest");
     }
 
     @Test
     public void filePartReadRetryTest()
     {
+        System.out.println("Starting test: filePartReadRetryTest");
         {
             HPCCFile readFile = null;
             try
@@ -1294,12 +1339,13 @@ public void filePartReadRetryTest()
                 Assert.fail(e.getMessage());
             }
         }
+        System.out.println("Completed test: filePartReadRetryTest");
     }
 
     @Test
     public void invalidSignatureTest()
     {
-
+        System.out.println("Starting test: invalidSignatureTest");
         HPCCFile readFile = null;
         {
             Exception readException = null;
@@ -1374,11 +1420,13 @@ public void invalidSignatureTest()
                 Assert.fail("Expected an exception during write due to the invalid signature");
             }
         }
+        System.out.println("Completed test: invalidSignatureTest");
     }
 
     @Test
     public void earlyCloseTest() throws Exception
     {
+        System.out.println("Starting test: earlyCloseTest");
         HPCCFile file = new HPCCFile(datasets[0], connString , hpccUser, hpccPass);
 
         DataPartition[] fileParts = file.getFileParts();
@@ -1445,6 +1493,7 @@ public void earlyCloseTest() throws Exception
             }
             assertTrue("Expected record count: " + expectedRecordCounts + " Actual count: " + numRecords, numRecords == expectedRecordCounts);
         }
+        System.out.println("Completed test: earlyCloseTest");
     }
 
     public List<HPCCRecord> readFile(HPCCFile file, Integer connectTimeoutMillis, boolean shouldForceTimeout) throws Exception
@@ -1638,6 +1687,7 @@ private void writeFileAndReportAnyExceptions(List<HPCCRecord> records, String fi
     @Test
     public void readIndexTest() throws Exception
     {
+        System.out.println("Starting test: readIndexTest");
         String indexName = "test::index::child_dataset::key";
 
         HPCCFile file = new HPCCFile(indexName, connString, hpccUser, hpccPass);
@@ -1657,6 +1707,7 @@ public void readIndexTest() throws Exception
         {
             Assert.fail("Unexpected record count. Expected: 125, Actual: " + numRecords);
         }
+        System.out.println("Completed test: readIndexTest");
     }
 
     private class LongKVData

From e03647c1d08cf84da948089b0d51b9a8a5b5a50f Mon Sep 17 00:00:00 2001
From: James McMullan
Date: Thu, 22 May 2025 08:36:28 -0400
Subject: [PATCH 2/2] Print logs

---
 .github/workflows/k8s-regression-suite.yml | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/.github/workflows/k8s-regression-suite.yml b/.github/workflows/k8s-regression-suite.yml
index a2656a2bb..bbf420680 100644
--- a/.github/workflows/k8s-regression-suite.yml
+++ b/.github/workflows/k8s-regression-suite.yml
@@ -146,6 +146,14 @@ jobs:
       - name: Build with Maven
         run: mvn -B --activate-profiles jenkins-on-demand,spark33 -Dmaven.gpg.skip=true -Dmaven.javadoc.skip=true -Dmaven.test.failure.ignore=true -Dhpccconn=https://eclwatch.default:8010 -Dwssqlconn=https://sql2ecl.default:8510 -DHPCC30117=open install
 
+      - name: Collect Rowservice Logs If DFS Tests Failed
+        if: hashFiles('./dfsclient/FailedTests.csv') != ''
+        run: |
+          echo "DFSClient tests failed - collecting rowservice pod logs"
+          ROWSERVICE_POD=$(kubectl get pods -l server=rowservice -o jsonpath='{.items[0].metadata.name}')
+          echo "Rowservice pod: $ROWSERVICE_POD"
+          kubectl logs $ROWSERVICE_POD --tail=500
+
       - name: Process Errors
         shell: python
         run: |