From 87faf8bb0a50ac849cdb7efafb630778b670c404 Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Wed, 26 Sep 2018 18:11:26 -0400 Subject: [PATCH 01/24] fix(load): skip bad (null) dates when validating services --- src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java b/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java index c4232185c..007707a88 100644 --- a/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java +++ b/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java @@ -181,6 +181,8 @@ select durations.service_id, duration_seconds, days_active from ( LocalDate firstDate = LocalDate.MAX; LocalDate lastDate = LocalDate.MIN; for (LocalDate date : dateInfoForDate.keySet()) { + // If the date is invalid, skip. + if (date == null) continue; if (date.isBefore(firstDate)) firstDate = date; if (date.isAfter(lastDate)) lastDate = date; } From d673e3669cb6dd0a1b4229e138337312ed1d0618 Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Thu, 8 Nov 2018 16:42:49 -0500 Subject: [PATCH 02/24] test: add test with bad calendar dates --- src/test/java/com/conveyal/gtfs/GTFSTest.java | 17 ++++++++++++++++- .../fake-agency-bad-calendar-date/agency.txt | 2 ++ .../fake-agency-bad-calendar-date/calendar.txt | 2 ++ .../calendar_dates.txt | 3 +++ .../fare_attributes.txt | 2 ++ .../fare_rules.txt | 2 ++ .../fake-agency-bad-calendar-date/feed_info.txt | 2 ++ .../frequencies.txt | 2 ++ .../fake-agency-bad-calendar-date/routes.txt | 2 ++ .../fake-agency-bad-calendar-date/shapes.txt | 8 ++++++++ .../stop_times.txt | 5 +++++ .../fake-agency-bad-calendar-date/stops.txt | 3 +++ .../fake-agency-bad-calendar-date/transfers.txt | 1 + .../fake-agency-bad-calendar-date/trips.txt | 3 +++ 14 files changed, 53 insertions(+), 1 deletion(-) create mode 100755 src/test/resources/fake-agency-bad-calendar-date/agency.txt create mode 100755 src/test/resources/fake-agency-bad-calendar-date/calendar.txt create mode 100755 src/test/resources/fake-agency-bad-calendar-date/calendar_dates.txt create mode 100755 src/test/resources/fake-agency-bad-calendar-date/fare_attributes.txt create mode 100755 src/test/resources/fake-agency-bad-calendar-date/fare_rules.txt create mode 100644 src/test/resources/fake-agency-bad-calendar-date/feed_info.txt create mode 100755 src/test/resources/fake-agency-bad-calendar-date/frequencies.txt create mode 100755 src/test/resources/fake-agency-bad-calendar-date/routes.txt create mode 100755 src/test/resources/fake-agency-bad-calendar-date/shapes.txt create mode 100755 src/test/resources/fake-agency-bad-calendar-date/stop_times.txt create mode 100755 src/test/resources/fake-agency-bad-calendar-date/stops.txt create mode 100755 src/test/resources/fake-agency-bad-calendar-date/transfers.txt create mode 100755 src/test/resources/fake-agency-bad-calendar-date/trips.txt diff --git a/src/test/java/com/conveyal/gtfs/GTFSTest.java b/src/test/java/com/conveyal/gtfs/GTFSTest.java index 6bf252d15..c2c7361ff 100644 --- a/src/test/java/com/conveyal/gtfs/GTFSTest.java +++ b/src/test/java/com/conveyal/gtfs/GTFSTest.java @@ -228,6 +228,21 @@ public void canLoadAndExportSimpleAgency() { ); } + /** + * Tests that a GTFS feed with bad date values in calendars.txt and calendar_dates.txt can pass the integration test. 
+ */ + @Test + public void canLoadFeedWithBadDates () { + assertThat( + runIntegrationTest( + "fake-agency-bad-calendar-date", + nullValue(), + new PersistanceExpectation[]{} + ), + equalTo(true) + ); + } + /** * Tests whether the simple gtfs can be loaded and exported if it has only calendar_dates.txt */ @@ -473,7 +488,7 @@ private void assertThatExportedGtfsMeetsExpectations( PersistanceExpectation[] persistanceExpectations, boolean fromEditor ) throws IOException { - LOG.info("testing expecations of csv outputs in an exported gtfs"); + LOG.info("testing expectations of csv outputs in an exported gtfs"); ZipFile gtfsZipfile = new ZipFile(tempFile.getAbsolutePath()); diff --git a/src/test/resources/fake-agency-bad-calendar-date/agency.txt b/src/test/resources/fake-agency-bad-calendar-date/agency.txt new file mode 100755 index 000000000..a916ce91b --- /dev/null +++ b/src/test/resources/fake-agency-bad-calendar-date/agency.txt @@ -0,0 +1,2 @@ +agency_id,agency_name,agency_url,agency_lang,agency_phone,agency_email,agency_timezone,agency_fare_url,agency_branding_url +1,Fake Transit,,,,,America/Los_Angeles,, diff --git a/src/test/resources/fake-agency-bad-calendar-date/calendar.txt b/src/test/resources/fake-agency-bad-calendar-date/calendar.txt new file mode 100755 index 000000000..03b2867f7 --- /dev/null +++ b/src/test/resources/fake-agency-bad-calendar-date/calendar.txt @@ -0,0 +1,2 @@ +service_id,monday,tuesday,wednesday,thursday,friday,saturday,sunday,start_date,end_date +04100312-8fe1-46a5-a9f2-556f39478f57,1,1,1,1,1,1,1,bad_date!,20170917 diff --git a/src/test/resources/fake-agency-bad-calendar-date/calendar_dates.txt b/src/test/resources/fake-agency-bad-calendar-date/calendar_dates.txt new file mode 100755 index 000000000..546ad12b5 --- /dev/null +++ b/src/test/resources/fake-agency-bad-calendar-date/calendar_dates.txt @@ -0,0 +1,3 @@ +service_id,date,exception_type +04100312-8fe1-46a5-a9f2-556f39478f57,bad_date,2 +04100312-8fe1-46a5-a9f2-556f39478f57,bad_date2,1 \ No newline at end of file diff --git a/src/test/resources/fake-agency-bad-calendar-date/fare_attributes.txt b/src/test/resources/fake-agency-bad-calendar-date/fare_attributes.txt new file mode 100755 index 000000000..3173d016d --- /dev/null +++ b/src/test/resources/fake-agency-bad-calendar-date/fare_attributes.txt @@ -0,0 +1,2 @@ +fare_id,price,currency_type,payment_method,transfers,transfer_duration +route_based_fare,1.23,USD,0,0,0 \ No newline at end of file diff --git a/src/test/resources/fake-agency-bad-calendar-date/fare_rules.txt b/src/test/resources/fake-agency-bad-calendar-date/fare_rules.txt new file mode 100755 index 000000000..05d2aadf8 --- /dev/null +++ b/src/test/resources/fake-agency-bad-calendar-date/fare_rules.txt @@ -0,0 +1,2 @@ +fare_id,route_id,origin_id,destination_id,contains_id +route_based_fare,1,,, \ No newline at end of file diff --git a/src/test/resources/fake-agency-bad-calendar-date/feed_info.txt b/src/test/resources/fake-agency-bad-calendar-date/feed_info.txt new file mode 100644 index 000000000..b617faf49 --- /dev/null +++ b/src/test/resources/fake-agency-bad-calendar-date/feed_info.txt @@ -0,0 +1,2 @@ +feed_publisher_name,feed_publisher_url,feed_lang,feed_version +Conveyal,http://www.conveyal.com,en,1.0 \ No newline at end of file diff --git a/src/test/resources/fake-agency-bad-calendar-date/frequencies.txt b/src/test/resources/fake-agency-bad-calendar-date/frequencies.txt new file mode 100755 index 000000000..9baceff3a --- /dev/null +++ 
b/src/test/resources/fake-agency-bad-calendar-date/frequencies.txt @@ -0,0 +1,2 @@ +trip_id,start_time,end_time,headway_secs,exact_times +frequency-trip,08:00:00,09:00:00,1800,0 \ No newline at end of file diff --git a/src/test/resources/fake-agency-bad-calendar-date/routes.txt b/src/test/resources/fake-agency-bad-calendar-date/routes.txt new file mode 100755 index 000000000..35ea7aa67 --- /dev/null +++ b/src/test/resources/fake-agency-bad-calendar-date/routes.txt @@ -0,0 +1,2 @@ +agency_id,route_id,route_short_name,route_long_name,route_desc,route_type,route_url,route_color,route_text_color,route_branding_url +1,1,1,Route 1,,3,,7CE6E7,FFFFFF, diff --git a/src/test/resources/fake-agency-bad-calendar-date/shapes.txt b/src/test/resources/fake-agency-bad-calendar-date/shapes.txt new file mode 100755 index 000000000..3f2e3fd13 --- /dev/null +++ b/src/test/resources/fake-agency-bad-calendar-date/shapes.txt @@ -0,0 +1,8 @@ +shape_id,shape_pt_lat,shape_pt_lon,shape_pt_sequence,shape_dist_traveled +5820f377-f947-4728-ac29-ac0102cbc34e,37.0612132,-122.0074332,1,0.0000000 +5820f377-f947-4728-ac29-ac0102cbc34e,37.0611720,-122.0075000,2,7.4997067 +5820f377-f947-4728-ac29-ac0102cbc34e,37.0613590,-122.0076830,3,33.8739075 +5820f377-f947-4728-ac29-ac0102cbc34e,37.0608780,-122.0082780,4,109.0402932 +5820f377-f947-4728-ac29-ac0102cbc34e,37.0603590,-122.0088280,5,184.6078298 +5820f377-f947-4728-ac29-ac0102cbc34e,37.0597610,-122.0093540,6,265.8053023 +5820f377-f947-4728-ac29-ac0102cbc34e,37.0590660,-122.0099190,7,357.8617018 diff --git a/src/test/resources/fake-agency-bad-calendar-date/stop_times.txt b/src/test/resources/fake-agency-bad-calendar-date/stop_times.txt new file mode 100755 index 000000000..a8b109280 --- /dev/null +++ b/src/test/resources/fake-agency-bad-calendar-date/stop_times.txt @@ -0,0 +1,5 @@ +trip_id,arrival_time,departure_time,stop_id,stop_sequence,stop_headsign,pickup_type,drop_off_type,shape_dist_traveled,timepoint +a30277f8-e50a-4a85-9141-b1e0da9d429d,07:00:00,07:00:00,4u6g,1,,0,0,0.0000000, +a30277f8-e50a-4a85-9141-b1e0da9d429d,07:01:00,07:01:00,johv,2,,0,0,341.4491961, +frequency-trip,08:00:00,08:00:00,4u6g,1,,0,0,0.0000000, +frequency-trip,08:01:00,08:01:00,johv,2,,0,0,341.4491961, diff --git a/src/test/resources/fake-agency-bad-calendar-date/stops.txt b/src/test/resources/fake-agency-bad-calendar-date/stops.txt new file mode 100755 index 000000000..8e71f7b2e --- /dev/null +++ b/src/test/resources/fake-agency-bad-calendar-date/stops.txt @@ -0,0 +1,3 @@ +stop_id,stop_code,stop_name,stop_desc,stop_lat,stop_lon,zone_id,stop_url,location_type,parent_station,stop_timezone,wheelchair_boarding +4u6g,,Butler Ln,,37.0612132,-122.0074332,,,0,,, +johv,,Scotts Valley Dr & Victor Sq,,37.0590172,-122.0096058,,,0,,, diff --git a/src/test/resources/fake-agency-bad-calendar-date/transfers.txt b/src/test/resources/fake-agency-bad-calendar-date/transfers.txt new file mode 100755 index 000000000..357103c47 --- /dev/null +++ b/src/test/resources/fake-agency-bad-calendar-date/transfers.txt @@ -0,0 +1 @@ +from_stop_id,to_stop_id,transfer_type,min_transfer_time diff --git a/src/test/resources/fake-agency-bad-calendar-date/trips.txt b/src/test/resources/fake-agency-bad-calendar-date/trips.txt new file mode 100755 index 000000000..eab14b86d --- /dev/null +++ b/src/test/resources/fake-agency-bad-calendar-date/trips.txt @@ -0,0 +1,3 @@ +route_id,trip_id,trip_headsign,trip_short_name,direction_id,block_id,shape_id,bikes_allowed,wheelchair_accessible,service_id 
+1,a30277f8-e50a-4a85-9141-b1e0da9d429d,,,0,,5820f377-f947-4728-ac29-ac0102cbc34e,0,0,04100312-8fe1-46a5-a9f2-556f39478f57 +1,frequency-trip,,,0,,5820f377-f947-4728-ac29-ac0102cbc34e,0,0,04100312-8fe1-46a5-a9f2-556f39478f57 \ No newline at end of file From cd2627e5ef3fb225f1b163ced26dba827758592b Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Thu, 8 Nov 2018 16:49:30 -0500 Subject: [PATCH 03/24] test: fix class name misspelling --- src/test/java/com/conveyal/gtfs/GTFSTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/com/conveyal/gtfs/GTFSTest.java b/src/test/java/com/conveyal/gtfs/GTFSTest.java index 104f251b8..b9b16d93a 100644 --- a/src/test/java/com/conveyal/gtfs/GTFSTest.java +++ b/src/test/java/com/conveyal/gtfs/GTFSTest.java @@ -237,7 +237,7 @@ public void canLoadFeedWithBadDates () { runIntegrationTest( "fake-agency-bad-calendar-date", nullValue(), - new PersistanceExpectation[]{} + new PersistenceExpectation[]{} ), equalTo(true) ); From 62eb74e76c80f2774b3c69dfc474626d4a0bb043 Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Fri, 9 Nov 2018 17:17:45 -0500 Subject: [PATCH 04/24] test(gtfs): improve SQL GTFS integration tests --- src/main/java/com/conveyal/gtfs/GTFS.java | 5 +- .../conveyal/gtfs/error/SQLErrorStorage.java | 1 + .../java/com/conveyal/gtfs/loader/Feed.java | 4 +- .../gtfs/loader/JdbcGtfsExporter.java | 6 +- .../conveyal/gtfs/loader/JdbcGtfsLoader.java | 4 +- .../gtfs/loader/JdbcGtfsSnapshotter.java | 16 +- .../conveyal/gtfs/loader/SnapshotResult.java | 11 + .../gtfs/validator/ServiceValidator.java | 10 +- src/test/java/com/conveyal/gtfs/GTFSTest.java | 345 +++++++++--------- .../gtfs/loader/JDBCTableWriterTest.java | 8 +- .../gtfs/storage/ExpectedFieldType.java | 6 + .../gtfs/storage/PersistenceExpectation.java | 26 ++ .../gtfs/storage/RecordExpectation.java | 81 ++++ .../stop_times.txt | 2 + .../fake-agency-bad-calendar-date/trips.txt | 1 + 15 files changed, 333 insertions(+), 193 deletions(-) create mode 100644 src/main/java/com/conveyal/gtfs/loader/SnapshotResult.java create mode 100644 src/test/java/com/conveyal/gtfs/storage/ExpectedFieldType.java create mode 100644 src/test/java/com/conveyal/gtfs/storage/PersistenceExpectation.java create mode 100644 src/test/java/com/conveyal/gtfs/storage/RecordExpectation.java diff --git a/src/main/java/com/conveyal/gtfs/GTFS.java b/src/main/java/com/conveyal/gtfs/GTFS.java index d1e2fb690..11c8f951b 100644 --- a/src/main/java/com/conveyal/gtfs/GTFS.java +++ b/src/main/java/com/conveyal/gtfs/GTFS.java @@ -5,6 +5,7 @@ import com.conveyal.gtfs.loader.JdbcGtfsExporter; import com.conveyal.gtfs.loader.JdbcGtfsLoader; import com.conveyal.gtfs.loader.JdbcGtfsSnapshotter; +import com.conveyal.gtfs.loader.SnapshotResult; import com.conveyal.gtfs.util.InvalidNamespaceException; import com.conveyal.gtfs.validator.ValidationResult; import com.fasterxml.jackson.databind.ObjectMapper; @@ -83,9 +84,9 @@ public static FeedLoadResult load (String filePath, DataSource dataSource) { * @param dataSource JDBC connection to existing database * @return FIXME should this be a separate SnapshotResult object? 
*/ - public static FeedLoadResult makeSnapshot (String feedId, DataSource dataSource) { + public static SnapshotResult makeSnapshot (String feedId, DataSource dataSource) { JdbcGtfsSnapshotter snapshotter = new JdbcGtfsSnapshotter(feedId, dataSource); - FeedLoadResult result = snapshotter.copyTables(); + SnapshotResult result = snapshotter.copyTables(); return result; } diff --git a/src/main/java/com/conveyal/gtfs/error/SQLErrorStorage.java b/src/main/java/com/conveyal/gtfs/error/SQLErrorStorage.java index be42d2504..f523ede6c 100644 --- a/src/main/java/com/conveyal/gtfs/error/SQLErrorStorage.java +++ b/src/main/java/com/conveyal/gtfs/error/SQLErrorStorage.java @@ -124,6 +124,7 @@ private void commit() { */ public void commitAndClose() { try { + LOG.info("Committing errors and closing SQL connection."); this.commit(); // Close the connection permanently (should be called only after errorStorage instance no longer needed). connection.close(); diff --git a/src/main/java/com/conveyal/gtfs/loader/Feed.java b/src/main/java/com/conveyal/gtfs/loader/Feed.java index c56fb099b..bb896196f 100644 --- a/src/main/java/com/conveyal/gtfs/loader/Feed.java +++ b/src/main/java/com/conveyal/gtfs/loader/Feed.java @@ -81,9 +81,7 @@ public ValidationResult validate () { SQLErrorStorage errorStorage = null; try { errorStorage = new SQLErrorStorage(dataSource.getConnection(), tablePrefix, false); - } catch (SQLException ex) { - throw new StorageException(ex); - } catch (InvalidNamespaceException ex) { + } catch (SQLException | InvalidNamespaceException ex) { throw new StorageException(ex); } int errorCountBeforeValidation = errorStorage.getErrorCount(); diff --git a/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsExporter.java b/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsExporter.java index ca05d7a74..9708c0f50 100644 --- a/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsExporter.java +++ b/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsExporter.java @@ -309,7 +309,7 @@ public FeedLoadResult exportTables() { // TableLoadResult. 
LOG.error("Exception while creating snapshot: {}", ex.toString()); ex.printStackTrace(); - result.fatalException = ex.getMessage(); + result.fatalException = ex.toString(); } return result; } @@ -354,7 +354,7 @@ private TableLoadResult export (Table table, Connection connection) { } catch (SQLException e) { LOG.error("failed to generate select statement for existing fields"); TableLoadResult tableLoadResult = new TableLoadResult(); - tableLoadResult.fatalException = e.getMessage(); + tableLoadResult.fatalException = e.toString(); e.printStackTrace(); return tableLoadResult; } @@ -402,7 +402,7 @@ private TableLoadResult export (Table table, String filterSql) { } catch (SQLException ex) { ex.printStackTrace(); } - tableLoadResult.fatalException = e.getMessage(); + tableLoadResult.fatalException = e.toString(); LOG.error("Exception while exporting tables", e); } return tableLoadResult; diff --git a/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsLoader.java b/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsLoader.java index 25f64dbc5..b777a3997 100644 --- a/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsLoader.java +++ b/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsLoader.java @@ -160,7 +160,7 @@ public FeedLoadResult loadTables () { // TODO catch exceptions separately while loading each table so load can continue, store in TableLoadResult LOG.error("Exception while loading GTFS file: {}", ex.toString()); ex.printStackTrace(); - result.fatalException = ex.getMessage(); + result.fatalException = ex.toString(); } return result; } @@ -224,7 +224,7 @@ private void registerFeed (File gtfsFile) { connection.commit(); LOG.info("Created new feed namespace: {}", insertStatement); } catch (Exception ex) { - LOG.error("Exception while registering new feed namespace in feeds table: {}", ex.getMessage()); + LOG.error("Exception while registering new feed namespace in feeds table", ex); DbUtils.closeQuietly(connection); } } diff --git a/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsSnapshotter.java b/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsSnapshotter.java index 2229968ec..739a3c0b7 100644 --- a/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsSnapshotter.java +++ b/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsSnapshotter.java @@ -55,9 +55,9 @@ public JdbcGtfsSnapshotter(String feedId, DataSource dataSource) { /** * Copy primary entity tables as well as Pattern and PatternStops tables. */ - public FeedLoadResult copyTables() { + public SnapshotResult copyTables() { // This result object will be returned to the caller to summarize the feed and report any critical errors. - FeedLoadResult result = new FeedLoadResult(); + SnapshotResult result = new SnapshotResult(); try { long startTime = System.currentTimeMillis(); @@ -88,7 +88,7 @@ public FeedLoadResult copyTables() { copy(Table.PATTERNS, true); copy(Table.PATTERN_STOP, true); // see method comments fo why different logic is needed for this table - createScheduleExceptionsTable(); + result.scheduleExceptions = createScheduleExceptionsTable(); result.shapes = copy(Table.SHAPES, true); result.stops = copy(Table.STOPS, true); // TODO: Should we defer index creation on stop times? @@ -105,7 +105,7 @@ public FeedLoadResult copyTables() { // TableLoadResult. 
LOG.error("Exception while creating snapshot: {}", ex.toString()); ex.printStackTrace(); - result.fatalException = ex.getMessage(); + result.fatalException = ex.toString(); } return result; } @@ -144,7 +144,7 @@ private TableLoadResult copy (Table table, boolean createIndexes) { connection.commit(); LOG.info("Done."); } catch (Exception ex) { - tableLoadResult.fatalException = ex.getMessage(); + tableLoadResult.fatalException = ex.toString(); LOG.error("Error: ", ex); try { connection.rollback(); @@ -306,8 +306,8 @@ private TableLoadResult createScheduleExceptionsTable() { } connection.commit(); - } catch (SQLException e) { - tableLoadResult.fatalException = e.getMessage(); + } catch (Exception e) { + tableLoadResult.fatalException = e.toString(); LOG.error("Error creating schedule Exceptions: ", e); e.printStackTrace(); try { @@ -440,7 +440,7 @@ private void registerSnapshot () { connection.commit(); LOG.info("Created new snapshot namespace: {}", insertStatement); } catch (Exception ex) { - LOG.error("Exception while registering snapshot namespace in feeds table: {}", ex.getMessage()); + LOG.error("Exception while registering snapshot namespace in feeds table", ex); DbUtils.closeQuietly(connection); } } diff --git a/src/main/java/com/conveyal/gtfs/loader/SnapshotResult.java b/src/main/java/com/conveyal/gtfs/loader/SnapshotResult.java new file mode 100644 index 000000000..9c596e345 --- /dev/null +++ b/src/main/java/com/conveyal/gtfs/loader/SnapshotResult.java @@ -0,0 +1,11 @@ +package com.conveyal.gtfs.loader; + +/** + * This contains the result of a feed snapshot operation. It is nearly identical to {@link FeedLoadResult} except that + * it has some additional tables that only exist for snapshots/editor feeds. + */ +public class SnapshotResult extends FeedLoadResult { + private static final long serialVersionUID = 1L; + + public TableLoadResult scheduleExceptions; +} diff --git a/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java b/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java index 9242b917c..9e7eeca79 100644 --- a/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java +++ b/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java @@ -117,7 +117,7 @@ public void complete(ValidationResult validationResult) { } } } catch (Exception ex) { - LOG.error(ex.getMessage()); + LOG.error("Error validating service entries (merging calendars and calendar_dates)"); ex.printStackTrace(); // Continue on to next calendar entry. } @@ -182,7 +182,10 @@ select durations.service_id, duration_seconds, days_active from ( LocalDate lastDate = LocalDate.MIN; for (LocalDate date : dateInfoForDate.keySet()) { // If the date is invalid, skip. - if (date == null) continue; + if (date == null) { + LOG.error("Encountered null date. Did something go wrong with computeIfAbsent?"); + continue; + } if (date.isBefore(firstDate)) firstDate = date; if (date.isAfter(lastDate)) lastDate = date; } @@ -190,7 +193,8 @@ select durations.service_id, duration_seconds, days_active from ( validationResult.firstCalendarDate = firstDate; validationResult.lastCalendarDate = lastDate; // Is this any different? firstDate.until(lastDate, ChronoUnit.DAYS); - int nDays = (int) ChronoUnit.DAYS.between(firstDate, lastDate) + 1; + // If no days were found in the dateInfoForDate, nDays is a very large negative number, so we default to 0. 
+ int nDays = Math.max(0, (int) ChronoUnit.DAYS.between(firstDate, lastDate) + 1); validationResult.dailyBusSeconds = new int[nDays]; validationResult.dailyTramSeconds = new int[nDays]; validationResult.dailyMetroSeconds = new int[nDays]; diff --git a/src/test/java/com/conveyal/gtfs/GTFSTest.java b/src/test/java/com/conveyal/gtfs/GTFSTest.java index 8c845cb4d..fb408c248 100644 --- a/src/test/java/com/conveyal/gtfs/GTFSTest.java +++ b/src/test/java/com/conveyal/gtfs/GTFSTest.java @@ -1,11 +1,17 @@ package com.conveyal.gtfs; +import com.conveyal.gtfs.error.NewGTFSErrorType; import com.conveyal.gtfs.loader.FeedLoadResult; +import com.conveyal.gtfs.loader.SnapshotResult; +import com.conveyal.gtfs.storage.ExpectedFieldType; +import com.conveyal.gtfs.storage.PersistenceExpectation; +import com.conveyal.gtfs.storage.RecordExpectation; import com.conveyal.gtfs.validator.ValidationResult; import com.csvreader.CsvReader; +import com.google.common.collect.ArrayListMultimap; +import com.google.common.collect.Multimap; import org.apache.commons.io.input.BOMInputStream; -import org.hamcrest.Matcher; import org.hamcrest.comparator.ComparatorMatcherBuilder; import org.junit.Before; import org.junit.Test; @@ -20,9 +26,9 @@ import java.io.PrintStream; import java.nio.charset.Charset; import java.sql.Connection; -import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; +import java.util.Collection; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; @@ -219,11 +225,7 @@ public void canLoadAndExportSimpleAgency() { ) }; assertThat( - runIntegrationTest( - "fake-agency", - nullValue(), - persistenceExpectations - ), + runIntegrationTest("fake-agency", persistenceExpectations), equalTo(true) ); } @@ -233,13 +235,18 @@ public void canLoadAndExportSimpleAgency() { */ @Test public void canLoadFeedWithBadDates () { + PersistenceExpectation[] expectations = PersistenceExpectation.list( + new PersistenceExpectation( + "calendar", + new RecordExpectation[]{ + new RecordExpectation("start_date", null) + } + ) + ); assertThat( - runIntegrationTest( - "fake-agency-bad-calendar-date", - nullValue(), - new PersistenceExpectation[]{} - ), - equalTo(true) + "Integration test passes", + runIntegrationTest("fake-agency-bad-calendar-date", expectations), + equalTo(true) ); } @@ -304,11 +311,7 @@ public void canLoadAndExportSimpleAgencyWithOnlyCalendarDates() { ) }; assertThat( - runIntegrationTest( - "fake-agency-only-calendar-dates", - nullValue(), - persistenceExpectations - ), + runIntegrationTest("fake-agency-only-calendar-dates", persistenceExpectations), equalTo(true) ); } @@ -321,19 +324,7 @@ public void canLoadAndExportSimpleAgencyWithOnlyCalendarDates() { * After the GTFS is loaded, this will also initiate an export of a GTFS from the database and check * the integrity of the exported GTFS. 
*/ - private boolean runIntegrationTest( - String folderName, - Matcher fatalExceptionExpectation, - PersistenceExpectation[] persistenceExpectations - ) { - // zip up test folder into temp zip file - String zipFileName = null; - try { - zipFileName = TestUtils.zipFolderFiles(folderName); - } catch (IOException e) { - e.printStackTrace(); - return false; - } + private boolean runIntegrationTest(String folderName, PersistenceExpectation[] persistenceExpectations) { String newDBName = TestUtils.generateNewDB(); String dbConnectionUrl = String.format("jdbc:postgresql://localhost/%s", newDBName); DataSource dataSource = GTFS.createDataSource( @@ -342,31 +333,16 @@ private boolean runIntegrationTest( null ); - String namespace; - - // Verify that loading the feed completes and data is stored properly - try { - // load and validate feed - LOG.info("load and validate feed"); - FeedLoadResult loadResult = GTFS.load(zipFileName, dataSource); - ValidationResult validationResult = GTFS.validate(loadResult.uniqueIdentifier, dataSource); - - assertThat(validationResult.fatalException, is(fatalExceptionExpectation)); - namespace = loadResult.uniqueIdentifier; - - assertThatImportedGtfsMeetsExpectations(dbConnectionUrl, namespace, persistenceExpectations); - } catch (SQLException e) { + String namespace = zipFolderAndLoadGTFS(folderName, dataSource, persistenceExpectations); + if (namespace == null) { + // Indicates that loading failed. TestUtils.dropDB(newDBName); - e.printStackTrace(); return false; - } catch (AssertionError e) { - TestUtils.dropDB(newDBName); - throw e; } - // Verify that exporting the feed (in non-editor mode) completes and data is outputed properly + // Verify that exporting the feed (in non-editor mode) completes and data is outputted properly try { - LOG.info("export GTFS from created namespase"); + LOG.info("export GTFS from created namespace"); File tempFile = exportGtfs(namespace, dataSource, false); assertThatExportedGtfsMeetsExpectations(tempFile, persistenceExpectations, false); } catch (IOException e) { @@ -381,9 +357,10 @@ private boolean runIntegrationTest( // Verify that making a snapshot from an existing feed database, then exporting that snapshot to a GTFS zip file // works as expected try { - LOG.info("copy GTFS from created namespase"); - FeedLoadResult copyResult = GTFS.makeSnapshot(namespace, dataSource); - LOG.info("export GTFS from copied namespase"); + LOG.info("copy GTFS from created namespace"); + SnapshotResult copyResult = GTFS.makeSnapshot(namespace, dataSource); + assertThatSnapshotIsErrorFree(copyResult); + LOG.info("export GTFS from copied namespace"); File tempFile = exportGtfs(copyResult.uniqueIdentifier, dataSource, true); assertThatExportedGtfsMeetsExpectations(tempFile, persistenceExpectations, true); } catch (IOException e) { @@ -396,6 +373,61 @@ private boolean runIntegrationTest( return true; } + private void assertThatLoadIsErrorFree(FeedLoadResult loadResult) { + assertThat(loadResult.fatalException, is(nullValue())); + assertThat(loadResult.agency.fatalException, is(nullValue())); + assertThat(loadResult.calendar.fatalException, is(nullValue())); + assertThat(loadResult.calendarDates.fatalException, is(nullValue())); + assertThat(loadResult.fareAttributes.fatalException, is(nullValue())); + assertThat(loadResult.fareRules.fatalException, is(nullValue())); + assertThat(loadResult.feedInfo.fatalException, is(nullValue())); + assertThat(loadResult.frequencies.fatalException, is(nullValue())); + assertThat(loadResult.routes.fatalException, 
is(nullValue())); + assertThat(loadResult.shapes.fatalException, is(nullValue())); + assertThat(loadResult.stops.fatalException, is(nullValue())); + assertThat(loadResult.stopTimes.fatalException, is(nullValue())); + assertThat(loadResult.transfers.fatalException, is(nullValue())); + assertThat(loadResult.trips.fatalException, is(nullValue())); + } + + private void assertThatSnapshotIsErrorFree(SnapshotResult snapshotResult) { + assertThatLoadIsErrorFree(snapshotResult); + assertThat(snapshotResult.scheduleExceptions.fatalException, is(nullValue())); + } + + private String zipFolderAndLoadGTFS(String folderName, DataSource dataSource, PersistenceExpectation[] persistenceExpectations) { + // zip up test folder into temp zip file + String zipFileName; + try { + zipFileName = TestUtils.zipFolderFiles(folderName); + } catch (IOException e) { + e.printStackTrace(); + return null; + } + String namespace; + // Verify that loading the feed completes and data is stored properly + try { + // load and validate feed + LOG.info("load and validate feed"); + FeedLoadResult loadResult = GTFS.load(zipFileName, dataSource); + ValidationResult validationResult = GTFS.validate(loadResult.uniqueIdentifier, dataSource); + DataSource ds = GTFS.createDataSource( + dataSource.getConnection().getMetaData().getURL(), + null, + null, + true + ); + assertThatLoadIsErrorFree(loadResult); + assertThat(validationResult.fatalException, is(nullValue())); + namespace = loadResult.uniqueIdentifier; + assertThatImportedGtfsMeetsExpectations(ds.getConnection(), namespace, persistenceExpectations); + } catch (SQLException | AssertionError e) { + e.printStackTrace(); + return null; + } + return namespace; + } + /** * Helper function to export a GTFS from the database to a temporary zip file. */ @@ -405,18 +437,30 @@ private File exportGtfs(String namespace, DataSource dataSource, boolean fromEdi return tempFile; } + private class ValuePair { + private final Object expected; + private final Object found; + private ValuePair (Object expected, Object found) { + this.expected = expected; + this.found = found; + } + } + /** * Run through the list of persistence expectations to make sure that the feed was imported properly into the * database. */ private void assertThatImportedGtfsMeetsExpectations( - String dbConnectionUrl, + Connection connection, String namespace, PersistenceExpectation[] persistenceExpectations ) throws SQLException { + // Store field mismatches here (to provide assertion statements with more details). + Multimap fieldsWithMismatches = ArrayListMultimap.create(); + // Check that no validators failed during validation. 
+ assertThatValidationErrorsMatchExpected(connection, namespace, NewGTFSErrorType.VALIDATOR_FAILED, 0); // run through testing expectations - LOG.info("testing expecations of record storage in the database"); - Connection conn = DriverManager.getConnection(dbConnectionUrl); + LOG.info("testing expectations of record storage in the database"); for (PersistenceExpectation persistenceExpectation : persistenceExpectations) { // select all entries from a table String sql = String.format( @@ -425,7 +469,7 @@ private void assertThatImportedGtfsMeetsExpectations( persistenceExpectation.tableName ); LOG.info(sql); - ResultSet rs = conn.prepareStatement(sql).executeQuery(); + ResultSet rs = connection.prepareStatement(sql).executeQuery(); boolean foundRecord = false; int numRecordsSearched = 0; while (rs.next()) { @@ -435,32 +479,48 @@ private void assertThatImportedGtfsMeetsExpectations( for (RecordExpectation recordExpectation: persistenceExpectation.recordExpectations) { switch (recordExpectation.expectedFieldType) { case DOUBLE: + double doubleVal = rs.getDouble(recordExpectation.fieldName); LOG.info(String.format( "%s: %f", recordExpectation.fieldName, - rs.getDouble(recordExpectation.fieldName) + doubleVal )); - if (rs.getDouble(recordExpectation.fieldName) != recordExpectation.doubleExpectation) { + if (doubleVal != recordExpectation.doubleExpectation) { allFieldsMatch = false; } break; case INT: + int intVal = rs.getInt(recordExpectation.fieldName); LOG.info(String.format( "%s: %d", recordExpectation.fieldName, - rs.getInt(recordExpectation.fieldName) + intVal )); - if (rs.getInt(recordExpectation.fieldName) != recordExpectation.intExpectation) { + if (intVal != recordExpectation.intExpectation) { + fieldsWithMismatches.put( + recordExpectation.fieldName, + new ValuePair(recordExpectation.stringExpectation, intVal) + ); allFieldsMatch = false; } break; case STRING: + String strVal = rs.getString(recordExpectation.fieldName); LOG.info(String.format( "%s: %s", recordExpectation.fieldName, - rs.getString(recordExpectation.fieldName) + strVal )); - if (!rs.getString(recordExpectation.fieldName).equals(recordExpectation.stringExpectation)) { + if (strVal == null && recordExpectation.stringExpectation == null) { + break; + } else if ( + (strVal == null && recordExpectation.stringExpectation != null) || + !strVal.equals(recordExpectation.stringExpectation) + ) { + fieldsWithMismatches.put( + recordExpectation.fieldName, + new ValuePair(recordExpectation.stringExpectation, strVal) + ); allFieldsMatch = false; } break; @@ -477,8 +537,31 @@ private void assertThatImportedGtfsMeetsExpectations( break; } } - assertThatPersistenceExpectationRecordWasFound(numRecordsSearched, foundRecord); + assertThatPersistenceExpectationRecordWasFound(numRecordsSearched, foundRecord, fieldsWithMismatches); + } + } + + private void assertThatValidationErrorsMatchExpected( + Connection connection, + String namespace, + NewGTFSErrorType errorType, + int expectedCount + ) throws SQLException { + String errorCheckSql = String.format( + "select * from %s.errors where error_type = '%s'", + namespace, + errorType); + LOG.info(errorCheckSql); + ResultSet errorResults = connection.prepareStatement(errorCheckSql).executeQuery(); + int errorCount = 0; + while (errorResults.next()) { + errorCount++; } + assertThat( + String.format("%s errors encountered during validation should match the expected errors.", errorType), + errorCount, + equalTo(expectedCount) + ); } /** @@ -535,7 +618,12 @@ private void 
assertThatExportedGtfsMeetsExpectations( val, expectation )); - if (!val.equals(expectation)) { + if (val.isEmpty() && expectation == null) { + // First check that the csv value is an empty string and that the expectation is null. Null + // exported from the database to a csv should round trip into an empty string, so this meets the + // expectation. + break; + } else if (!val.equals(expectation)) { // sometimes there are slight differences in decimal precision in various fields // check if the decimal delta is acceptable if (equalsWithNumericDelta(val, recordExpectation)) continue; @@ -549,7 +637,7 @@ private void assertThatExportedGtfsMeetsExpectations( foundRecord = true; } } - assertThatPersistenceExpectationRecordWasFound(numRecordsSearched, foundRecord); + assertThatPersistenceExpectationRecordWasFound(numRecordsSearched, foundRecord, null); } } @@ -572,114 +660,33 @@ private boolean equalsWithNumericDelta(String val, RecordExpectation recordExpec /** * Helper method to make sure a persistence expectation was actually found after searching through records */ - private void assertThatPersistenceExpectationRecordWasFound(int numRecordsSearched, boolean foundRecord) { + private void assertThatPersistenceExpectationRecordWasFound( + int numRecordsSearched, + boolean foundRecord, + Multimap mismatches + ) { assertThat( "No records found in the ResultSet/CSV file", numRecordsSearched, ComparatorMatcherBuilder.usingNaturalOrdering().greaterThan(0) ); - assertThat( - "The record as defined in the PersistenceExpectation was not found.", - foundRecord, - equalTo(true) - ); - } - - /** - * A helper class to verify that data got stored in a particular table. - */ - private class PersistenceExpectation { - public String tableName; - // each persistence expectation has an array of record expectations which all must reference a single row - // if looking for multiple records in the same table, - // create numerous PersistenceExpectations with the same tableName - public RecordExpectation[] recordExpectations; - - - public PersistenceExpectation(String tableName, RecordExpectation[] recordExpectations) { - this.tableName = tableName; - this.recordExpectations = recordExpectations; - } - } - - private enum ExpectedFieldType { - INT, - DOUBLE, STRING - } - - /** - * A helper class to verify that data got stored in a particular record. 
- */ - private class RecordExpectation { - public double acceptedDelta; - public double doubleExpectation; - public String editorExpectation; - public ExpectedFieldType expectedFieldType; - public String fieldName; - public int intExpectation; - public String stringExpectation; - public boolean stringExpectationInCSV = false; - public boolean editorStringExpectation = false; - - public RecordExpectation(String fieldName, int intExpectation) { - this.fieldName = fieldName; - this.expectedFieldType = ExpectedFieldType.INT; - this.intExpectation = intExpectation; - } - - /** - * This extra constructor is a bit hacky in that it is only used for certain records that have - * an int type when stored in the database, but a string type when exported to GTFS - */ - public RecordExpectation(String fieldName, int intExpectation, String stringExpectation) { - this.fieldName = fieldName; - this.expectedFieldType = ExpectedFieldType.INT; - this.intExpectation = intExpectation; - this.stringExpectation = stringExpectation; - this.stringExpectationInCSV = true; - } - - /** - * This extra constructor is a also hacky in that it is only used for records that have - * an int type when stored in the database, and different values in the CSV export depending on - * whether or not it is a snapshot from the editor. Currently this only applies to stop_times.stop_sequence - */ - public RecordExpectation(String fieldName, int intExpectation, String stringExpectation, String editorExpectation) { - this.fieldName = fieldName; - this.expectedFieldType = ExpectedFieldType.INT; - this.intExpectation = intExpectation; - this.stringExpectation = stringExpectation; - this.stringExpectationInCSV = true; - this.editorStringExpectation = true; - this.editorExpectation = editorExpectation; - } - - public RecordExpectation(String fieldName, String stringExpectation) { - this.fieldName = fieldName; - this.expectedFieldType = ExpectedFieldType.STRING; - this.stringExpectation = stringExpectation; - } - - public RecordExpectation(String fieldName, double doubleExpectation, double acceptedDelta) { - this.fieldName = fieldName; - this.expectedFieldType = ExpectedFieldType.DOUBLE; - this.doubleExpectation = doubleExpectation; - this.acceptedDelta = acceptedDelta; - } - - public String getStringifiedExpectation(boolean fromEditor) { - if (fromEditor && editorStringExpectation) return editorExpectation; - if (stringExpectationInCSV) return stringExpectation; - switch (expectedFieldType) { - case DOUBLE: - return String.valueOf(doubleExpectation); - case INT: - return String.valueOf(intExpectation); - case STRING: - return stringExpectation; - default: - return null; + if (mismatches != null) { + for (String field : mismatches.keySet()) { + Collection valuePairs = mismatches.get(field); + for (ValuePair valuePair : valuePairs) { + assertThat( + String.format("The value expected for %s was not found", field), + valuePair.expected, + equalTo(valuePair.found) + ); + } } + } else { + assertThat( + "The record as defined in the PersistenceExpectation was not found.", + foundRecord, + equalTo(true) + ); } } } diff --git a/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java b/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java index 44a418b4a..c7fab1fa7 100644 --- a/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java +++ b/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java @@ -229,14 +229,16 @@ public void canCreateUpdateAndDeleteFares() throws IOException, SQLException, In )); } - private void 
assertThatSqlQueryYieldsZeroRows(String sql) throws SQLException { + void assertThatSqlQueryYieldsZeroRows(String sql) throws SQLException { assertThatSqlQueryYieldsRowCount(sql, 0); } private void assertThatSqlQueryYieldsRowCount(String sql, int expectedRowCount) throws SQLException { LOG.info(sql); - ResultSet resultSet = testDataSource.getConnection().prepareStatement(sql).executeQuery(); - assertThat(resultSet.getFetchSize(), equalTo(expectedRowCount)); + int recordCount = 0; + ResultSet rs = testDataSource.getConnection().prepareStatement(sql).executeQuery(); + while (rs.next()) recordCount++; + assertThat("Records matching query should equal expected count.", recordCount, equalTo(expectedRowCount)); } @Test diff --git a/src/test/java/com/conveyal/gtfs/storage/ExpectedFieldType.java b/src/test/java/com/conveyal/gtfs/storage/ExpectedFieldType.java new file mode 100644 index 000000000..1eb604f04 --- /dev/null +++ b/src/test/java/com/conveyal/gtfs/storage/ExpectedFieldType.java @@ -0,0 +1,6 @@ +package com.conveyal.gtfs.storage; + +public enum ExpectedFieldType { + INT, + DOUBLE, STRING +} diff --git a/src/test/java/com/conveyal/gtfs/storage/PersistenceExpectation.java b/src/test/java/com/conveyal/gtfs/storage/PersistenceExpectation.java new file mode 100644 index 000000000..1c1c1be7d --- /dev/null +++ b/src/test/java/com/conveyal/gtfs/storage/PersistenceExpectation.java @@ -0,0 +1,26 @@ +package com.conveyal.gtfs.storage; + +import com.conveyal.gtfs.GTFSTest; + +/** + * A helper class to verify that data got stored in a particular table. + */ +public class PersistenceExpectation { + public String tableName; + /** + * Each persistence expectation has an array of record expectations which all must reference a single row. + * If looking for multiple records in the same table, create numerous PersistenceExpectations with the same + * tableName. + */ + public RecordExpectation[] recordExpectations; + + + public PersistenceExpectation(String tableName, RecordExpectation[] recordExpectations) { + this.tableName = tableName; + this.recordExpectations = recordExpectations; + } + + public static PersistenceExpectation[] list (PersistenceExpectation... expectations) { + return expectations; + } +} diff --git a/src/test/java/com/conveyal/gtfs/storage/RecordExpectation.java b/src/test/java/com/conveyal/gtfs/storage/RecordExpectation.java new file mode 100644 index 000000000..b3394f27f --- /dev/null +++ b/src/test/java/com/conveyal/gtfs/storage/RecordExpectation.java @@ -0,0 +1,81 @@ +package com.conveyal.gtfs.storage; + +/** + * A helper class to verify that data got stored in a particular record. + */ +public class RecordExpectation { + public double acceptedDelta; + public double doubleExpectation; + public String editorExpectation; + public ExpectedFieldType expectedFieldType; + public String fieldName; + public int intExpectation; + public String stringExpectation; + public boolean stringExpectationInCSV = false; + public boolean editorStringExpectation = false; + + public RecordExpectation(String fieldName, int intExpectation) { + this.fieldName = fieldName; + this.expectedFieldType = ExpectedFieldType.INT; + this.intExpectation = intExpectation; + } + + public static RecordExpectation[] list(RecordExpectation... 
expectations) { + return expectations; + } + + /** + * This extra constructor is a bit hacky in that it is only used for certain records that have + * an int type when stored in the database, but a string type when exported to GTFS + */ + public RecordExpectation(String fieldName, int intExpectation, String stringExpectation) { + this.fieldName = fieldName; + this.expectedFieldType = ExpectedFieldType.INT; + this.intExpectation = intExpectation; + this.stringExpectation = stringExpectation; + this.stringExpectationInCSV = true; + } + + /** + * This extra constructor is a also hacky in that it is only used for records that have + * an int type when stored in the database, and different values in the CSV export depending on + * whether or not it is a snapshot from the editor. Currently this only applies to stop_times.stop_sequence + */ + public RecordExpectation(String fieldName, int intExpectation, String stringExpectation, String editorExpectation) { + this.fieldName = fieldName; + this.expectedFieldType = ExpectedFieldType.INT; + this.intExpectation = intExpectation; + this.stringExpectation = stringExpectation; + this.stringExpectationInCSV = true; + this.editorStringExpectation = true; + this.editorExpectation = editorExpectation; + } + + public RecordExpectation(String fieldName, String stringExpectation) { + this.fieldName = fieldName; + this.expectedFieldType = ExpectedFieldType.STRING; + this.stringExpectation = stringExpectation; + } + + public RecordExpectation(String fieldName, double doubleExpectation, double acceptedDelta) { + this.fieldName = fieldName; + this.expectedFieldType = ExpectedFieldType.DOUBLE; + this.doubleExpectation = doubleExpectation; + this.acceptedDelta = acceptedDelta; + } + + public String getStringifiedExpectation(boolean fromEditor) { + if (fromEditor && editorStringExpectation) return editorExpectation; + if (stringExpectationInCSV) return stringExpectation; + switch (expectedFieldType) { + case DOUBLE: + return String.valueOf(doubleExpectation); + case INT: + return String.valueOf(intExpectation); + case STRING: + return stringExpectation; + default: + return null; + } + } +} diff --git a/src/test/resources/fake-agency-bad-calendar-date/stop_times.txt b/src/test/resources/fake-agency-bad-calendar-date/stop_times.txt index a8b109280..59a2623b9 100755 --- a/src/test/resources/fake-agency-bad-calendar-date/stop_times.txt +++ b/src/test/resources/fake-agency-bad-calendar-date/stop_times.txt @@ -1,5 +1,7 @@ trip_id,arrival_time,departure_time,stop_id,stop_sequence,stop_headsign,pickup_type,drop_off_type,shape_dist_traveled,timepoint a30277f8-e50a-4a85-9141-b1e0da9d429d,07:00:00,07:00:00,4u6g,1,,0,0,0.0000000, a30277f8-e50a-4a85-9141-b1e0da9d429d,07:01:00,07:01:00,johv,2,,0,0,341.4491961, +2,09:00:00,09:00:00,4u6g,1,,0,0,0.0000000, +2,09:01:00,09:01:00,johv,2,,0,0,341.4491961, frequency-trip,08:00:00,08:00:00,4u6g,1,,0,0,0.0000000, frequency-trip,08:01:00,08:01:00,johv,2,,0,0,341.4491961, diff --git a/src/test/resources/fake-agency-bad-calendar-date/trips.txt b/src/test/resources/fake-agency-bad-calendar-date/trips.txt index eab14b86d..f0ecc68ae 100755 --- a/src/test/resources/fake-agency-bad-calendar-date/trips.txt +++ b/src/test/resources/fake-agency-bad-calendar-date/trips.txt @@ -1,3 +1,4 @@ route_id,trip_id,trip_headsign,trip_short_name,direction_id,block_id,shape_id,bikes_allowed,wheelchair_accessible,service_id 1,a30277f8-e50a-4a85-9141-b1e0da9d429d,,,0,,5820f377-f947-4728-ac29-ac0102cbc34e,0,0,04100312-8fe1-46a5-a9f2-556f39478f57 
+1,2,,,0,,5820f377-f947-4728-ac29-ac0102cbc34e,0,0,a 1,frequency-trip,,,0,,5820f377-f947-4728-ac29-ac0102cbc34e,0,0,04100312-8fe1-46a5-a9f2-556f39478f57 \ No newline at end of file From a3623fb6748eaa68ce95e53e316b4550529d2c22 Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Mon, 12 Nov 2018 10:51:49 -0500 Subject: [PATCH 05/24] test: fix bad method signature --- src/test/java/com/conveyal/gtfs/GTFSTest.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/test/java/com/conveyal/gtfs/GTFSTest.java b/src/test/java/com/conveyal/gtfs/GTFSTest.java index fb408c248..2f4fa4af3 100644 --- a/src/test/java/com/conveyal/gtfs/GTFSTest.java +++ b/src/test/java/com/conveyal/gtfs/GTFSTest.java @@ -414,8 +414,7 @@ private String zipFolderAndLoadGTFS(String folderName, DataSource dataSource, Pe DataSource ds = GTFS.createDataSource( dataSource.getConnection().getMetaData().getURL(), null, - null, - true + null ); assertThatLoadIsErrorFree(loadResult); assertThat(validationResult.fatalException, is(nullValue())); From c874bc287126dde0b2327b70bb562fc046b81d51 Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Mon, 12 Nov 2018 13:08:34 -0500 Subject: [PATCH 06/24] test: handle null date on snapshot --- .../java/com/conveyal/gtfs/loader/JdbcGtfsSnapshotter.java | 5 +++++ .../java/com/conveyal/gtfs/validator/ServiceValidator.java | 3 +-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsSnapshotter.java b/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsSnapshotter.java index 739a3c0b7..2ae00d35c 100644 --- a/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsSnapshotter.java +++ b/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsSnapshotter.java @@ -217,6 +217,11 @@ private TableLoadResult createScheduleExceptionsTable() { HashMap> addedServiceDays = new HashMap<>(); List datesWithExceptions = new ArrayList<>(); for (CalendarDate calendarDate : calendarDates) { + // Skip any null dates + if (calendarDate.date == null) { + LOG.warn("Encountered calendar date record with null value for date field. Skipping."); + continue; + } String date = calendarDate.date.format(DateTimeFormatter.BASIC_ISO_DATE); datesWithExceptions.add(date); if (calendarDate.exception_type == 1) { diff --git a/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java b/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java index 9e7eeca79..5ab5b2ed1 100644 --- a/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java +++ b/src/main/java/com/conveyal/gtfs/validator/ServiceValidator.java @@ -117,8 +117,7 @@ public void complete(ValidationResult validationResult) { } } } catch (Exception ex) { - LOG.error("Error validating service entries (merging calendars and calendar_dates)"); - ex.printStackTrace(); + LOG.error("Error validating service entries (merging calendars and calendar_dates)", ex); // Continue on to next calendar entry. 
} } From a4d43a24ee157702b849090240c1ab4fb67a5edf Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Tue, 18 Dec 2018 12:27:00 -0500 Subject: [PATCH 07/24] fix(shape-edits): update trip#shape_id if pattern value changes refs conveyal/datatools-ui#385; refs #109 --- .../conveyal/gtfs/loader/JdbcTableWriter.java | 91 ++++++++++++++----- .../java/com/conveyal/gtfs/loader/Table.java | 4 +- 2 files changed, 69 insertions(+), 26 deletions(-) diff --git a/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java b/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java index 94f77418d..9a1c1a05d 100644 --- a/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java +++ b/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java @@ -77,9 +77,11 @@ public JdbcTableWriter ( // TODO: verify specTable.name is ok to use for constructing dynamic sql statements this.specTable = specTable; + // Below is a bit messy because the connection field on this class is set to final and we cannot set this to + // the optionally passed-in connection with the ternary statement unless connection1 already exists. Connection connection1; try { - connection1 = dataSource.getConnection(); + connection1 = this.dataSource.getConnection(); } catch (SQLException e) { e.printStackTrace(); connection1 = null; @@ -170,11 +172,34 @@ public String update(Integer id, String json, boolean autoCommit) throws SQLExce updateChildTable(childEntitiesArray, entityId, isCreating, referencingTable, connection); } } - // Iterate over table's fields and apply linked values to any tables - if ("routes".equals(specTable.name)) { - updateLinkedFields(specTable, jsonObject, "trips", "route_id", "wheelchair_accessible"); - } else if ("patterns".equals(specTable.name)) { - updateLinkedFields(specTable, jsonObject, "trips", "pattern_id", "direction_id"); + // Iterate over table's fields and apply linked values to any tables. This is to account for "exemplar" + // fields that exist in one place in our tables, but are duplicated in GTFS. For example, we have a + // Route#wheelchair_accessible field, which is used to set the Trip#wheelchair_accessible values for all + // trips on a route. + // NOTE: pattern_stops linked fields are updated in the updateChildTable method. + switch (specTable.name) { + case "routes": + updateLinkedFields( + specTable, + jsonObject, + "trips", + "route_id", + "wheelchair_accessible" + ); + break; + case "patterns": + updateLinkedFields( + specTable, + jsonObject, + "trips", + "pattern_id", + "direction_id", "shape_id" + ); + break; + default: + LOG.debug("No linked fields to update."); + // Do nothing. + break; } if (autoCommit) { // If nothing failed up to this point, it is safe to assume there were no problems updating/creating the @@ -205,6 +230,7 @@ public String update(Integer id, String json, boolean autoCommit) throws SQLExce * the related table (e.g., pattern_stop#timepoint should update all of its related stop_times). */ private void updateLinkedFields(Table referenceTable, ObjectNode jsonObject, String tableName, String keyField, String ...fieldNames) throws SQLException { + boolean updatingStopTimes = "stop_times".equals(tableName); // Collect fields, the JSON values for these fields, and the strings to add to the prepared statement into Lists. 
List fields = new ArrayList<>(); List values = new ArrayList<>(); @@ -216,12 +242,17 @@ private void updateLinkedFields(Table referenceTable, ObjectNode jsonObject, Str } String setFields = String.join(", ", fieldStrings); // If updating stop_times, use a more complex query that joins trips to stop_times in order to match on pattern_id - boolean updatingStopTimes = "stop_times".equals(tableName); Field orderField = updatingStopTimes ? referenceTable.getFieldForName(referenceTable.getOrderFieldName()) : null; String sql = updatingStopTimes - ? String.format("update %s.stop_times st set %s from %s.trips t " + - "where st.trip_id = t.trip_id AND t.%s = ? AND st.%s = ?", - tablePrefix, setFields, tablePrefix, keyField, orderField.name) + ? String.format( + "update %s.stop_times st set %s from %s.trips t " + + "where st.trip_id = t.trip_id AND t.%s = ? AND st.%s = ?", + tablePrefix, + setFields, + tablePrefix, + keyField, + orderField.name + ) : String.format("update %s.%s set %s where %s = ?", tablePrefix, tableName, setFields, keyField); // Prepare the statement and set statement parameters PreparedStatement statement = connection.prepareStatement(sql); @@ -371,8 +402,6 @@ private void updateChildTable(ArrayNode subEntities, Integer id, boolean isCreat // Get parent entity's key value keyValue = getValueForId(id, keyField.name, tablePrefix, specTable, connection); String childTableName = String.join(".", tablePrefix, subTable.name); - // FIXME: add check for pattern stop consistency. - // FIXME: re-order stop times if pattern stop order changes. // Reconciling pattern stops MUST happen before original pattern stops are deleted in below block (with // getUpdateReferencesSql) if ("pattern_stops".equals(subTable.name)) { @@ -390,15 +419,14 @@ private void updateChildTable(ArrayNode subEntities, Integer id, boolean isCreat } reconcilePatternStops(keyValue, newPatternStops, connection); } - // FIXME: allow shapes to be updated on pattern geometry change. if (!isCreatingNewEntity) { // Delete existing sub-entities for given entity ID if the parent entity is not being newly created. String deleteSql = getUpdateReferencesSql(SqlMethod.DELETE, childTableName, keyField, keyValue, null); LOG.info(deleteSql); Statement statement = connection.createStatement(); // FIXME: Use copy on update for a pattern's shape instead of deleting the previous shape and replacing it. - // This would better account for GTFS data loaded from a file where multiple patterns reference a single - // shape. + // This would better account for GTFS data loaded from a file where multiple patterns reference a single + // shape. int result = statement.executeUpdate(deleteSql); LOG.info("Deleted {} {}", result, subTable.name); // FIXME: are there cases when an update should not return zero? @@ -436,7 +464,7 @@ private void updateChildTable(ArrayNode subEntities, Integer id, boolean isCreat // If handling first iteration, create the prepared statement (later iterations will add to batch). insertStatement = createPreparedUpdate(id, true, subEntity, subTable, connection, true); } - // Update linked stop times fields for updated pattern stop (e.g., timepoint, pickup/drop off type). + // Update linked stop times fields for each updated pattern stop (e.g., timepoint, pickup/drop off type). 
if ("pattern_stops".equals(subTable.name)) { updateLinkedFields( subTable, @@ -610,8 +638,13 @@ else if (i == newStops.size() - 1 || !originalStopIds.get(i).equals(newStops.get } // Increment sequences for stops that follow the inserted location (including the stop at the changed index). // NOTE: This should happen before the blank stop time insertion for logical consistency. - String updateSql = String.format("update %s.stop_times set stop_sequence = stop_sequence + 1 from %s.trips where stop_sequence >= %d AND %s", - tablePrefix, tablePrefix, differenceLocation, joinToTrips); + String updateSql = String.format( + "update %s.stop_times set stop_sequence = stop_sequence + 1 from %s.trips where stop_sequence >= %d AND %s", + tablePrefix, + tablePrefix, + differenceLocation, + joinToTrips + ); LOG.info(updateSql); PreparedStatement updateStatement = connection.prepareStatement(updateSql); int updated = updateStatement.executeUpdate(); @@ -638,13 +671,23 @@ else if (originalStopIds.size() == newStops.size() + 1) { } } // Delete stop at difference location - String deleteSql = String.format("delete from %s.stop_times using %s.trips where stop_sequence = %d AND %s", - tablePrefix, tablePrefix, differenceLocation, joinToTrips); + String deleteSql = String.format( + "delete from %s.stop_times using %s.trips where stop_sequence = %d AND %s", + tablePrefix, + tablePrefix, + differenceLocation, + joinToTrips + ); LOG.info(deleteSql); PreparedStatement deleteStatement = connection.prepareStatement(deleteSql); // Decrement all stops with sequence greater than difference location - String updateSql = String.format("update %s.stop_times set stop_sequence = stop_sequence - 1 from %s.trips where stop_sequence > %d AND %s", - tablePrefix, tablePrefix, differenceLocation, joinToTrips); + String updateSql = String.format( + "update %s.stop_times set stop_sequence = stop_sequence - 1 from %s.trips where stop_sequence > %d AND %s", + tablePrefix, + tablePrefix, + differenceLocation, + joinToTrips + ); LOG.info(updateSql); PreparedStatement updateStatement = connection.prepareStatement(updateSql); int deleted = deleteStatement.executeUpdate(); @@ -764,9 +807,7 @@ else if (originalStopIds.size() < newStops.size()) { insertBlankStopTimes(tripsForPattern, newStops, firstDifferentIndex, stopsToInsert, connection); } // ANY OTHER TYPE OF MODIFICATION IS NOT SUPPORTED - else { - throw new IllegalStateException(RECONCILE_STOPS_ERROR_MSG); - } + else throw new IllegalStateException(RECONCILE_STOPS_ERROR_MSG); } /** diff --git a/src/main/java/com/conveyal/gtfs/loader/Table.java b/src/main/java/com/conveyal/gtfs/loader/Table.java index 9c6b94b7b..0f2a91175 100644 --- a/src/main/java/com/conveyal/gtfs/loader/Table.java +++ b/src/main/java/com/conveyal/gtfs/loader/Table.java @@ -163,6 +163,7 @@ public Table (String name, Class entityClass, Requirement requ new ColorField("route_text_color", OPTIONAL), // Editor fields below. new ShortField("publicly_visible", EDITOR, 1), + // wheelchair_accessible is an exemplar field applied to all trips on a route. new ShortField("wheelchair_accessible", EDITOR, 2).permitEmptyValue(), new IntegerField("route_sort_order", OPTIONAL, 0, Integer.MAX_VALUE), // Status values are In progress (0), Pending approval (1), and Approved (2). 
@@ -196,7 +197,8 @@ public Table (String name, Class entityClass, Requirement requ new StringField("pattern_id", REQUIRED), new StringField("route_id", REQUIRED).isReferenceTo(ROUTES), new StringField("name", OPTIONAL), - // Editor-specific fields + // Editor-specific fields. + // direction_id and shape_id are exemplar fields applied to all trips for a pattern. new ShortField("direction_id", EDITOR, 1), new ShortField("use_frequency", EDITOR, 1), new StringField("shape_id", EDITOR).isReferenceTo(SHAPES) From 07e0e29cc0d66638835f659e948305e17093d1d8 Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Tue, 18 Dec 2018 13:48:06 -0500 Subject: [PATCH 08/24] style: add new lines to reduce line width --- .../conveyal/gtfs/loader/JdbcTableWriter.java | 128 +++++++++++++++--- 1 file changed, 109 insertions(+), 19 deletions(-) diff --git a/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java b/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java index 9a1c1a05d..6b793fdf8 100644 --- a/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java +++ b/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java @@ -229,7 +229,13 @@ public String update(Integer id, String json, boolean autoCommit) throws SQLExce * tables (for now just fields in trips and stop_times) where the reference table's value should take precedence over * the related table (e.g., pattern_stop#timepoint should update all of its related stop_times). */ - private void updateLinkedFields(Table referenceTable, ObjectNode jsonObject, String tableName, String keyField, String ...fieldNames) throws SQLException { + private void updateLinkedFields( + Table referenceTable, + ObjectNode jsonObject, + String tableName, + String keyField, + String ...fieldNames + ) throws SQLException { boolean updatingStopTimes = "stop_times".equals(tableName); // Collect fields, the JSON values for these fields, and the strings to add to the prepared statement into Lists. List fields = new ArrayList<>(); @@ -283,7 +289,14 @@ private void updateLinkedFields(Table referenceTable, ObjectNode jsonObject, Str * object here is provided as a positional argument (rather than provided via the JdbcTableWriter instance field) * because this method is used to update both the specTable for the primary entity and any relevant child entities. */ - private PreparedStatement createPreparedUpdate(Integer id, boolean isCreating, ObjectNode jsonObject, Table table, Connection connection, boolean batch) throws SQLException { + private PreparedStatement createPreparedUpdate( + Integer id, + boolean isCreating, + ObjectNode jsonObject, + Table table, + Connection connection, + boolean batch + ) throws SQLException { String statementString; if (isCreating) { statementString = table.generateInsertSql(tablePrefix, true); @@ -306,7 +319,12 @@ private PreparedStatement createPreparedUpdate(Integer id, boolean isCreating, O * taken from JSON. Note, string values are used here in order to take advantage of setParameter method on * individual fields, which handles parsing string and non-string values into the appropriate SQL field types. 
*/ - private void setStatementParameters(ObjectNode jsonObject, Table table, PreparedStatement preparedStatement, Connection connection) throws SQLException { + private void setStatementParameters( + ObjectNode jsonObject, + Table table, + PreparedStatement preparedStatement, + Connection connection + ) throws SQLException { // JDBC SQL statements use a one-based index for setting fields/parameters List missingFieldNames = new ArrayList<>(); int index = 1; @@ -380,7 +398,13 @@ private void setStatementParameters(ObjectNode jsonObject, Table table, Prepared } if (missingFieldNames.size() > 0) { // String joinedFieldNames = missingFieldNames.stream().collect(Collectors.joining(", ")); - throw new SQLException(String.format("The following field(s) are missing from JSON %s object: %s", table.name, missingFieldNames.toString())); + throw new SQLException( + String.format( + "The following field(s) are missing from JSON %s object: %s", + table.name, + missingFieldNames.toString() + ) + ); } } @@ -486,13 +510,15 @@ private void updateChildTable(ArrayNode subEntities, Integer id, boolean isCreat boolean orderIsUnique = orderValues.add(orderValue); boolean valuesAreIncrementing = ++previousOrder == orderValue; if (!orderIsUnique || !valuesAreIncrementing) { - throw new SQLException(String.format( + throw new SQLException( + String.format( "%s %s values must be zero-based, unique, and incrementing. Entity at index %d had %s value of %d", subTable.name, orderFieldName, entityCount, previousOrder == 0 ? "non-zero" : !valuesAreIncrementing ? "non-incrementing" : "duplicate", - orderValue) + orderValue + ) ); } } @@ -814,7 +840,13 @@ else if (originalStopIds.size() < newStops.size()) { * Check the stops in the changed region to ensure they remain in the same order. If not, throw an exception to * cancel the transaction. */ - private static void verifyInteriorStopsAreUnchanged(List originalStopIds, List newStops, int firstDifferentIndex, int lastDifferentIndex, boolean movedRight) { + private static void verifyInteriorStopsAreUnchanged( + List originalStopIds, + List newStops, + int firstDifferentIndex, + int lastDifferentIndex, + boolean movedRight + ) { //Stops mapped to list of stop IDs simply for easier viewing/comparison with original IDs while debugging with // breakpoints. List newStopIds = newStops.stream().map(s -> s.stop_id).collect(Collectors.toList()); @@ -839,7 +871,13 @@ private static void verifyInteriorStopsAreUnchanged(List originalStopIds * You must call this method after updating sequences for any stop times following the starting stop sequence to * avoid overwriting these other stop times. */ - private void insertBlankStopTimes(List tripIds, List newStops, int startingStopSequence, int stopTimesToAdd, Connection connection) throws SQLException { + private void insertBlankStopTimes( + List tripIds, + List newStops, + int startingStopSequence, + int stopTimesToAdd, + Connection connection + ) throws SQLException { if (tripIds.isEmpty()) { // There is no need to insert blank stop times if there are no trips for the pattern. 
return; @@ -999,7 +1037,13 @@ private static long handleStatementExecution(PreparedStatement statement, boolea * * FIXME: add more detail/precise language on what this method actually does */ - private static void ensureReferentialIntegrity(Connection connection, ObjectNode jsonObject, String namespace, Table table, Integer id) throws SQLException { + private static void ensureReferentialIntegrity( + Connection connection, + ObjectNode jsonObject, + String namespace, + Table table, + Integer id + ) throws SQLException { final boolean isCreating = id == null; String keyField = table.getKeyFieldName(); String tableName = String.join(".", namespace, table.name); @@ -1075,7 +1119,12 @@ private static int getRowCount(String tableName, Connection connection) throws S /** * For some condition (where field = string value), return the set of unique int IDs for the records that match. */ - private static TIntSet getIdsForCondition(String tableName, String keyField, String keyValue, Connection connection) throws SQLException { + private static TIntSet getIdsForCondition( + String tableName, + String keyField, + String keyValue, + Connection connection + ) throws SQLException { String idCheckSql = String.format("select id from %s where %s = ?", tableName, keyField); // Create statement for counting rows selected PreparedStatement statement = connection.prepareStatement(idCheckSql); @@ -1149,7 +1198,13 @@ private static String getValueForId(int id, String fieldName, String namespace, * not necessarily delete a shape that is shared by multiple trips)? I think not because we are skipping foreign refs * found in the table for the entity being updated/deleted. [Leaving this comment in place for now though.] */ - private static void updateReferencingTables(String namespace, Table table, Connection connection, int id, String newKeyValue) throws SQLException { + private static void updateReferencingTables( + String namespace, + Table table, + Connection connection, + int id, + String newKeyValue + ) throws SQLException { Field keyField = table.getFieldForName(table.getKeyFieldName()); Class entityClass = table.getEntityClass(); // Determine method (update vs. delete) depending on presence of newKeyValue field. @@ -1206,10 +1261,18 @@ private static void updateReferencingTables(String namespace, Table table, Conne if (table.isCascadeDeleteRestricted()) { // The entity must not have any referencing entities in order to delete it. connection.rollback(); - // List idStrings = new ArrayList<>(); - // uniqueIds.forEach(uniqueId -> idStrings.add(String.valueOf(uniqueId))); - // String message = String.format("Cannot delete %s %s=%s. %d %s reference this %s (%s).", entityClass.getSimpleName(), keyField, keyValue, result, referencingTable.name, entityClass.getSimpleName(), String.join(",", idStrings)); - String message = String.format("Cannot delete %s %s=%s. %d %s reference this %s.", entityClass.getSimpleName(), keyField.name, keyValue, result, referencingTable.name, entityClass.getSimpleName()); + // List idStrings = new ArrayList<>(); + // uniqueIds.forEach(uniqueId -> idStrings.add(String.valueOf(uniqueId))); + // String message = String.format("Cannot delete %s %s=%s. %d %s reference this %s (%s).", entityClass.getSimpleName(), keyField, keyValue, result, referencingTable.name, entityClass.getSimpleName(), String.join(",", idStrings)); + String message = String.format( + "Cannot delete %s %s=%s. 
%d %s reference this %s.", + entityClass.getSimpleName(), + keyField.name, + keyValue, + result, + referencingTable.name, + entityClass.getSimpleName() + ); LOG.warn(message); throw new SQLException(message); } @@ -1226,12 +1289,23 @@ private static void updateReferencingTables(String namespace, Table table, Conne /** * Constructs SQL string based on method provided. */ - private static String getUpdateReferencesSql(SqlMethod sqlMethod, String refTableName, Field keyField, String keyValue, String newKeyValue) throws SQLException { + private static String getUpdateReferencesSql( + SqlMethod sqlMethod, + String refTableName, + Field keyField, + String keyValue, + String newKeyValue + ) throws SQLException { boolean isArrayField = keyField.getSqlType().equals(JDBCType.ARRAY); switch (sqlMethod) { case DELETE: if (isArrayField) { - return String.format("delete from %s where %s @> ARRAY['%s']::text[]", refTableName, keyField.name, keyValue); + return String.format( + "delete from %s where %s @> ARRAY['%s']::text[]", + refTableName, + keyField.name, + keyValue + ); } else { return String.format("delete from %s where %s = '%s'", refTableName, keyField.name, keyValue); } @@ -1240,9 +1314,25 @@ private static String getUpdateReferencesSql(SqlMethod sqlMethod, String refTabl // If the field to be updated is an array field (of which there are only text[] types in the db), // replace the old value with the new value using array contains clause. // FIXME This is probably horribly postgres specific. - return String.format("update %s set %s = array_replace(%s, '%s', '%s') where %s @> ARRAY['%s']::text[]", refTableName, keyField.name, keyField.name, keyValue, newKeyValue, keyField.name, keyValue); + return String.format( + "update %s set %s = array_replace(%s, '%s', '%s') where %s @> ARRAY['%s']::text[]", + refTableName, + keyField.name, + keyField.name, + keyValue, + newKeyValue, + keyField.name, + keyValue + ); } else { - return String.format("update %s set %s = '%s' where %s = '%s'", refTableName, keyField.name, newKeyValue, keyField.name, keyValue); + return String.format( + "update %s set %s = '%s' where %s = '%s'", + refTableName, + keyField.name, + newKeyValue, + keyField.name, + keyValue + ); } // case CREATE: // return String.format("insert into %s "); From f77397253dcfa340fbd7d231a9415996a4f73b57 Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Tue, 18 Dec 2018 14:53:31 -0500 Subject: [PATCH 09/24] fix(editor): update stop_time travel times when frequency pattern is updated fixes #165 --- .../conveyal/gtfs/loader/JdbcTableWriter.java | 97 +++++++++++++++---- 1 file changed, 77 insertions(+), 20 deletions(-) diff --git a/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java b/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java index 6b793fdf8..bfed055c6 100644 --- a/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java +++ b/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java @@ -169,7 +169,14 @@ public String update(Integer id, String json, boolean autoCommit) throws SQLExce int entityId = isCreating ? (int) newId : id; // Cast child entities to array node to iterate over. ArrayNode childEntitiesArray = (ArrayNode)childEntities; - updateChildTable(childEntitiesArray, entityId, isCreating, referencingTable, connection); + updateChildTable( + childEntitiesArray, + entityId, + jsonObject.get("use_frequency").asBoolean(), + isCreating, + referencingTable, + connection + ); } } // Iterate over table's fields and apply linked values to any tables. 
This is to account for "exemplar" @@ -231,19 +238,19 @@ public String update(Integer id, String json, boolean autoCommit) throws SQLExce */ private void updateLinkedFields( Table referenceTable, - ObjectNode jsonObject, - String tableName, + ObjectNode exemplarEntity, + String linkedTableName, String keyField, - String ...fieldNames + String ...linkedFieldsToUpdate ) throws SQLException { - boolean updatingStopTimes = "stop_times".equals(tableName); + boolean updatingStopTimes = "stop_times".equals(linkedTableName); // Collect fields, the JSON values for these fields, and the strings to add to the prepared statement into Lists. List fields = new ArrayList<>(); List values = new ArrayList<>(); List fieldStrings = new ArrayList<>(); - for (String field : fieldNames) { + for (String field : linkedFieldsToUpdate) { fields.add(referenceTable.getFieldForName(field)); - values.add(jsonObject.get(field)); + values.add(exemplarEntity.get(field)); fieldStrings.add(String.format("%s = ?", field)); } String setFields = String.join(", ", fieldStrings); @@ -259,7 +266,7 @@ private void updateLinkedFields( keyField, orderField.name ) - : String.format("update %s.%s set %s where %s = ?", tablePrefix, tableName, setFields, keyField); + : String.format("update %s.%s set %s where %s = ?", tablePrefix, linkedTableName, setFields, keyField); // Prepare the statement and set statement parameters PreparedStatement statement = connection.prepareStatement(sql); int oneBasedIndex = 1; @@ -271,16 +278,16 @@ private void updateLinkedFields( else field.setParameter(statement, oneBasedIndex++, newValue); } // Set "where clause" with value for key field (e.g., set values where pattern_id = '3') - statement.setString(oneBasedIndex++, jsonObject.get(keyField).asText()); + statement.setString(oneBasedIndex++, exemplarEntity.get(keyField).asText()); if (updatingStopTimes) { // If updating stop times set the order field parameter (stop_sequence) - String orderValue = jsonObject.get(orderField.name).asText(); + String orderValue = exemplarEntity.get(orderField.name).asText(); orderField.setParameter(statement, oneBasedIndex++, orderValue); } // Log query, execute statement, and log result. LOG.debug(statement.toString()); int entitiesUpdated = statement.executeUpdate(); - LOG.debug("{} {} linked fields updated", entitiesUpdated, tableName); + LOG.debug("{} {} linked fields updated", entitiesUpdated, linkedTableName); } /** @@ -417,7 +424,14 @@ private void setStatementParameters( * have a hierarchical relationship. * FIXME develop a better way to update tables with foreign keys to the table being updated. */ - private void updateChildTable(ArrayNode subEntities, Integer id, boolean isCreatingNewEntity, Table subTable, Connection connection) throws SQLException, IOException { + private void updateChildTable( + ArrayNode subEntities, + Integer id, + boolean foreignReferenceIsFrequencyPattern, + boolean isCreatingNewEntity, + Table subTable, + Connection connection + ) throws SQLException, IOException { // Get parent table's key field Field keyField; String keyValue; @@ -464,6 +478,7 @@ private void updateChildTable(ArrayNode subEntities, Integer id, boolean isCreat int previousOrder = -1; TIntSet orderValues = new TIntHashSet(); Multimap referencesPerTable = HashMultimap.create(); + int cumulativeTravelTime = 0; for (JsonNode entityNode : subEntities) { // Cast entity node to ObjectNode to allow mutations (JsonNode is immutable). 
ObjectNode subEntity = (ObjectNode)entityNode; @@ -490,15 +505,22 @@ private void updateChildTable(ArrayNode subEntities, Integer id, boolean isCreat } // Update linked stop times fields for each updated pattern stop (e.g., timepoint, pickup/drop off type). if ("pattern_stops".equals(subTable.name)) { + if (foreignReferenceIsFrequencyPattern) { + // Update stop times linked to pattern stop if the pattern uses frequencies and accumulate time. + // Default travel and dwell time behave as "linked fields" for associated stop times. In other + // words, frequency trips in the editor must match the pattern stop travel times. + cumulativeTravelTime += updateStopTimesForPatternStop(subEntity, cumulativeTravelTime); + } + // These fields should be updated for all patterns (e.g., timepoint, pickup/drop off type). updateLinkedFields( - subTable, - subEntity, - "stop_times", - "pattern_id", - "timepoint", - "drop_off_type", - "pickup_type", - "shape_dist_traveled" + subTable, + subEntity, + "stop_times", + "pattern_id", + "timepoint", + "drop_off_type", + "pickup_type", + "shape_dist_traveled" ); } setStatementParameters(subEntity, subTable, insertStatement, connection); @@ -546,6 +568,41 @@ private void updateChildTable(ArrayNode subEntities, Integer id, boolean isCreat } } + /** + * Updates the stop times that reference the specified pattern stop. + * @param patternStop the pattern stop for which to update stop times + * @param previousTravelTime the travel time accumulated up to the previous stop_time's departure time (or the + * previous pattern stop's dwell time) + * @return the travel and dwell time added by this pattern stop + * @throws SQLException + */ + private int updateStopTimesForPatternStop(ObjectNode patternStop, int previousTravelTime) throws SQLException { + String sql = String.format( + "update %s.stop_times st set arrival_time = ?, departure_time = ? from %s.trips t " + + "where st.trip_id = t.trip_id AND t.%s = ? AND st.%s = ?", + tablePrefix, + tablePrefix, + "pattern_id", + "stop_sequence" + ); + // Prepare the statement and set statement parameters + PreparedStatement statement = connection.prepareStatement(sql); + int oneBasedIndex = 1; + int travelTime = patternStop.get("default_travel_time").asInt(); + int arrivalTime = previousTravelTime + travelTime; + statement.setInt(oneBasedIndex++, arrivalTime); + int dwellTime = patternStop.get("default_dwell_time").asInt(); + statement.setInt(oneBasedIndex++, arrivalTime + dwellTime); + // Set "where clause" with value for pattern_id and stop_sequence + statement.setString(oneBasedIndex++, patternStop.get("pattern_id").asText()); + statement.setInt(oneBasedIndex++, patternStop.get("stop_sequence").asInt()); + // Log query, execute statement, and log result. + LOG.debug(statement.toString()); + int entitiesUpdated = statement.executeUpdate(); + LOG.debug("{} stop_time arrivals/departures updated", entitiesUpdated); + return travelTime + dwellTime; + } + /** * Checks that a set of string references to a set of reference tables are all valid. For each set of references * mapped to a reference table, the method queries for all of the references. 
If there are any references that were From 903aebd0d7602091acd4ad4d2215271921ea1e5d Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Tue, 18 Dec 2018 15:00:27 -0500 Subject: [PATCH 10/24] refactor(editor): fix NPE --- src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java b/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java index bfed055c6..7a3c1b571 100644 --- a/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java +++ b/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java @@ -169,10 +169,12 @@ public String update(Integer id, String json, boolean autoCommit) throws SQLExce int entityId = isCreating ? (int) newId : id; // Cast child entities to array node to iterate over. ArrayNode childEntitiesArray = (ArrayNode)childEntities; + boolean foreignReferenceIsFrequencyPattern = jsonObject.has("use_frequency") && + jsonObject.get("use_frequency").asBoolean(); updateChildTable( childEntitiesArray, entityId, - jsonObject.get("use_frequency").asBoolean(), + foreignReferenceIsFrequencyPattern, isCreating, referencingTable, connection From a9ad0f6fe55aa1b404bdb4ca54260ea7e11dc19b Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Tue, 18 Dec 2018 15:07:01 -0500 Subject: [PATCH 11/24] refactor: change method arg from Integer -> int --- src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java b/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java index 7a3c1b571..51aed6726 100644 --- a/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java +++ b/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java @@ -428,7 +428,7 @@ private void setStatementParameters( */ private void updateChildTable( ArrayNode subEntities, - Integer id, + int id, boolean foreignReferenceIsFrequencyPattern, boolean isCreatingNewEntity, Table subTable, From b96299af424152019f0b18bcc9915777fa4a0734 Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Wed, 19 Dec 2018 14:39:18 -0500 Subject: [PATCH 12/24] feat(validator): validate stop_time#shape_dist_traveled for increasing values fixes #163 --- .../java/com/conveyal/gtfs/model/StopTime.java | 2 +- .../gtfs/validator/SpeedTripValidator.java | 17 +++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/src/main/java/com/conveyal/gtfs/model/StopTime.java b/src/main/java/com/conveyal/gtfs/model/StopTime.java index 37fd5d2d8..d69078e34 100644 --- a/src/main/java/com/conveyal/gtfs/model/StopTime.java +++ b/src/main/java/com/conveyal/gtfs/model/StopTime.java @@ -26,7 +26,7 @@ public class StopTime extends Entity implements Cloneable, Serializable { public String stop_headsign; public int pickup_type; public int drop_off_type; - public double shape_dist_traveled; + public double shape_dist_traveled = DOUBLE_MISSING; public int timepoint = INT_MISSING; @Override diff --git a/src/main/java/com/conveyal/gtfs/validator/SpeedTripValidator.java b/src/main/java/com/conveyal/gtfs/validator/SpeedTripValidator.java index 46bc39d14..6a89163ae 100644 --- a/src/main/java/com/conveyal/gtfs/validator/SpeedTripValidator.java +++ b/src/main/java/com/conveyal/gtfs/validator/SpeedTripValidator.java @@ -3,6 +3,7 @@ import com.conveyal.gtfs.error.NewGTFSError; import com.conveyal.gtfs.error.SQLErrorStorage; import com.conveyal.gtfs.loader.Feed; +import com.conveyal.gtfs.model.Entity; import 
com.conveyal.gtfs.model.Route; import com.conveyal.gtfs.model.Stop; import com.conveyal.gtfs.model.StopTime; @@ -49,6 +50,8 @@ public void validateTrip(Trip trip, Route route, List stopTimes, List< Stop currStop = stops.get(i); // Distance is accumulated in case times are not provided for some StopTimes. distanceMeters += fastDistance(currStop.stop_lat, currStop.stop_lon, prevStop.stop_lat, prevStop.stop_lon); + // Check that shape_dist_traveled is increasing. + checkShapeDistTraveled(prevStopTime, currStopTime); if (missingBothTimes(currStopTime)) { // FixMissingTimes has already been called, so both arrival and departure time are missing. // The spec allows this. Other than accumulating distance, skip this StopTime. @@ -82,6 +85,20 @@ public void validateTrip(Trip trip, Route route, List stopTimes, List< } } + /** + * Register shape dist traveled error if previous stop time's value was missing and the current value is + * not (if at least one stop time has a value, all stop times for the trip should) OR if current value + * is not greater than previous value. + */ + private void checkShapeDistTraveled(StopTime previous, StopTime current) { + if ( + (previous.shape_dist_traveled == Entity.DOUBLE_MISSING && current.shape_dist_traveled != Entity.DOUBLE_MISSING) || + current.shape_dist_traveled <= previous.shape_dist_traveled + ) { + registerError(current, SHAPE_DIST_TRAVELED_NOT_INCREASING, current.shape_dist_traveled); + } + } + /** * Completing this feed validator means checking if there were any unrounded travel times in the feed and (if so) * registering any zero travel time errors that were passed over before the first unrounded travel time was From e07f019345c42e8acd8ab278ac88364868d78ceb Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Wed, 19 Dec 2018 14:59:56 -0500 Subject: [PATCH 13/24] fix(frequencies): increase headway seconds max to 6 hours closes #148 --- src/main/java/com/conveyal/gtfs/loader/Table.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/main/java/com/conveyal/gtfs/loader/Table.java b/src/main/java/com/conveyal/gtfs/loader/Table.java index 9c6b94b7b..3953e1fe5 100644 --- a/src/main/java/com/conveyal/gtfs/loader/Table.java +++ b/src/main/java/com/conveyal/gtfs/loader/Table.java @@ -281,7 +281,10 @@ public Table (String name, Class entityClass, Requirement requ new StringField("trip_id", REQUIRED).isReferenceTo(TRIPS), new TimeField("start_time", REQUIRED), new TimeField("end_time", REQUIRED), - new IntegerField("headway_secs", REQUIRED, 20, 60*60*2), + // Set max headway seconds to the equivalent of 6 hours. This should leave space for any very long headways + // (e.g., a ferry running exact times at a 4 hour headway), but will catch cases where milliseconds were + // exported accidentally. 
+ new IntegerField("headway_secs", REQUIRED, 20, 60*60*6), new IntegerField("exact_times", OPTIONAL, 1)) .withParentTable(TRIPS) .keyFieldIsNotUnique(); From 57566ec8876faf2bf5b60b9923381657d2034709 Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Wed, 19 Dec 2018 16:49:12 -0500 Subject: [PATCH 14/24] feat(validator): validate stop names and trip headsigns refs #167 --- .../conveyal/gtfs/error/NewGTFSErrorType.java | 4 ++ .../gtfs/validator/NamesValidator.java | 38 ++++++++++++++++++- 2 files changed, 41 insertions(+), 1 deletion(-) diff --git a/src/main/java/com/conveyal/gtfs/error/NewGTFSErrorType.java b/src/main/java/com/conveyal/gtfs/error/NewGTFSErrorType.java index 02fc5075d..b35037642 100644 --- a/src/main/java/com/conveyal/gtfs/error/NewGTFSErrorType.java +++ b/src/main/java/com/conveyal/gtfs/error/NewGTFSErrorType.java @@ -44,10 +44,14 @@ public enum NewGTFSErrorType { SERVICE_NEVER_ACTIVE(Priority.MEDIUM, "A service code was defined, but is never active on any date."), SERVICE_UNUSED(Priority.MEDIUM, "A service code was defined, but is never referenced by any trips."), SHAPE_DIST_TRAVELED_NOT_INCREASING(Priority.MEDIUM, "Shape distance traveled must increase with stop times."), + STOP_DESCRIPTION_SAME_AS_NAME(Priority.LOW, "The description of a stop is identical to its name, so does not add any information."), STOP_LOW_POPULATION_DENSITY(Priority.HIGH, "A stop is located in a geographic area with very low human population density."), + STOP_NAME_MISSING(Priority.MEDIUM, "A stop does not have a name."), STOP_GEOGRAPHIC_OUTLIER(Priority.HIGH, "This stop is located very far from the middle 90% of stops in this feed."), STOP_UNUSED(Priority.MEDIUM, "This stop is not referenced by any trips."), TRIP_EMPTY(Priority.HIGH, "This trip is defined but has no stop times."), + TRIP_HEADSIGN_CONTAINS_ROUTE_NAME(Priority.LOW, "A trip headsign contains the route name, but should only contain information to distinguish it from other trips for the route."), + TRIP_HEADSIGN_SHOULD_DESCRIBE_DESTINATION_OR_WAYPOINTS(Priority.LOW, "A trip headsign begins with 'to' or 'towards', but should begin with destination or direction and optionally include waypoints with 'via'"), TRIP_NEVER_ACTIVE(Priority.MEDIUM, "A trip is defined, but its service is never running on any date."), ROUTE_UNUSED(Priority.HIGH, "This route is defined but has no trips."), TRAVEL_DISTANCE_ZERO(Priority.MEDIUM, "The vehicle does not cover any distance between the last stop and this one."), diff --git a/src/main/java/com/conveyal/gtfs/validator/NamesValidator.java b/src/main/java/com/conveyal/gtfs/validator/NamesValidator.java index 913b56453..889a3679b 100644 --- a/src/main/java/com/conveyal/gtfs/validator/NamesValidator.java +++ b/src/main/java/com/conveyal/gtfs/validator/NamesValidator.java @@ -3,6 +3,8 @@ import com.conveyal.gtfs.error.SQLErrorStorage; import com.conveyal.gtfs.loader.Feed; import com.conveyal.gtfs.model.Route; +import com.conveyal.gtfs.model.Stop; +import com.conveyal.gtfs.model.Trip; import static com.conveyal.gtfs.error.NewGTFSErrorType.*; @@ -41,7 +43,41 @@ public void validate() { // TODO we want some additional checking for extended route types. } } - // TODO Check trips and all other tables. + // Check stops + for (Stop stop : feed.stops) { + String name = normalize(stop.stop_name); + String desc = normalize(stop.stop_desc); + // Stops must be named. + if (name.isEmpty()) { + registerError(stop, STOP_NAME_MISSING); + } + // If provided, the description of a stop should be more informative than its name. 
+ if (!desc.isEmpty() && desc.equals(name)) { + registerError(stop, STOP_DESCRIPTION_SAME_AS_NAME, desc); + } + } + // Check trips + for (Trip trip : feed.trips) { + String headsign = normalize(trip.trip_headsign); + // TODO: check trip short name? +// String shortName = normalize(trip.trip_short_name); + Route route = feed.routes.get(trip.route_id); + String routeShortName = "", routeLongName = ""; + if (route != null) { + routeShortName = normalize(route.route_short_name); + routeLongName = normalize(route.route_long_name); + } + // Trip headsign should not duplicate route name. + if (!headsign.isEmpty() && (headsign.contains(routeShortName) || headsign.contains(routeLongName))) { + registerError(trip, TRIP_HEADSIGN_CONTAINS_ROUTE_NAME, headsign); + } + // Trip headsign should not begin with "to" or "towards" (note: headsign normalized to lowercase). Headsigns + // should follow one of the patterns defined in the best practices: http://gtfs.org/best-practices#tripstxt + if (headsign.startsWith("to ") || headsign.startsWith("towards ")) { + registerError(trip, TRIP_HEADSIGN_SHOULD_DESCRIBE_DESTINATION_OR_WAYPOINTS, headsign); + } + } + // TODO Are there other tables we're not checking? } /** @return a non-null String that is lower case and has no leading or trailing whitespace */ From 040a9f8b7317e48a5091a4717b23b215d828f302 Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Thu, 3 Jan 2019 13:08:36 -0500 Subject: [PATCH 15/24] test(JdbcTableWriter): add CRUD tests for trips and basic create test for frequency --- .../gtfs/graphql/GraphQLGtfsSchema.java | 2 +- .../gtfs/loader/JDBCTableWriterTest.java | 185 ++++++++++++++++-- .../com/conveyal/gtfs/util/CalendarDTO.java | 16 ++ .../com/conveyal/gtfs/util/FrequencyDTO.java | 9 + .../com/conveyal/gtfs/util/PatternDTO.java | 13 ++ .../conveyal/gtfs/util/PatternStopDTO.java | 20 ++ .../com/conveyal/gtfs/util/ShapePointDTO.java | 6 + .../java/com/conveyal/gtfs/util/StopDTO.java | 17 ++ .../com/conveyal/gtfs/util/StopTimeDTO.java | 26 +++ .../java/com/conveyal/gtfs/util/TripDTO.java | 18 ++ .../java/com/conveyal/gtfs/util/UtilTest.java | 7 +- 11 files changed, 300 insertions(+), 19 deletions(-) create mode 100644 src/test/java/com/conveyal/gtfs/util/CalendarDTO.java create mode 100644 src/test/java/com/conveyal/gtfs/util/FrequencyDTO.java create mode 100644 src/test/java/com/conveyal/gtfs/util/PatternDTO.java create mode 100644 src/test/java/com/conveyal/gtfs/util/PatternStopDTO.java create mode 100644 src/test/java/com/conveyal/gtfs/util/ShapePointDTO.java create mode 100644 src/test/java/com/conveyal/gtfs/util/StopDTO.java create mode 100644 src/test/java/com/conveyal/gtfs/util/StopTimeDTO.java create mode 100644 src/test/java/com/conveyal/gtfs/util/TripDTO.java diff --git a/src/main/java/com/conveyal/gtfs/graphql/GraphQLGtfsSchema.java b/src/main/java/com/conveyal/gtfs/graphql/GraphQLGtfsSchema.java index b5702b85e..d0ee71372 100644 --- a/src/main/java/com/conveyal/gtfs/graphql/GraphQLGtfsSchema.java +++ b/src/main/java/com/conveyal/gtfs/graphql/GraphQLGtfsSchema.java @@ -381,6 +381,7 @@ public class GraphQLGtfsSchema { .field(MapFetcher.field("shape_dist_traveled", GraphQLFloat)) .field(MapFetcher.field("drop_off_type", GraphQLInt)) .field(MapFetcher.field("pickup_type", GraphQLInt)) + .field(MapFetcher.field("stop_sequence", GraphQLInt)) .field(MapFetcher.field("timepoint", GraphQLInt)) // FIXME: This will only returns a list with one stop entity (unless there is a referential integrity issue) // Should this be modified to be an object, rather 
than a list? @@ -390,7 +391,6 @@ public class GraphQLGtfsSchema { .dataFetcher(new JDBCFetcher("stops", "stop_id")) .build() ) - .field(MapFetcher.field("stop_sequence", GraphQLInt)) .build(); /** diff --git a/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java b/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java index 44a418b4a..57c09b21e 100644 --- a/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java +++ b/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java @@ -1,11 +1,20 @@ package com.conveyal.gtfs.loader; import com.conveyal.gtfs.TestUtils; +import com.conveyal.gtfs.util.CalendarDTO; import com.conveyal.gtfs.util.FareDTO; import com.conveyal.gtfs.util.FareRuleDTO; import com.conveyal.gtfs.util.FeedInfoDTO; +import com.conveyal.gtfs.util.FrequencyDTO; import com.conveyal.gtfs.util.InvalidNamespaceException; +import com.conveyal.gtfs.util.PatternDTO; +import com.conveyal.gtfs.util.PatternStopDTO; import com.conveyal.gtfs.util.RouteDTO; +import com.conveyal.gtfs.util.ShapePointDTO; +import com.conveyal.gtfs.util.StopDTO; +import com.conveyal.gtfs.util.StopTimeDTO; +import com.conveyal.gtfs.util.TripDTO; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -18,12 +27,19 @@ import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; +import java.util.UUID; import static com.conveyal.gtfs.GTFS.createDataSource; import static com.conveyal.gtfs.GTFS.makeSnapshot; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThat; +/** + * This class contains CRUD tests for {@link JdbcTableWriter} (i.e., editing GTFS entities in the RDBMS). Set up + * consists of creating a scratch database and an empty feed snapshot, which is the necessary starting condition + * for building a GTFS feed from scratch. It then runs the various CRUD tests and finishes by dropping the database + * (even if tests fail). + */ public class JDBCTableWriterTest { private static final Logger LOG = LoggerFactory.getLogger(JDBCTableWriterTest.class); @@ -120,6 +136,10 @@ public void canCreateUpdateAndDeleteFeedInfoEntities() throws IOException, SQLEx )); } + /** + * Ensure that potentially malicious SQL injection is sanitized properly during create operations. + * TODO: We should perhaps perform this check on multiple entities and for update and/or delete operations.
+ */ @Test public void canPreventSQLInjection() throws IOException, SQLException, InvalidNamespaceException { // create new object to be saved @@ -246,23 +266,8 @@ public void canCreateUpdateAndDeleteRoutes() throws IOException, SQLException, I final Class routeDTOClass = RouteDTO.class; // create new object to be saved - RouteDTO routeInput = new RouteDTO(); String routeId = "500"; - routeInput.route_id = routeId; - routeInput.agency_id = "RTA"; - // Empty value should be permitted for transfers and transfer_duration - routeInput.route_short_name = "500"; - routeInput.route_long_name = "Hollingsworth"; - routeInput.route_type = 3; - - // convert object to json and save it - JdbcTableWriter createTableWriter = createTestTableWriter(routeTable); - String createOutput = createTableWriter.create(mapper.writeValueAsString(routeInput), true); - LOG.info("create {} output:", routeTable.name); - LOG.info(createOutput); - - // parse output - RouteDTO createdRoute = mapper.readValue(createOutput, routeDTOClass); + RouteDTO createdRoute = createSimpleTestRoute(routeId, "RTA", "500", "Hollingsworth", 3); // make sure saved data matches expected data assertThat(createdRoute.route_id, equalTo(routeId)); @@ -307,6 +312,154 @@ public void canCreateUpdateAndDeleteRoutes() throws IOException, SQLException, I )); } + /** + * Creates a simple route for testing. + */ + private static RouteDTO createSimpleTestRoute(String routeId, String agencyId, String shortName, String longName, int routeType) throws InvalidNamespaceException, IOException, SQLException { + RouteDTO input = new RouteDTO(); + input.route_id = routeId; + input.agency_id = agencyId; + // Empty value should be permitted for transfers and transfer_duration + input.route_short_name = shortName; + input.route_long_name = longName; + input.route_type = routeType; + // convert object to json and save it + JdbcTableWriter createTableWriter = createTestTableWriter(Table.ROUTES); + String output = createTableWriter.create(mapper.writeValueAsString(input), true); + LOG.info("create {} output:", Table.ROUTES.name); + LOG.info(output); + // parse output + return mapper.readValue(output, RouteDTO.class); + } + + /** + * Creates a pattern by first creating a route and then a pattern for that route. + */ + private static PatternDTO createSimpleRouteAndPattern(String routeId, String patternId, String name) throws InvalidNamespaceException, SQLException, IOException { + // Create new route + createSimpleTestRoute(routeId, "RTA", "500", "Hollingsworth", 3); + // Create new pattern for route + PatternDTO input = new PatternDTO(); + input.pattern_id = patternId; + input.route_id = routeId; + input.name = name; + input.use_frequency = 0; + input.shapes = new ShapePointDTO[]{}; + input.pattern_stops = new PatternStopDTO[]{}; + // Write the pattern to the database + JdbcTableWriter createPatternWriter = createTestTableWriter(Table.PATTERNS); + String output = createPatternWriter.create(mapper.writeValueAsString(input), true); + LOG.info("create {} output:", Table.PATTERNS.name); + LOG.info(output); + // Parse output + return mapper.readValue(output, PatternDTO.class); + } + + /** + * Checks that creating, updating, and deleting a trip functions properly. As a part of this test, a service calendar, + * stops, a route, and a pattern are all created because they are necessary for a trip to be stored. This also tests + * updating a pattern with pattern stops and creating a trip with a frequency entry. 
+ */ + @Test + public void canCreateUpdateAndDeleteTrips() throws IOException, SQLException, InvalidNamespaceException { + // Store Table and Class values for use in test. + final Table tripsTable = Table.TRIPS; + // Create service calendar and stops, both of which are necessary to construct a complete pattern. + String serviceId = "1"; + createWeekdayCalendar(serviceId, "20180103", "20180104"); + String firstStopId = "1"; + String lastStopId = "2"; + createSimpleStop(firstStopId, "First Stop", 34.2222, -87.333); + createSimpleStop(lastStopId, "Last Stop", 34.2233, -87.334); + String routeId = "700"; + String patternId = UUID.randomUUID().toString(); + PatternDTO createdPattern = createSimpleRouteAndPattern(routeId, patternId, "The Loop"); + // Update pattern with pattern stops and TODO shape points + JdbcTableWriter patternUpdater = createTestTableWriter(Table.PATTERNS); + createdPattern.pattern_stops = new PatternStopDTO[]{ + new PatternStopDTO(patternId, firstStopId, 0), + new PatternStopDTO(patternId, lastStopId, 1) + }; + patternUpdater.update(createdPattern.id, mapper.writeValueAsString(createdPattern), true); + // Create new trip for the pattern + JdbcTableWriter createTripWriter = createTestTableWriter(tripsTable); + TripDTO tripInput = new TripDTO(); + tripInput.pattern_id = createdPattern.pattern_id; + tripInput.route_id = routeId; + tripInput.service_id = serviceId; + tripInput.stop_times = new StopTimeDTO[]{ + new StopTimeDTO(firstStopId, 0, 0, 0), + new StopTimeDTO(lastStopId, 60, 60, 1) + }; + // FIXME: Adding a frequency entry to this trip should not be permitted because the pattern has useFrequency set + // to false (0). + FrequencyDTO frequency = new FrequencyDTO(); + int startTime = 6 * 60 * 60; + frequency.start_time = startTime; + frequency.end_time = 9 * 60 * 60; + frequency.headway_secs = 15 * 60; + tripInput.frequencies = new FrequencyDTO[]{frequency}; + String createTripOutput = createTripWriter.create(mapper.writeValueAsString(tripInput), true); + TripDTO createdTrip = mapper.readValue(createTripOutput, TripDTO.class); + // Update trip + // TODO: Add update and delete tests for updating pattern stops, stop_times, and frequencies. 
+ String updatedTripId = "100A"; + createdTrip.trip_id = updatedTripId; + JdbcTableWriter updateTripWriter = createTestTableWriter(tripsTable); + String updateTripOutput = updateTripWriter.update(tripInput.id, mapper.writeValueAsString(createdTrip), true); + TripDTO updatedTrip = mapper.readValue(updateTripOutput, TripDTO.class); + // Check that saved data matches expected data + assertThat(updatedTrip.frequencies[0].start_time, equalTo(startTime)); + assertThat(updatedTrip.trip_id, equalTo(updatedTripId)); + // Delete trip record + JdbcTableWriter deleteTripWriter = createTestTableWriter(tripsTable); + int deleteOutput = deleteTripWriter.delete( + createdTrip.id, + true + ); + LOG.info("deleted {} records from {}", deleteOutput, tripsTable.name); + // Check that route record does not exist in DB + assertThatSqlQueryYieldsZeroRows( + String.format( + "select * from %s.%s where id=%d", + testNamespace, + tripsTable.name, + createdTrip.id + )); + } + + private static StopDTO createSimpleStop(String stopId, String stopName, double latitude, double longitude) throws InvalidNamespaceException, IOException, SQLException { + JdbcTableWriter createStopWriter = new JdbcTableWriter(Table.STOPS, testDataSource, testNamespace); + StopDTO input = new StopDTO(); + input.stop_id = stopId; + input.stop_name = stopName; + input.stop_lat = latitude; + input.stop_lon = longitude; + String output = createStopWriter.create(mapper.writeValueAsString(input), true); + LOG.info("create {} output:", Table.STOPS.name); + LOG.info(output); + return mapper.readValue(output, StopDTO.class); + } + + private static CalendarDTO createWeekdayCalendar(String serviceId, String startDate, String endDate) throws IOException, SQLException, InvalidNamespaceException { + JdbcTableWriter createCalendarWriter = new JdbcTableWriter(Table.CALENDAR, testDataSource, testNamespace); + CalendarDTO calendarInput = new CalendarDTO(); + calendarInput.service_id = serviceId; + calendarInput.monday = 1; + calendarInput.tuesday = 1; + calendarInput.wednesday = 1; + calendarInput.thursday = 1; + calendarInput.friday = 1; + calendarInput.saturday = 0; + calendarInput.sunday = 0; + calendarInput.start_date = startDate; + calendarInput.end_date = endDate; + String output = createCalendarWriter.create(mapper.writeValueAsString(calendarInput), true); + LOG.info("create {} output:", Table.CALENDAR.name); + LOG.info(output); + return mapper.readValue(output, CalendarDTO.class); + } + @AfterClass public static void tearDownClass() { TestUtils.dropDB(testDBName); diff --git a/src/test/java/com/conveyal/gtfs/util/CalendarDTO.java b/src/test/java/com/conveyal/gtfs/util/CalendarDTO.java new file mode 100644 index 000000000..169ffa5ac --- /dev/null +++ b/src/test/java/com/conveyal/gtfs/util/CalendarDTO.java @@ -0,0 +1,16 @@ +package com.conveyal.gtfs.util; + +public class CalendarDTO { + public Integer id; + public String service_id; + public Integer monday; + public Integer tuesday; + public Integer wednesday; + public Integer thursday; + public Integer friday; + public Integer saturday; + public Integer sunday; + public String start_date; + public String end_date; + public String description; +} diff --git a/src/test/java/com/conveyal/gtfs/util/FrequencyDTO.java b/src/test/java/com/conveyal/gtfs/util/FrequencyDTO.java new file mode 100644 index 000000000..a59617b4a --- /dev/null +++ b/src/test/java/com/conveyal/gtfs/util/FrequencyDTO.java @@ -0,0 +1,9 @@ +package com.conveyal.gtfs.util; + +public class FrequencyDTO { + public String trip_id; + public 
Integer start_time; + public Integer end_time; + public Integer headway_secs; + public Integer exact_times; +} diff --git a/src/test/java/com/conveyal/gtfs/util/PatternDTO.java b/src/test/java/com/conveyal/gtfs/util/PatternDTO.java new file mode 100644 index 000000000..eb78e516d --- /dev/null +++ b/src/test/java/com/conveyal/gtfs/util/PatternDTO.java @@ -0,0 +1,13 @@ +package com.conveyal.gtfs.util; + +public class PatternDTO { + public Integer id; + public String pattern_id; + public String shape_id; + public String route_id; + public Integer direction_id; + public Integer use_frequency; + public String name; + public PatternStopDTO[] pattern_stops; + public ShapePointDTO[] shapes; +} \ No newline at end of file diff --git a/src/test/java/com/conveyal/gtfs/util/PatternStopDTO.java b/src/test/java/com/conveyal/gtfs/util/PatternStopDTO.java new file mode 100644 index 000000000..b6daf838d --- /dev/null +++ b/src/test/java/com/conveyal/gtfs/util/PatternStopDTO.java @@ -0,0 +1,20 @@ +package com.conveyal.gtfs.util; + +public class PatternStopDTO { + public Integer id; + public String pattern_id; + public String stop_id; + public Integer default_travel_time; + public Integer default_dwell_time; + public Double shape_dist_traveled; + public Integer drop_off_type; + public Integer pickup_type; + public Integer stop_sequence; + public Integer timepoint; + + public PatternStopDTO (String patternId, String stopId, int stopSequence) { + pattern_id = patternId; + stop_id = stopId; + stop_sequence = stopSequence; + } +} diff --git a/src/test/java/com/conveyal/gtfs/util/ShapePointDTO.java b/src/test/java/com/conveyal/gtfs/util/ShapePointDTO.java new file mode 100644 index 000000000..05ed6d6da --- /dev/null +++ b/src/test/java/com/conveyal/gtfs/util/ShapePointDTO.java @@ -0,0 +1,6 @@ +package com.conveyal.gtfs.util; + +// TODO add fields +public class ShapePointDTO { + +} diff --git a/src/test/java/com/conveyal/gtfs/util/StopDTO.java b/src/test/java/com/conveyal/gtfs/util/StopDTO.java new file mode 100644 index 000000000..4ca821e59 --- /dev/null +++ b/src/test/java/com/conveyal/gtfs/util/StopDTO.java @@ -0,0 +1,17 @@ +package com.conveyal.gtfs.util; + +public class StopDTO { + public Integer id; + public String stop_id; + public String stop_name; + public String stop_code; + public String stop_desc; + public Double stop_lon; + public Double stop_lat; + public String zone_id; + public String stop_url; + public String stop_timezone; + public String parent_station; + public Integer location_type; + public Integer wheelchair_boarding; +} diff --git a/src/test/java/com/conveyal/gtfs/util/StopTimeDTO.java b/src/test/java/com/conveyal/gtfs/util/StopTimeDTO.java new file mode 100644 index 000000000..bfbc56424 --- /dev/null +++ b/src/test/java/com/conveyal/gtfs/util/StopTimeDTO.java @@ -0,0 +1,26 @@ +package com.conveyal.gtfs.util; + +public class StopTimeDTO { + public String trip_id; + public String stop_id; + public Integer stop_sequence; + public Integer arrival_time; + public Integer departure_time; + public String stop_headsign; + public Integer timepoint; + public Integer drop_off_type; + public Integer pickup_type; + public Double shape_dist_traveled; + + /** + * Empty constructor for deserialization + */ + public StopTimeDTO () {} + + public StopTimeDTO (String stopId, Integer arrivalTime, Integer departureTime, Integer stopSequence) { + stop_id = stopId; + arrival_time = arrivalTime; + departure_time = departureTime; + stop_sequence = stopSequence; + } +} diff --git 
a/src/test/java/com/conveyal/gtfs/util/TripDTO.java b/src/test/java/com/conveyal/gtfs/util/TripDTO.java new file mode 100644 index 000000000..ccd43a7e0 --- /dev/null +++ b/src/test/java/com/conveyal/gtfs/util/TripDTO.java @@ -0,0 +1,18 @@ +package com.conveyal.gtfs.util; + +public class TripDTO { + public Integer id; + public String trip_id; + public String trip_headsign; + public String trip_short_name; + public String block_id; + public Integer direction_id; + public String route_id; + public String service_id; + public Integer wheelchair_accessible; + public Integer bikes_allowed; + public String shape_id; + public String pattern_id; + public StopTimeDTO[] stop_times; + public FrequencyDTO[] frequencies; +} diff --git a/src/test/java/com/conveyal/gtfs/util/UtilTest.java b/src/test/java/com/conveyal/gtfs/util/UtilTest.java index c2c6ccd36..982ea31a4 100644 --- a/src/test/java/com/conveyal/gtfs/util/UtilTest.java +++ b/src/test/java/com/conveyal/gtfs/util/UtilTest.java @@ -15,8 +15,11 @@ */ public class UtilTest { - @Before // setup() - public void before() throws Exception { + /** + * Ensure locale (for {@link #canHumanize()}) is set correctly before doing anything else. + */ + @Before + public void before() { java.util.Locale.setDefault(java.util.Locale.US); } From 667d3fc8b0f1f00ed24a5a08593152ecf3ce76a1 Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Thu, 3 Jan 2019 13:09:00 -0500 Subject: [PATCH 16/24] refactor: address PR comments about inlining string literals --- src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java b/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java index 51aed6726..90e6f2abd 100644 --- a/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java +++ b/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java @@ -581,11 +581,9 @@ private void updateChildTable( private int updateStopTimesForPatternStop(ObjectNode patternStop, int previousTravelTime) throws SQLException { String sql = String.format( "update %s.stop_times st set arrival_time = ?, departure_time = ? from %s.trips t " + - "where st.trip_id = t.trip_id AND t.%s = ? AND st.%s = ?", + "where st.trip_id = t.trip_id AND t.pattern_id = ? AND st.stop_sequence = ?", tablePrefix, - tablePrefix, - "pattern_id", - "stop_sequence" + tablePrefix ); // Prepare the statement and set statement parameters PreparedStatement statement = connection.prepareStatement(sql); From 55f2d879082c8ff5306f7dca616e936154d1ad89 Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Thu, 3 Jan 2019 15:47:16 -0500 Subject: [PATCH 17/24] refactor(writer): fix check for useFrequency when updating patterns or trips This refactor includes a check for Pattern#useFrequency when either a pattern is updated or trips are updated in order to appropriately trigger other behaviors (for example, keeping stop times in sync with pattern stop travel times or disallowing frequency entries for timetable-based patterns).
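For illustration, the rejected case looks roughly like the following sketch, written in the style of JDBCTableWriterTest (timetablePatternId, testDataSource, testNamespace, and mapper stand in for fixtures that the test class sets up, and the referenced pattern is assumed to have use_frequency = 0):

    TripDTO trip = new TripDTO();
    trip.pattern_id = timetablePatternId;
    trip.route_id = "700";
    trip.service_id = "1";
    trip.stop_times = new StopTimeDTO[]{};
    FrequencyDTO frequency = new FrequencyDTO();
    frequency.start_time = 6 * 60 * 60;
    frequency.end_time = 9 * 60 * 60;
    frequency.headway_secs = 15 * 60;
    trip.frequencies = new FrequencyDTO[]{frequency};
    JdbcTableWriter writer = new JdbcTableWriter(Table.TRIPS, testDataSource, testNamespace);
    try {
        writer.create(mapper.writeValueAsString(trip), true);
        // Should not be reached: the trip references a timetable-based pattern.
    } catch (IllegalStateException e) {
        // Expected: frequency entries cannot be created for a timetable-based pattern.
    }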
fixes #173 --- .../conveyal/gtfs/loader/JdbcTableWriter.java | 37 ++++++++++++++----- .../gtfs/loader/JDBCTableWriterTest.java | 9 +++-- 2 files changed, 33 insertions(+), 13 deletions(-) diff --git a/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java b/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java index 90e6f2abd..4b341081e 100644 --- a/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java +++ b/src/main/java/com/conveyal/gtfs/loader/JdbcTableWriter.java @@ -169,12 +169,27 @@ public String update(Integer id, String json, boolean autoCommit) throws SQLExce int entityId = isCreating ? (int) newId : id; // Cast child entities to array node to iterate over. ArrayNode childEntitiesArray = (ArrayNode)childEntities; - boolean foreignReferenceIsFrequencyPattern = jsonObject.has("use_frequency") && - jsonObject.get("use_frequency").asBoolean(); + boolean referencedPatternUsesFrequencies = false; + // If an entity references a pattern (e.g., pattern stop or trip), determine whether the pattern uses + // frequencies because this impacts update behaviors, for example whether stop times are kept in + // sync with default travel times or whether frequencies are allowed to be nested with a JSON trip. + if (jsonObject.has("pattern_id") && !jsonObject.get("pattern_id").isNull()) { + PreparedStatement statement = connection.prepareStatement(String.format( + "select use_frequency from %s.%s where pattern_id = ?", + tablePrefix, + Table.PATTERNS.name + )); + statement.setString(1, jsonObject.get("pattern_id").asText()); + LOG.info(statement.toString()); + ResultSet selectResults = statement.executeQuery(); + while (selectResults.next()) { + referencedPatternUsesFrequencies = selectResults.getBoolean(1); + } + } updateChildTable( childEntitiesArray, entityId, - foreignReferenceIsFrequencyPattern, + referencedPatternUsesFrequencies, isCreating, referencingTable, connection @@ -429,7 +444,7 @@ private void setStatementParameters( private void updateChildTable( ArrayNode subEntities, int id, - boolean foreignReferenceIsFrequencyPattern, + boolean referencedPatternUsesFrequencies, boolean isCreatingNewEntity, Table subTable, Connection connection @@ -442,9 +457,13 @@ private void updateChildTable( // Get parent entity's key value keyValue = getValueForId(id, keyField.name, tablePrefix, specTable, connection); String childTableName = String.join(".", tablePrefix, subTable.name); + if (!referencedPatternUsesFrequencies && subTable.name.equals(Table.FREQUENCIES.name) && subEntities.size() > 0) { + // Do not permit the illegal state where frequency entries are being added/modified for a timetable pattern. + throw new IllegalStateException("Cannot create or update frequency entries for a timetable-based pattern."); + } // Reconciling pattern stops MUST happen before original pattern stops are deleted in below block (with // getUpdateReferencesSql) - if ("pattern_stops".equals(subTable.name)) { + if (Table.PATTERN_STOP.name.equals(subTable.name)) { List newPatternStops = new ArrayList<>(); // Clean up pattern stop ID fields (passed in as string ID from datatools-ui to avoid id collision) for (JsonNode node : subEntities) { @@ -507,7 +526,7 @@ private void updateChildTable( } // Update linked stop times fields for each updated pattern stop (e.g., timepoint, pickup/drop off type). 
if ("pattern_stops".equals(subTable.name)) { - if (foreignReferenceIsFrequencyPattern) { + if (referencedPatternUsesFrequencies) { // Update stop times linked to pattern stop if the pattern uses frequencies and accumulate time. // Default travel and dwell time behave as "linked fields" for associated stop times. In other // words, frequency trips in the editor must match the pattern stop travel times. @@ -1232,11 +1251,11 @@ private static String getValueForId(int id, String fieldName, String namespace, LOG.info(selectIdSql); Statement selectIdStatement = connection.createStatement(); ResultSet selectResults = selectIdStatement.executeQuery(selectIdSql); - String keyValue = null; + String value = null; while (selectResults.next()) { - keyValue = selectResults.getString(1); + value = selectResults.getString(1); } - return keyValue; + return value; } /** diff --git a/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java b/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java index 57c09b21e..8fee6c2e7 100644 --- a/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java +++ b/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java @@ -374,13 +374,16 @@ public void canCreateUpdateAndDeleteTrips() throws IOException, SQLException, In String routeId = "700"; String patternId = UUID.randomUUID().toString(); PatternDTO createdPattern = createSimpleRouteAndPattern(routeId, patternId, "The Loop"); - // Update pattern with pattern stops and TODO shape points + // TODO Test that a frequency trip entry cannot be added for a timetable-based pattern. + // Update pattern with pattern stops, set to use frequencies, and TODO shape points JdbcTableWriter patternUpdater = createTestTableWriter(Table.PATTERNS); + createdPattern.use_frequency = 1; createdPattern.pattern_stops = new PatternStopDTO[]{ new PatternStopDTO(patternId, firstStopId, 0), new PatternStopDTO(patternId, lastStopId, 1) }; - patternUpdater.update(createdPattern.id, mapper.writeValueAsString(createdPattern), true); + String updatedPatternOutput = patternUpdater.update(createdPattern.id, mapper.writeValueAsString(createdPattern), true); + LOG.info("Updated pattern output: {}", updatedPatternOutput); // Create new trip for the pattern JdbcTableWriter createTripWriter = createTestTableWriter(tripsTable); TripDTO tripInput = new TripDTO(); @@ -391,8 +394,6 @@ public void canCreateUpdateAndDeleteTrips() throws IOException, SQLException, In new StopTimeDTO(firstStopId, 0, 0, 0), new StopTimeDTO(lastStopId, 60, 60, 1) }; - // FIXME: Adding a frequency entry to this trip should not be permitted because the pattern has useFrequency set - // to false (0). 
FrequencyDTO frequency = new FrequencyDTO(); int startTime = 6 * 60 * 60; frequency.start_time = startTime; From 6e9042f33fb3dc78c926b466c71303bf1dae30d0 Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Thu, 3 Jan 2019 16:31:08 -0500 Subject: [PATCH 18/24] test(JdbcTableWriter): improve writer tests for frequencies --- .../gtfs/loader/JDBCTableWriterTest.java | 102 ++++++++++-------- 1 file changed, 60 insertions(+), 42 deletions(-) diff --git a/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java b/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java index 8fee6c2e7..46966c1ba 100644 --- a/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java +++ b/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java @@ -14,7 +14,6 @@ import com.conveyal.gtfs.util.StopDTO; import com.conveyal.gtfs.util.StopTimeDTO; import com.conveyal.gtfs.util.TripDTO; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -27,7 +26,6 @@ import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; -import java.util.UUID; import static com.conveyal.gtfs.GTFS.createDataSource; import static com.conveyal.gtfs.GTFS.makeSnapshot; @@ -47,6 +45,9 @@ public class JDBCTableWriterTest { private static String testDBName; private static DataSource testDataSource; private static String testNamespace; + private static String simpleServiceId = "1"; + private static String firstStopId = "1"; + private static String lastStopId = "2"; private static final ObjectMapper mapper = new ObjectMapper(); private static JdbcTableWriter createTestTableWriter (Table table) throws InvalidNamespaceException { @@ -54,8 +55,8 @@ private static JdbcTableWriter createTestTableWriter (Table table) throws Invali } @BeforeClass - public static void setUpClass() throws SQLException { - // create a new database + public static void setUpClass() throws SQLException, IOException, InvalidNamespaceException { + // Create a new database testDBName = TestUtils.generateNewDB(); String dbConnectionUrl = String.format("jdbc:postgresql://localhost/%s", testDBName); testDataSource = createDataSource (dbConnectionUrl, null, null); @@ -67,10 +68,13 @@ public static void setUpClass() throws SQLException { "snapshot_of varchar)"); connection.commit(); LOG.info("feeds table created"); - - // create an empty snapshot to create a new namespace and all the tables + // Create an empty snapshot to create a new namespace and all the tables FeedLoadResult result = makeSnapshot(null, testDataSource); testNamespace = result.uniqueIdentifier; + // Create a service calendar and two stops, both of which are necessary to perform pattern and trip tests. + createWeekdayCalendar(simpleServiceId, "20180103", "20180104"); + createSimpleStop(firstStopId, "First Stop", 34.2222, -87.333); + createSimpleStop(lastStopId, "Last Stop", 34.2233, -87.334); } @Test @@ -313,7 +317,7 @@ public void canCreateUpdateAndDeleteRoutes() throws IOException, SQLException, I } /** - * Creates a simple route for testing. + * Create and store a simple route for testing. 
*/ private static RouteDTO createSimpleTestRoute(String routeId, String agencyId, String shortName, String longName, int routeType) throws InvalidNamespaceException, IOException, SQLException { RouteDTO input = new RouteDTO(); @@ -356,50 +360,38 @@ private static PatternDTO createSimpleRouteAndPattern(String routeId, String pat } /** - * Checks that creating, updating, and deleting a trip functions properly. As a part of this test, a service calendar, - * stops, a route, and a pattern are all created because they are necessary for a trip to be stored. This also tests - * updating a pattern with pattern stops and creating a trip with a frequency entry. + * Test that a frequency trip entry CANNOT be added for a timetable-based pattern. Expects an exception to be thrown. + */ + @Test(expected = IllegalStateException.class) + public void cannotCreateFrequencyForTimetablePattern() throws InvalidNamespaceException, IOException, SQLException { + PatternDTO simplePattern = createSimpleRouteAndPattern("900", "8", "The Loop"); + TripDTO tripInput = constructFrequencyTrip(simplePattern.pattern_id, simplePattern.route_id, 6 * 60 * 60); + JdbcTableWriter createTripWriter = createTestTableWriter(Table.TRIPS); + createTripWriter.create(mapper.writeValueAsString(tripInput), true); + } + + /** + * Checks that creating a frequency trip functions properly. This also updates the pattern to include pattern stops, + * which is a prerequisite for creating a frequency trip with stop times. */ @Test - public void canCreateUpdateAndDeleteTrips() throws IOException, SQLException, InvalidNamespaceException { + public void canCreateFrequencyForFrequencyPattern() throws IOException, SQLException, InvalidNamespaceException { // Store Table and Class values for use in test. final Table tripsTable = Table.TRIPS; - // Create service calendar and stops, both of which are necessary to construct a complete pattern. - String serviceId = "1"; - createWeekdayCalendar(serviceId, "20180103", "20180104"); - String firstStopId = "1"; - String lastStopId = "2"; - createSimpleStop(firstStopId, "First Stop", 34.2222, -87.333); - createSimpleStop(lastStopId, "Last Stop", 34.2233, -87.334); - String routeId = "700"; - String patternId = UUID.randomUUID().toString(); - PatternDTO createdPattern = createSimpleRouteAndPattern(routeId, patternId, "The Loop"); - // TODO Test that a frequency trip entry cannot be added for a timetable-based pattern. 
+ int startTime = 6 * 60 * 60; + PatternDTO simplePattern = createSimpleRouteAndPattern("1000", "9", "The Line"); + TripDTO tripInput = constructFrequencyTrip(simplePattern.pattern_id, simplePattern.route_id, startTime); + JdbcTableWriter createTripWriter = createTestTableWriter(tripsTable); // Update pattern with pattern stops, set to use frequencies, and TODO shape points JdbcTableWriter patternUpdater = createTestTableWriter(Table.PATTERNS); - createdPattern.use_frequency = 1; - createdPattern.pattern_stops = new PatternStopDTO[]{ - new PatternStopDTO(patternId, firstStopId, 0), - new PatternStopDTO(patternId, lastStopId, 1) + simplePattern.use_frequency = 1; + simplePattern.pattern_stops = new PatternStopDTO[]{ + new PatternStopDTO(simplePattern.pattern_id, firstStopId, 0), + new PatternStopDTO(simplePattern.pattern_id, lastStopId, 1) }; - String updatedPatternOutput = patternUpdater.update(createdPattern.id, mapper.writeValueAsString(createdPattern), true); + String updatedPatternOutput = patternUpdater.update(simplePattern.id, mapper.writeValueAsString(simplePattern), true); LOG.info("Updated pattern output: {}", updatedPatternOutput); // Create new trip for the pattern - JdbcTableWriter createTripWriter = createTestTableWriter(tripsTable); - TripDTO tripInput = new TripDTO(); - tripInput.pattern_id = createdPattern.pattern_id; - tripInput.route_id = routeId; - tripInput.service_id = serviceId; - tripInput.stop_times = new StopTimeDTO[]{ - new StopTimeDTO(firstStopId, 0, 0, 0), - new StopTimeDTO(lastStopId, 60, 60, 1) - }; - FrequencyDTO frequency = new FrequencyDTO(); - int startTime = 6 * 60 * 60; - frequency.start_time = startTime; - frequency.end_time = 9 * 60 * 60; - frequency.headway_secs = 15 * 60; - tripInput.frequencies = new FrequencyDTO[]{frequency}; String createTripOutput = createTripWriter.create(mapper.writeValueAsString(tripInput), true); TripDTO createdTrip = mapper.readValue(createTripOutput, TripDTO.class); // Update trip @@ -429,6 +421,29 @@ public void canCreateUpdateAndDeleteTrips() throws IOException, SQLException, In )); } + /** + * Construct (without writing to the database) a trip with a frequency entry. + */ + private TripDTO constructFrequencyTrip(String patternId, String routeId, int startTime) { + TripDTO tripInput = new TripDTO(); + tripInput.pattern_id = patternId; + tripInput.route_id = routeId; + tripInput.service_id = simpleServiceId; + tripInput.stop_times = new StopTimeDTO[]{ + new StopTimeDTO(firstStopId, 0, 0, 0), + new StopTimeDTO(lastStopId, 60, 60, 1) + }; + FrequencyDTO frequency = new FrequencyDTO(); + frequency.start_time = startTime; + frequency.end_time = 9 * 60 * 60; + frequency.headway_secs = 15 * 60; + tripInput.frequencies = new FrequencyDTO[]{frequency}; + return tripInput; + } + + /** + * Create and store a simple stop entity. + */ private static StopDTO createSimpleStop(String stopId, String stopName, double latitude, double longitude) throws InvalidNamespaceException, IOException, SQLException { JdbcTableWriter createStopWriter = new JdbcTableWriter(Table.STOPS, testDataSource, testNamespace); StopDTO input = new StopDTO(); @@ -442,6 +457,9 @@ private static StopDTO createSimpleStop(String stopId, String stopName, double l return mapper.readValue(output, StopDTO.class); } + /** + * Create and store a simple calendar that runs on each weekday. 
+ */ private static CalendarDTO createWeekdayCalendar(String serviceId, String startDate, String endDate) throws IOException, SQLException, InvalidNamespaceException { JdbcTableWriter createCalendarWriter = new JdbcTableWriter(Table.CALENDAR, testDataSource, testNamespace); CalendarDTO calendarInput = new CalendarDTO(); From 0fcf662c193844e12e3cafd8c9fb47edf609f5d4 Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Thu, 3 Jan 2019 16:49:00 -0500 Subject: [PATCH 19/24] refactor: fix comment language from bad copypasta --- src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java b/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java index 46966c1ba..0665ad28e 100644 --- a/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java +++ b/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java @@ -411,7 +411,7 @@ public void canCreateFrequencyForFrequencyPattern() throws IOException, SQLExcep true ); LOG.info("deleted {} records from {}", deleteOutput, tripsTable.name); - // Check that route record does not exist in DB + // Check that record does not exist in DB assertThatSqlQueryYieldsZeroRows( String.format( "select * from %s.%s where id=%d", From 67a5d569ec9d9686125efdf0fdb663c8c601e0ec Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Thu, 3 Jan 2019 17:01:01 -0500 Subject: [PATCH 20/24] test: refactor vague assertion into countValidationErrorsOfType --- src/test/java/com/conveyal/gtfs/GTFSTest.java | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/src/test/java/com/conveyal/gtfs/GTFSTest.java b/src/test/java/com/conveyal/gtfs/GTFSTest.java index 2f4fa4af3..0bf4c2237 100644 --- a/src/test/java/com/conveyal/gtfs/GTFSTest.java +++ b/src/test/java/com/conveyal/gtfs/GTFSTest.java @@ -457,7 +457,11 @@ private void assertThatImportedGtfsMeetsExpectations( // Store field mismatches here (to provide assertion statements with more details). Multimap fieldsWithMismatches = ArrayListMultimap.create(); // Check that no validators failed during validation. 
- assertThatValidationErrorsMatchExpected(connection, namespace, NewGTFSErrorType.VALIDATOR_FAILED, 0); + assertThat( + "No validators failed during GTFS import.", + countValidationErrorsOfType(connection, namespace, NewGTFSErrorType.VALIDATOR_FAILED), + equalTo(0) + ); // run through testing expectations LOG.info("testing expectations of record storage in the database"); for (PersistenceExpectation persistenceExpectation : persistenceExpectations) { @@ -540,11 +544,10 @@ private void assertThatImportedGtfsMeetsExpectations( } } - private void assertThatValidationErrorsMatchExpected( + private static int countValidationErrorsOfType( Connection connection, String namespace, - NewGTFSErrorType errorType, - int expectedCount + NewGTFSErrorType errorType ) throws SQLException { String errorCheckSql = String.format( "select * from %s.errors where error_type = '%s'", @@ -556,11 +559,7 @@ private void assertThatValidationErrorsMatchExpected( while (errorResults.next()) { errorCount++; } - assertThat( - String.format("%s errors encountered during validation should match the expected errors.", errorType), - errorCount, - equalTo(expectedCount) - ); + return errorCount; } /** From 0311a684ad1a061d6f4071d679545b8eeb6c9465 Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Fri, 4 Jan 2019 10:28:02 -0500 Subject: [PATCH 21/24] test(writer): fix failure message in assertion --- src/test/java/com/conveyal/gtfs/GTFSTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/com/conveyal/gtfs/GTFSTest.java b/src/test/java/com/conveyal/gtfs/GTFSTest.java index f655ee783..3e2c8fe69 100644 --- a/src/test/java/com/conveyal/gtfs/GTFSTest.java +++ b/src/test/java/com/conveyal/gtfs/GTFSTest.java @@ -402,7 +402,7 @@ private void assertThatImportedGtfsMeetsExpectations( Multimap fieldsWithMismatches = ArrayListMultimap.create(); // Check that no validators failed during validation. assertThat( - "No validators failed during GTFS import.", + "One or more validators failed during GTFS import.", countValidationErrorsOfType(connection, namespace, NewGTFSErrorType.VALIDATOR_FAILED), equalTo(0) ); From 37fb468f60278eb63456ca3ccec73814aad62bf3 Mon Sep 17 00:00:00 2001 From: Landon Reed Date: Fri, 4 Jan 2019 10:40:27 -0500 Subject: [PATCH 22/24] test(writer): rename frequency trip crud test --- src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java b/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java index 0665ad28e..592874ae5 100644 --- a/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java +++ b/src/test/java/com/conveyal/gtfs/loader/JDBCTableWriterTest.java @@ -375,7 +375,7 @@ public void cannotCreateFrequencyForTimetablePattern() throws InvalidNamespaceEx * which is a prerequisite for creating a frequency trip with stop times. */ @Test - public void canCreateFrequencyForFrequencyPattern() throws IOException, SQLException, InvalidNamespaceException { + public void canCreateUpdateAndDeleteFrequencyTripForFrequencyPattern() throws IOException, SQLException, InvalidNamespaceException { // Store Table and Class values for use in test. 
final Table tripsTable = Table.TRIPS; int startTime = 6 * 60 * 60; From 5fc8bd38e525cacf78c9fd0cc4a591ba848a82a3 Mon Sep 17 00:00:00 2001 From: evansiroky Date: Fri, 4 Jan 2019 09:59:47 -0800 Subject: [PATCH 23/24] ci(codecov): run codecov after semantic-release this way it will upload a report for the code coverage after all the commits made with maven-semantic-release have completed. Codecov looks at the most recent commit to master to compare the next report to, so this is needed for code coverage comparisons on PRs to master. --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 518f41399..fbf462435 100644 --- a/.travis.yml +++ b/.travis.yml @@ -64,5 +64,5 @@ notifications: # Push results to codecov.io and run semantic-release (releases only created on pushes to the master branch). after_success: - - bash <(curl -s https://codecov.io/bash) - semantic-release --prepare @conveyal/maven-semantic-release --publish @semantic-release/github,@conveyal/maven-semantic-release --verify-conditions @semantic-release/github,@conveyal/maven-semantic-release --verify-release @conveyal/maven-semantic-release --use-conveyal-workflow --dev-branch=dev + - bash <(curl -s https://codecov.io/bash) \ No newline at end of file From 6d3997efd1c858947c7497c4f3f38bf38ee43aa9 Mon Sep 17 00:00:00 2001 From: evansiroky Date: Fri, 4 Jan 2019 10:08:00 -0800 Subject: [PATCH 24/24] ci(codecov): add comment explaining placement of codecov --- .travis.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.travis.yml b/.travis.yml index fbf462435..c691130b5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -65,4 +65,6 @@ notifications: # Push results to codecov.io and run semantic-release (releases only created on pushes to the master branch). after_success: - semantic-release --prepare @conveyal/maven-semantic-release --publish @semantic-release/github,@conveyal/maven-semantic-release --verify-conditions @semantic-release/github,@conveyal/maven-semantic-release --verify-release @conveyal/maven-semantic-release --use-conveyal-workflow --dev-branch=dev + # run codecov after semantic-release because maven-semantic-release creates extra commits that + # codecov will need a report on to reference in future PRs to the release branch - bash <(curl -s https://codecov.io/bash) \ No newline at end of file
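Taken together, patches 17 and 18 change what JdbcTableWriter#create accepts: a trip that carries frequency entries but references a timetable-based pattern (use_frequency = 0) now fails fast with an IllegalStateException, as exercised by cannotCreateFrequencyForTimetablePattern above. The caller-side sketch below shows one way a client could surface that guard; the class name, helper name, and error handling are hypothetical, while the JdbcTableWriter(Table, DataSource, namespace) constructor and the create(String, boolean) call mirror the tests in this series.

import com.conveyal.gtfs.loader.JdbcTableWriter;
import com.conveyal.gtfs.loader.Table;
import com.conveyal.gtfs.util.TripDTO;
import com.fasterxml.jackson.databind.ObjectMapper;
import javax.sql.DataSource;

class FrequencyTripExample {
    private static final ObjectMapper mapper = new ObjectMapper();

    /**
     * Attempt to create a trip (possibly carrying frequency entries) and turn the
     * writer's guard into an error message instead of an unhandled exception.
     * Declared as throws Exception to keep the sketch agnostic of the writer's
     * checked exception types (SQLException, IOException, InvalidNamespaceException).
     */
    static String tryCreateTrip(DataSource dataSource, String namespace, TripDTO tripInput) throws Exception {
        JdbcTableWriter tripWriter = new JdbcTableWriter(Table.TRIPS, dataSource, namespace);
        try {
            // Throws IllegalStateException when tripInput.frequencies is non-empty but the
            // referenced pattern has use_frequency = 0.
            return tripWriter.create(mapper.writeValueAsString(tripInput), true);
        } catch (IllegalStateException e) {
            return "Could not create trip: " + e.getMessage();
        }
    }
}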