diff --git a/cwms-data-api/src/main/java/cwms/cda/api/TimeSeriesController.java b/cwms-data-api/src/main/java/cwms/cda/api/TimeSeriesController.java index bf6b08246..ed6c8afd4 100644 --- a/cwms-data-api/src/main/java/cwms/cda/api/TimeSeriesController.java +++ b/cwms-data-api/src/main/java/cwms/cda/api/TimeSeriesController.java @@ -67,7 +67,6 @@ import io.javalin.plugin.openapi.annotations.OpenApiParam; import io.javalin.plugin.openapi.annotations.OpenApiRequestBody; import io.javalin.plugin.openapi.annotations.OpenApiResponse; -import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; @@ -84,7 +83,7 @@ public class TimeSeriesController implements CrudHandler { private static final Logger logger = Logger.getLogger(TimeSeriesController.class.getName()); - + private static final String INCLUDE_ENTRY_DATE = "include-entry-date"; public static final String TAG = "TimeSeries"; public static final String STORE_RULE_DESC = "The business rule to use " + "when merging the incoming with existing data\n" @@ -204,7 +203,7 @@ public void create(@NotNull Context ctx) { TimeSeries timeSeries = deserializeTimeSeries(ctx); dao.create(timeSeries, createAsLrts, storeRule, overrideProtection); ctx.status(HttpServletResponse.SC_OK); - } catch (IOException | DataAccessException ex) { + } catch (DataAccessException ex) { CdaError re = new CdaError("Internal Error"); logger.log(Level.SEVERE, re.toString(), ex); ctx.status(HttpServletResponse.SC_INTERNAL_SERVER_ERROR).json(re); @@ -382,6 +381,8 @@ public void delete(@NotNull Context ctx, @NotNull String timeseries) { + "\n* `xml`" + "\n* `wml2` (only if name field is specified)" + "\n* `json` (default)"), + @OpenApiParam(name = INCLUDE_ENTRY_DATE, type = Boolean.class, description = "Specifies " + + "whether to include the data entry date in the response. Default is false."), @OpenApiParam(name = PAGE, description = "This end point can return large amounts " + "of data as a series of pages. This parameter is used to describes the " + "current location in the response stream. 
This is an opaque " @@ -431,6 +432,9 @@ public void getAll(@NotNull Context ctx) { ZonedDateTime versionDate = queryParamAsZdt(ctx, VERSION_DATE); + boolean includeEntryDate = ctx.queryParamAsClass(INCLUDE_ENTRY_DATE, Boolean.class) + .getOrDefault(false); + // The following parameters are only used for jsonv2 and xmlv2 String cursor = queryParamAsClass(ctx, new String[]{PAGE, CURSOR}, String.class, "", metrics, name(TimeSeriesController.class.getName(), @@ -463,7 +467,7 @@ public void getAll(@NotNull Context ctx) { String office = requiredParam(ctx, OFFICE); TimeSeries ts = dao.getTimeseries(cursor, pageSize, names, office, unit, - beginZdt, endZdt, versionDate, trim.getOrDefault(true)); + beginZdt, endZdt, versionDate, trim.getOrDefault(true), includeEntryDate); results = Formats.format(contentType, ts); @@ -573,14 +577,14 @@ public void update(@NotNull Context ctx, @NotNull String id) { dao.store(timeSeries, createAsLrts, storeRule, overrideProtection); ctx.status(HttpServletResponse.SC_OK); - } catch (IOException | DataAccessException ex) { + } catch (DataAccessException ex) { CdaError re = new CdaError("Internal Error"); logger.log(Level.SEVERE, re.toString(), ex); ctx.status(HttpServletResponse.SC_INTERNAL_SERVER_ERROR).json(re); } } - private TimeSeries deserializeTimeSeries(Context ctx) throws IOException { + private TimeSeries deserializeTimeSeries(Context ctx) { String contentTypeHeader = ctx.req.getContentType(); ContentType contentType = Formats.parseHeader(contentTypeHeader, TimeSeries.class); return Formats.parseContent(contentType, ctx.bodyAsInputStream(), TimeSeries.class); diff --git a/cwms-data-api/src/main/java/cwms/cda/data/dao/LocationLevelsDaoImpl.java b/cwms-data-api/src/main/java/cwms/cda/data/dao/LocationLevelsDaoImpl.java index 470e89000..1406c5d1b 100644 --- a/cwms-data-api/src/main/java/cwms/cda/data/dao/LocationLevelsDaoImpl.java +++ b/cwms-data-api/src/main/java/cwms/cda/data/dao/LocationLevelsDaoImpl.java @@ -62,7 +62,6 @@ import java.util.Objects; import java.util.Optional; import java.util.Set; -import java.util.TimeZone; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Pattern; @@ -242,13 +241,12 @@ private static SEASONAL_VALUE_TAB_T getSeasonalValues(LocationLevel locationLeve List seasonalValues = locationLevel.getSeasonalValues(); SEASONAL_VALUE_TAB_T pSeasonalValues = null; - if(seasonalValues != null && !seasonalValues.isEmpty()) { + if (seasonalValues != null && !seasonalValues.isEmpty()) { pSeasonalValues = new SEASONAL_VALUE_TAB_T(); - for(SeasonalValueBean seasonalValue : seasonalValues) - { + for (SeasonalValueBean seasonalValue : seasonalValues) { SEASONAL_VALUE_T seasonalValueT = new SEASONAL_VALUE_T(); seasonalValueT.setOFFSET_MINUTES(toBigDecimal(seasonalValue.getOffsetMinutes())); - if(seasonalValue.getOffsetMonths() != null) { + if (seasonalValue.getOffsetMonths() != null) { seasonalValueT.setOFFSET_MONTHS(seasonalValue.getOffsetMonths().byteValue()); } seasonalValueT.setVALUE(toBigDecimal(seasonalValue.getValue())); @@ -469,7 +467,7 @@ private void addSeasonalValue(Record r, String calOffset = r.get(view.CALENDAR_OFFSET); String timeOffset = r.get(view.TIME_OFFSET); JDomSeasonalIntervalImpl newSeasonalOffset = buildSeasonalOffset(calOffset, timeOffset); - SeasonalValueBean seasonalValue = buildSeasonalValueBean(seasonalLevel, newSeasonalOffset) ; + SeasonalValueBean seasonalValue = buildSeasonalValueBean(seasonalLevel, newSeasonalOffset); builder.withSeasonalValue(seasonalValue); } } diff --git 
a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDao.java b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDao.java index 8750b24f3..29be65bb2 100644 --- a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDao.java +++ b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDao.java @@ -27,7 +27,7 @@ void store(TimeSeries timeSeries, boolean createAsLrts, TimeSeries getTimeseries(String cursor, int pageSize, String names, String office, String unit, ZonedDateTime begin, ZonedDateTime end, - ZonedDateTime versionDate, boolean trim); + ZonedDateTime versionDate, boolean trim, boolean includeEntryDate); String getTimeseries(String format, String names, String office, String unit, String datum, ZonedDateTime begin, ZonedDateTime end, ZoneId timezone); diff --git a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java index f6698153f..cdf9c22e0 100644 --- a/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java +++ b/cwms-data-api/src/main/java/cwms/cda/data/dao/TimeSeriesDaoImpl.java @@ -23,6 +23,7 @@ import cwms.cda.data.dto.RecentValue; import cwms.cda.data.dto.TimeSeries; import cwms.cda.data.dto.TimeSeriesExtents; +import cwms.cda.data.dto.TimeSeriesWithDate; import cwms.cda.data.dto.Tsv; import cwms.cda.data.dto.TsvDqu; import cwms.cda.data.dto.TsvId; @@ -63,6 +64,7 @@ import org.jooq.Record; import org.jooq.Record1; import org.jooq.Record3; +import org.jooq.Record4; import org.jooq.Record7; import org.jooq.Result; import org.jooq.SQL; @@ -165,8 +167,7 @@ public String getTimeseries(String format, String names, String office, String u public TimeSeries getTimeseries(String page, int pageSize, String names, String office, String units, ZonedDateTime beginTime, ZonedDateTime endTime, - ZonedDateTime versionDate, boolean shouldTrim) { - TimeSeries retVal = null; + ZonedDateTime versionDate, boolean shouldTrim, boolean includeEntryDate) { String cursor = null; Timestamp tsCursor = null; Integer total = null; @@ -177,7 +178,7 @@ public TimeSeries getTimeseries(String page, int pageSize, String names, String logger.fine("Decoded cursor"); logger.finest(() -> { StringBuilder sb = new StringBuilder(); - for (String p: parts) { + for (String p : parts) { sb.append(p).append("\n"); } return sb.toString(); @@ -249,7 +250,6 @@ public TimeSeries getTimeseries(String page, int pageSize, String names, String unit.as("units"), ival.as("interval"), param.as("parm_part") - ).from(validTs) ); @@ -258,9 +258,6 @@ public TimeSeries getTimeseries(String page, int pageSize, String names, String Field valueCol = field("VALUE", Double.class).as("VALUE"); Field qualityCol = field("QUALITY_CODE", Integer.class).as("QUALITY_CODE"); - Field qualityNormCol = CWMS_TS_PACKAGE.call_NORMALIZE_QUALITY( - DSL.nvl(qualityCol, DSL.inline(5))).as("QUALITY_NORM"); - Long beginTimeMilli = beginTime.toInstant().toEpochMilli(); Long endTimeMilli = endTime.toInstant().toEpochMilli(); String trim = formatBool(shouldTrim); @@ -277,18 +274,20 @@ public TimeSeries getTimeseries(String page, int pageSize, String names, String maxVersion = "T"; } + Field qualityNormCol = CWMS_TS_PACKAGE.call_NORMALIZE_QUALITY( + DSL.nvl(qualityCol, DSL.inline(5))).as("QUALITY_NORM"); // Now we're going to call the retrieve_ts_out_tab function to get the data and build an // internal table from it so we can manipulate it further // This code assumes the database timezone is in UTC (per Oracle recommendation) SQL retrieveSelectData 
= DSL.sql( - "table(cwms_20.cwms_ts.retrieve_ts_out_tab(?,?," - + "cwms_20.cwms_util.to_timestamp(?), cwms_20.cwms_util.to_timestamp(?), 'UTC'," - + "?,?,?,?,?," - + getVersionPart(versionDate) + ",?,?) ) retrieveTs", - tsId, unit, - beginTimeMilli, endTimeMilli, //tz hardcoded - trim, startInclusive, endInclusive, previous, next, - versionDateMilli, maxVersion, officeId); + "table(cwms_20.cwms_ts.retrieve_ts_out_tab(?,?," + + "cwms_20.cwms_util.to_timestamp(?), cwms_20.cwms_util.to_timestamp(?), 'UTC'," + + "?,?,?,?,?," + + getVersionPart(versionDate) + ",?,?) ) retrieveTs", + tsId, unit, + beginTimeMilli, endTimeMilli, //tz hardcoded + trim, startInclusive, endInclusive, previous, next, + versionDateMilli, maxVersion, officeId); Field tzName = AV_CWMS_TS_ID2.TIME_ZONE_ID; @@ -351,25 +350,72 @@ public TimeSeries getTimeseries(String page, int pageSize, String names, String logger.fine(() -> metadataQuery.getSQL(ParamType.INLINED)); - VersionType finalDateVersionType = getVersionType(dsl, names, office, versionDate != null); + TimeSeries timeseries = metadataQuery.fetchOne(tsMetadata -> { String vert = (String) tsMetadata.getValue("VERTICAL_DATUM"); VerticalDatumInfo verticalDatumInfo = parseVerticalDatumInfo(vert); - - return new TimeSeries(recordCursor, recordPageSize, tsMetadata.getValue("TOTAL", - Integer.class), tsMetadata.getValue("NAME", String.class), - tsMetadata.getValue("office_id", String.class), - beginTime, endTime, tsMetadata.getValue("units", String.class), - Duration.ofMinutes(tsMetadata.get("interval") == null ? 0 : - tsMetadata.getValue("interval", Long.class)), - verticalDatumInfo, - tsMetadata.getValue(AV_CWMS_TS_ID2.INTERVAL_UTC_OFFSET).longValue(), - tsMetadata.getValue(tzName), - versionDate, finalDateVersionType - ); + VersionType finalDateVersionType = getVersionType(dsl, names, office, versionDate != null); + if (!includeEntryDate) { + return new TimeSeries(recordCursor, recordPageSize, tsMetadata.getValue("TOTAL", + Integer.class), tsMetadata.getValue("NAME", String.class), + tsMetadata.getValue("office_id", String.class), + beginTime, endTime, tsMetadata.getValue("units", String.class), + Duration.ofMinutes(tsMetadata.get("interval") == null ? 0 : + tsMetadata.getValue("interval", Long.class)), + verticalDatumInfo, + tsMetadata.getValue(AV_CWMS_TS_ID2.INTERVAL_UTC_OFFSET).longValue(), + tsMetadata.getValue(tzName), + versionDate, finalDateVersionType + ); + } else { + return new TimeSeriesWithDate(recordCursor, recordPageSize, tsMetadata.getValue("TOTAL", + Integer.class), tsMetadata.getValue("NAME", String.class), + tsMetadata.getValue("office_id", String.class), + beginTime, endTime, tsMetadata.getValue("units", String.class), + Duration.ofMinutes(tsMetadata.get("interval") == null ? 0 : + tsMetadata.getValue("interval", Long.class)), + verticalDatumInfo, + tsMetadata.getValue(AV_CWMS_TS_ID2.INTERVAL_UTC_OFFSET).longValue(), + tsMetadata.getValue(tzName), + versionDate, finalDateVersionType + ); + } }); + if (includeEntryDate) { + timeseries = new TimeSeriesWithDate(timeseries); + } + + + Field dataEntryDate = field("DATA_ENTRY_DATE", Timestamp.class).as("data_entry_date"); + Condition whereCond = dateTimeCol + .greaterOrEqual(CWMS_UTIL_PACKAGE.call_TO_TIMESTAMP__2( + DSL.nvl(DSL.val(tsCursor == null ? 
null : + tsCursor.toInstant().toEpochMilli()), + DSL.val(beginTime.toInstant().toEpochMilli())))) + .and(dateTimeCol + .lessOrEqual(CWMS_UTIL_PACKAGE.call_TO_TIMESTAMP__2( + DSL.val(endTime.toInstant().toEpochMilli()))) + .and(AV_TSV_DQU.AV_TSV_DQU.CWMS_TS_ID.equalIgnoreCase(names)) + .and(AV_TSV_DQU.AV_TSV_DQU.OFFICE_ID.eq(office)) + .and(AV_TSV_DQU.AV_TSV_DQU.UNIT_ID.equalIgnoreCase(unit))); + + TimeSeries retVal = null; if (pageSize != 0) { + if (versionDate != null) { + whereCond = whereCond.and(AV_TSV_DQU.AV_TSV_DQU.VERSION_DATE.eq(versionDate == null ? null : + Timestamp.from(versionDate.toInstant()))); + } + + SelectConditionStep> query2 = dsl.select( + dateTimeCol, + valueCol, + qualityNormCol, + dataEntryDate + ) + .from(AV_TSV_DQU.AV_TSV_DQU) + .where(whereCond); + SelectConditionStep> query = dsl.select( dateTimeCol, @@ -389,18 +435,29 @@ public TimeSeries getTimeseries(String page, int pageSize, String names, String if (pageSize > 0) { query.limit(DSL.val(pageSize + 1)); + query2.limit(DSL.val(pageSize + 1)); } - logger.fine(() -> query.getSQL(ParamType.INLINED)); - - query.forEach(tsRecord -> timeseries.addValue( - tsRecord.getValue(dateTimeCol), - tsRecord.getValue(valueCol), - tsRecord.getValue(qualityNormCol).intValue() - ) - ); - - retVal = timeseries; + if (includeEntryDate) { + logger.fine(() -> query2.getSQL(ParamType.INLINED)); + final TimeSeriesWithDate timeSeries = new TimeSeriesWithDate(timeseries); + query2.forEach(tsRecord -> timeSeries.addValue( + tsRecord.getValue(dateTimeCol), + tsRecord.getValue(valueCol), + tsRecord.getValue(qualityNormCol).intValue(), + tsRecord.getValue(dataEntryDate) + )); + retVal = timeSeries; + } else { + logger.fine(() -> query.getSQL(ParamType.INLINED)); + final TimeSeries finalTimeseries = timeseries; + query.forEach(tsRecord -> finalTimeseries.addValue( + tsRecord.getValue(dateTimeCol), + tsRecord.getValue(valueCol), + tsRecord.getValue(qualityNormCol).intValue() + )); + retVal = finalTimeseries; + } } return retVal; @@ -482,7 +539,8 @@ public Catalog getTimeSeriesCatalog(String page, int pageSize, CatalogRequestPar String cursorOffice = null; Catalog.CatalogPage catPage = null; if (page == null || page.isEmpty()) { - CommonTableExpression limiter = buildWithClause(inputParams, buildWhereConditions(inputParams), new ArrayList<>(), pageSize, true); + CommonTableExpression limiter = buildWithClause(inputParams, buildWhereConditions(inputParams), + new ArrayList<>(), pageSize, true); SelectJoinStep> totalQuery = dsl.with(limiter) .select(countDistinct(limiter.field(AV_CWMS_TS_ID.AV_CWMS_TS_ID.TS_CODE))) .from(limiter); @@ -531,7 +589,8 @@ public Catalog getTimeSeriesCatalog(String page, int pageSize, CatalogRequestPar .on(limiterCode .eq(AV_TS_EXTENTS_UTC.TS_CODE.coerce(limiterCode))); } - final SelectSeekStep2 overallQuery = tmpQuery.orderBy(AV_CWMS_TS_ID.AV_CWMS_TS_ID.DB_OFFICE_ID, AV_CWMS_TS_ID.AV_CWMS_TS_ID.CWMS_TS_ID); + final SelectSeekStep2 overallQuery = tmpQuery.orderBy(AV_CWMS_TS_ID.AV_CWMS_TS_ID.DB_OFFICE_ID, + AV_CWMS_TS_ID.AV_CWMS_TS_ID.CWMS_TS_ID); logger.info(() -> overallQuery.getSQL(ParamType.INLINED)); Result result = overallQuery.fetch(); @@ -567,8 +626,8 @@ public Catalog getTimeSeriesCatalog(String page, int pageSize, CatalogRequestPar } }); - List entries = tsIdExtentMap.entrySet().stream() - .map(e -> e.getValue().build()) + List entries = tsIdExtentMap.values().stream() + .map(TimeseriesCatalogEntry.Builder::build) .collect(Collectors.toList()); return new Catalog(catPage != null ? 
catPage.toString() : null, @@ -694,20 +753,23 @@ public Catalog getTimeSeriesCatalog(String page, int pageSize, CatalogRequestPar TableLike innerSelect = selectDistinct(selectFields) .from(fromTable) .where(whereConditions).and(DSL.and(pagingConditions)) - .orderBy(AV_CWMS_TS_ID.AV_CWMS_TS_ID.DB_OFFICE_ID, AV_CWMS_TS_ID.AV_CWMS_TS_ID.CWMS_TS_ID) + .orderBy(AV_CWMS_TS_ID.AV_CWMS_TS_ID.DB_OFFICE_ID, + AV_CWMS_TS_ID.AV_CWMS_TS_ID.CWMS_TS_ID) .asTable("limiterInner"); if (forCount) { return name("limiter").as( select(asterisk()) .from(innerSelect) - .orderBy(innerSelect.field(AV_CWMS_TS_ID.AV_CWMS_TS_ID.DB_OFFICE_ID), innerSelect.field(AV_CWMS_TS_ID.AV_CWMS_TS_ID.CWMS_TS_ID)) + .orderBy(innerSelect.field(AV_CWMS_TS_ID.AV_CWMS_TS_ID.DB_OFFICE_ID), + innerSelect.field(AV_CWMS_TS_ID.AV_CWMS_TS_ID.CWMS_TS_ID)) ); } else { return name("limiter").as( select(asterisk()) .from(innerSelect) .where(field("rownum").lessOrEqual(pageSize)) - .orderBy(innerSelect.field(AV_CWMS_TS_ID.AV_CWMS_TS_ID.DB_OFFICE_ID), innerSelect.field(AV_CWMS_TS_ID.AV_CWMS_TS_ID.CWMS_TS_ID)) + .orderBy(innerSelect.field(AV_CWMS_TS_ID.AV_CWMS_TS_ID.DB_OFFICE_ID), + innerSelect.field(AV_CWMS_TS_ID.AV_CWMS_TS_ID.CWMS_TS_ID)) ); } } @@ -932,18 +994,18 @@ public List findMostRecentsInRange(List tsIds, Timestamp pa // Using the innerSelect field makes DATA_ENTRY_DATE correctly map to Timestamp // and the generated sql refers to columns from the alias_??? table. Field[] queryFields = new Field[]{ - innerSelect.field(AV_TSV_DQU.AV_TSV_DQU.CWMS_TS_ID), - innerSelect.field(AV_TSV_DQU.AV_TSV_DQU.OFFICE_ID), - innerSelect.field(AV_TSV_DQU.AV_TSV_DQU.TS_CODE), - innerSelect.field(AV_TSV_DQU.AV_TSV_DQU.VERSION_DATE), - innerSelect.field(AV_TSV_DQU.AV_TSV_DQU.DATA_ENTRY_DATE), - innerSelect.field(AV_TSV_DQU.AV_TSV_DQU.VALUE), - innerSelect.field(AV_TSV_DQU.AV_TSV_DQU.QUALITY_CODE), - innerSelect.field(AV_TSV_DQU.AV_TSV_DQU.START_DATE), - innerSelect.field(AV_TSV_DQU.AV_TSV_DQU.END_DATE), - unitField, - dateTimeField, - innerSelect.field(tsField) + innerSelect.field(AV_TSV_DQU.AV_TSV_DQU.CWMS_TS_ID), + innerSelect.field(AV_TSV_DQU.AV_TSV_DQU.OFFICE_ID), + innerSelect.field(AV_TSV_DQU.AV_TSV_DQU.TS_CODE), + innerSelect.field(AV_TSV_DQU.AV_TSV_DQU.VERSION_DATE), + innerSelect.field(AV_TSV_DQU.AV_TSV_DQU.DATA_ENTRY_DATE), + innerSelect.field(AV_TSV_DQU.AV_TSV_DQU.VALUE), + innerSelect.field(AV_TSV_DQU.AV_TSV_DQU.QUALITY_CODE), + innerSelect.field(AV_TSV_DQU.AV_TSV_DQU.START_DATE), + innerSelect.field(AV_TSV_DQU.AV_TSV_DQU.END_DATE), + unitField, + dateTimeField, + innerSelect.field(tsField) }; SelectConditionStep query = dsl.select(queryFields) @@ -1030,18 +1092,18 @@ public List findRecentsInRange(String office, String categoryId, St Field unit = innerSelect.field(tsvView.UNIT_ID); Field[] queryFields = new Field[]{ - innerSelect.field(tsvView.OFFICE_ID), - innerSelect.field(tsvView.TS_CODE), - innerSelect.field(tsvView.VERSION_DATE), - innerSelect.field(tsvView.DATA_ENTRY_DATE), - innerSelect.field(tsvView.VALUE), - innerSelect.field(tsvView.QUALITY_CODE), - innerSelect.field(tsvView.START_DATE), - innerSelect.field(tsvView.END_DATE), - dateTime, - unit, - innerSelect.field(AV_TS_GRP_ASSGN.AV_TS_GRP_ASSGN.TS_ID), - innerSelect.field(AV_TS_GRP_ASSGN.AV_TS_GRP_ASSGN.ATTRIBUTE)}; + innerSelect.field(tsvView.OFFICE_ID), + innerSelect.field(tsvView.TS_CODE), + innerSelect.field(tsvView.VERSION_DATE), + innerSelect.field(tsvView.DATA_ENTRY_DATE), + innerSelect.field(tsvView.VALUE), + innerSelect.field(tsvView.QUALITY_CODE), + 
innerSelect.field(tsvView.START_DATE), + innerSelect.field(tsvView.END_DATE), + dateTime, + unit, + innerSelect.field(AV_TS_GRP_ASSGN.AV_TS_GRP_ASSGN.TS_ID), + innerSelect.field(AV_TS_GRP_ASSGN.AV_TS_GRP_ASSGN.ATTRIBUTE)}; return dsl.select(queryFields) .from(innerSelect) diff --git a/cwms-data-api/src/main/java/cwms/cda/data/dto/TimeSeries.java b/cwms-data-api/src/main/java/cwms/cda/data/dto/TimeSeries.java index da2af8cbd..050682adf 100644 --- a/cwms-data-api/src/main/java/cwms/cda/data/dto/TimeSeries.java +++ b/cwms-data-api/src/main/java/cwms/cda/data/dto/TimeSeries.java @@ -4,6 +4,8 @@ import com.fasterxml.jackson.annotation.JsonFormat; import com.fasterxml.jackson.annotation.JsonFormat.Shape; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyOrder; import com.fasterxml.jackson.annotation.JsonRootName; @@ -17,7 +19,6 @@ import io.swagger.v3.oas.annotations.media.ArraySchema; import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.media.Schema.AccessMode; - import java.lang.reflect.Field; import java.sql.Timestamp; import java.time.Duration; @@ -27,6 +28,7 @@ @JsonRootName("timeseries") @JsonPropertyOrder(alphabetic = true) +@JsonInclude(JsonInclude.Include.NON_NULL) @JsonNaming(PropertyNamingStrategies.KebabCaseStrategy.class) @FormattableWith(contentType = Formats.JSONV2, formatter = JsonV2.class, aliases = {Formats.DEFAULT, Formats.JSON}) @FormattableWith(contentType = Formats.XMLV2, formatter = XMLv2.class, aliases = {Formats.XML}) @@ -42,10 +44,10 @@ public class TimeSeries extends CwmsDTOPaginated { @Schema(description = "The units of the time series data",required = true) String units; - @Schema(description = "The version type for the time series being queried. Can be in the form of MAX_AGGREGATE, SINGLE_VERSION, or UNVERSIONED. " + - "MAX_AGGREGATE will get the latest version date value for each value in the date range. SINGLE_VERSION must be called with a valid " + - "version date and will return the values for the version date provided. UNVERSIONED return values from an unversioned time series. " + - "Note that SINGLE_VERSION requires a valid version date while MAX_AGGREGATE and UNVERSIONED each require a null version date.") + @Schema(description = "The version type for the time series being queried. Can be in the form of MAX_AGGREGATE, SINGLE_VERSION, or UNVERSIONED. " + + "MAX_AGGREGATE will get the latest version date value for each value in the date range. SINGLE_VERSION must be called with a valid " + + "version date and will return the values for the version date provided. UNVERSIONED return values from an unversioned time series. 
" + + "Note that SINGLE_VERSION requires a valid version date while MAX_AGGREGATE and UNVERSIONED each require a null version date.") @JsonFormat(shape = Shape.STRING) VersionType dateVersionType; @@ -91,7 +93,7 @@ public class TimeSeries extends CwmsDTOPaginated { @Schema( accessMode = AccessMode.READ_ONLY, - description="Offset from top of interval" + description = "Offset from top of interval" ) private Long intervalOffset; @@ -103,13 +105,13 @@ public class TimeSeries extends CwmsDTOPaginated { @SuppressWarnings("unused") // required so JAXB can initialize and marshal - private TimeSeries() {} + protected TimeSeries() {} public TimeSeries(String page, int pageSize, Integer total, String name, String officeId, ZonedDateTime begin, ZonedDateTime end, String units, Duration interval) { this(page, pageSize, total, name, officeId, begin, end, units, interval, null, null, null, null, null); } - public TimeSeries(String page, int pageSize, Integer total, String name, String officeId, ZonedDateTime begin, ZonedDateTime end, String units, Duration interval, VerticalDatumInfo info, ZonedDateTime versionDate, VersionType dateVersionType){ + public TimeSeries(String page, int pageSize, Integer total, String name, String officeId, ZonedDateTime begin, ZonedDateTime end, String units, Duration interval, VerticalDatumInfo info, ZonedDateTime versionDate, VersionType dateVersionType) { this(page, pageSize, total, name, officeId, begin, end, units, interval, info, null, null, versionDate, dateVersionType); } @@ -159,7 +161,7 @@ public ZonedDateTime getEnd() { } // Use the array shape to optimize data transfer to client - @JsonFormat(shape=JsonFormat.Shape.ARRAY) + @JsonFormat(shape = JsonFormat.Shape.ARRAY) public List getValues() { return values; } @@ -169,8 +171,7 @@ public List getXmlValues() { return values; } - public VerticalDatumInfo getVerticalDatumInfo() - { + public VerticalDatumInfo getVerticalDatumInfo() { return verticalDatumInfo; } @@ -186,7 +187,9 @@ public ZonedDateTime getVersionDate() { return versionDate; } - public VersionType getDateVersionType() { return dateVersionType; } + public VersionType getDateVersionType() { + return dateVersionType; + } @JsonProperty(value = "value-columns") @Schema(name = "value-columns", accessMode = AccessMode.READ_ONLY) @@ -194,31 +197,28 @@ public List getValueColumnsJSON() { return getColumnDescriptor(); } - public boolean addValue(Timestamp dateTime, Double value, int qualityCode) { + public void addValue(Timestamp dateTime, Double value, int qualityCode) { // Set the current page, if not set - if((page == null || page.isEmpty()) && values.isEmpty()) { + if ((page == null || page.isEmpty()) && values.isEmpty()) { page = encodeCursor(String.format("%d", dateTime.getTime()), pageSize, total); } - if(pageSize > 0 && values.size() == pageSize) { + if (pageSize > 0 && values.size() == pageSize) { nextPage = encodeCursor(String.format("%d", dateTime.toInstant().toEpochMilli()), pageSize, total); - return false; } else { - return values.add(new Record(dateTime, value, qualityCode)); + values.add(new Record(dateTime, value, qualityCode)); } } private List getColumnDescriptor() { List columns = new ArrayList<>(); - for (Field f: Record.class.getDeclaredFields()) { JsonProperty field = f.getAnnotation(JsonProperty.class); - if(field != null) { + if (field != null) { String fieldName = !field.value().isEmpty() ? 
field.value() : f.getName(); int fieldIndex = field.index(); columns.add(new TimeSeries.Column(fieldName, fieldIndex + 1, f.getType())); } } - return columns; } @@ -228,10 +228,10 @@ private List getColumnDescriptor() { schema = @Schema( name = "TimeSeries.Record", description = "A representation of a time-series record in the form [dateTime, value, qualityCode]", - type="array" + type = "array" ), arraySchema = @Schema( - type="array", + type = "array", example = "[1509654000000, 54.3, 0]", description = "Time is Milliseconds since the UNIX Epoch. Value is Double (for missing data you " + "can use null, or -Float.MAX_VALUE (-340282346638528859811704183484516925440), " @@ -240,6 +240,8 @@ private List getColumnDescriptor() { + "placeholder which can be important in irregular and psuedo regular timeseries." ) ) + + @JsonIgnoreProperties(ignoreUnknown = true) public static class Record { // Explicitly set property order for array serialization @JsonProperty(value = "date-time", index = 0) @@ -254,9 +256,11 @@ public static class Record { int qualityCode; @SuppressWarnings("unused") // required so JAXB can initialize and marshal - private Record() {} + private Record() { + } - protected Record(Timestamp dateTime, Double value, int qualityCode) { + public Record(Timestamp dateTime, Double value, int qualityCode) { + super(); this.dateTime = dateTime; this.value = value; this.qualityCode = qualityCode; @@ -276,33 +280,27 @@ public int getQualityCode() { } @Override - public boolean equals(Object o) - { - if(this == o) - { + public boolean equals(Object o) { + if (this == o) { return true; } - if(o == null || getClass() != o.getClass()) - { + if (o == null || getClass() != o.getClass()) { return false; } - final Record record = (Record) o; + final Record tsRecord = (Record) o; - if(getQualityCode() != record.getQualityCode()) - { + if (getQualityCode() != tsRecord.getQualityCode()) { return false; } - if(getDateTime() != null ? !getDateTime().equals(record.getDateTime()) : record.getDateTime() != null) - { + if (getDateTime() != null ? !getDateTime().equals(tsRecord.getDateTime()) : tsRecord.getDateTime() != null) { return false; } - return getValue() != null ? getValue().equals(record.getValue()) : record.getValue() == null; + return getValue() != null ? getValue().equals(tsRecord.getValue()) : tsRecord.getValue() == null; } @Override - public int hashCode() - { + public int hashCode() { int result = getDateTime() != null ? getDateTime().hashCode() : 0; result = 31 * result + (getValue() != null ? 
getValue().hashCode() : 0); result = 31 * result + getQualityCode(); @@ -310,20 +308,19 @@ public int hashCode() } @Override - public String toString() - { + public String toString() { return "Record{" + "dateTime=" + dateTime + ", value=" + value + ", qualityCode=" + qualityCode + '}'; } } @Schema(hidden = true, name = "TimeSeries.Column", accessMode = Schema.AccessMode.READ_ONLY) - private static class Column { + protected static class Column { public final String name; public final int ordinal; public final Class datatype; // JAXB seems to need a default ctor - private Column(){ + private Column() { this(null, 0,null); } diff --git a/cwms-data-api/src/main/java/cwms/cda/data/dto/TimeSeriesRecordWithDate.java b/cwms-data-api/src/main/java/cwms/cda/data/dto/TimeSeriesRecordWithDate.java new file mode 100644 index 000000000..21066175d --- /dev/null +++ b/cwms-data-api/src/main/java/cwms/cda/data/dto/TimeSeriesRecordWithDate.java @@ -0,0 +1,81 @@ +/* + * + * MIT License + * + * Copyright (c) 2024 Hydrologic Engineering Center + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE + * SOFTWARE. + */ + +package cwms.cda.data.dto; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import io.swagger.v3.oas.annotations.media.Schema; +import java.sql.Timestamp; +import java.util.Objects; + +/** + * TimeSeriesRecordWithDate is a subclass of TimeSeries.Record that includes a data entry date. + * The data entry date is the date that the data was entered into the database. 
+ */ +@JsonDeserialize(using = JsonDeserializer.None.class) +public final class TimeSeriesRecordWithDate extends TimeSeries.Record { + @JsonProperty(value = "data-entry-date", index = 3) + @Schema(implementation = Long.class, description = "Milliseconds since 1970-01-01 (Unix Epoch), always UTC") + @JsonInclude(JsonInclude.Include.NON_DEFAULT) + Timestamp dataEntryDate; + + // Default constructor for Jackson Deserialization + public TimeSeriesRecordWithDate() { + super(null, null, 0); + } + + public TimeSeriesRecordWithDate(Timestamp dateTime, Double value, int qualityCode, Timestamp dataEntryDate) { + super(dateTime, value, qualityCode); + this.dataEntryDate = dataEntryDate; + } + + public Timestamp getDataEntryDate() { + return dataEntryDate; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + TimeSeriesRecordWithDate that = (TimeSeriesRecordWithDate) o; + return Objects.equals(getDataEntryDate(), that.getDataEntryDate()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), getDataEntryDate()); + } +} diff --git a/cwms-data-api/src/main/java/cwms/cda/data/dto/TimeSeriesWithDate.java b/cwms-data-api/src/main/java/cwms/cda/data/dto/TimeSeriesWithDate.java new file mode 100644 index 000000000..78ce3a57b --- /dev/null +++ b/cwms-data-api/src/main/java/cwms/cda/data/dto/TimeSeriesWithDate.java @@ -0,0 +1,177 @@ +/* + * + * MIT License + * + * Copyright (c) 2024 Hydrologic Engineering Center + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE + * SOFTWARE. 
+ */ + +package cwms.cda.data.dto; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.PropertyNamingStrategies; +import com.fasterxml.jackson.databind.annotation.JsonNaming; +import cwms.cda.api.enums.VersionType; +import cwms.cda.formatters.Formats; +import cwms.cda.formatters.annotations.FormattableWith; +import cwms.cda.formatters.json.JsonV2; +import cwms.cda.formatters.xml.XMLv2; +import io.swagger.v3.oas.annotations.media.Schema; +import java.lang.reflect.Field; +import java.sql.Timestamp; +import java.time.Duration; +import java.time.ZonedDateTime; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + + +@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonNaming(PropertyNamingStrategies.KebabCaseStrategy.class) +@FormattableWith(contentType = Formats.JSONV2, formatter = JsonV2.class, aliases = {Formats.DEFAULT, Formats.JSON}) +@FormattableWith(contentType = Formats.XMLV2, formatter = XMLv2.class, aliases = {Formats.XML}) +public final class TimeSeriesWithDate extends TimeSeries { + + private List values; + + // list of TimeSeriesWithDate.Record, uses raw to avoid typing errors + @Override + public List getValues() { + return values; + } + + TimeSeriesWithDate() { + super(); + values = new ArrayList<>(); + } + + public TimeSeriesWithDate(TimeSeries timeSeries) { + this(timeSeries.getPage(), timeSeries.getPageSize(), timeSeries.getTotal(), timeSeries.getName(), + timeSeries.getOfficeId(), timeSeries.getBegin(), timeSeries.getEnd(), timeSeries.getUnits(), + timeSeries.getInterval(), timeSeries.getVerticalDatumInfo(), timeSeries.getIntervalOffset(), + timeSeries.getTimeZone(), timeSeries.getVersionDate(), timeSeries.getDateVersionType()); + values = new ArrayList<>(); + } + + public TimeSeriesWithDate(String page, int pageSize, Integer total, String name, String officeId, + ZonedDateTime begin, ZonedDateTime end, String units, Duration interval) { + this(page, pageSize, total, name, officeId, begin, end, units, interval, null, null, + null, null, null); + values = new ArrayList<>(); + } + + public TimeSeriesWithDate(String page, int pageSize, Integer total, String name, String officeId, ZonedDateTime begin, + ZonedDateTime end, String units, Duration interval, VerticalDatumInfo info, ZonedDateTime versionDate, + VersionType dateVersionType) { + this(page, pageSize, total, name, officeId, begin, end, units, interval, info, null, + null, versionDate, dateVersionType); + values = new ArrayList<>(); + } + + public TimeSeriesWithDate(String page, int pageSize, Integer total, String name, String officeId, ZonedDateTime begin, + ZonedDateTime end, String units, Duration interval, VerticalDatumInfo info, Long intervalOffset, + String timeZone, ZonedDateTime versionDate, VersionType dateVersionType) { + super(page, pageSize, total, name, officeId, begin, end, units, interval, info, intervalOffset, + timeZone, versionDate, dateVersionType); + values = new ArrayList<>(); + } + + public void addValue(Timestamp dateTime, Double value, int qualityCode, Timestamp dataEntryDate) { + // Set the current page, if not set + if ((page == null || page.isEmpty()) && (values == null || values.isEmpty())) { + page = encodeCursor(String.format("%d", dateTime.getTime()), pageSize, total); + } + if (pageSize > 0 && values.size() == pageSize) { + nextPage = encodeCursor(String.format("%d", dateTime.toInstant().toEpochMilli()), pageSize, total); + } else { + values.add(new Record(dateTime, 
value, qualityCode, dataEntryDate)); + } + } + + @Override + public List getValueColumnsJSON() { + return getColumnDescriptor(); + } + + private List getColumnDescriptor() { + List columns = new ArrayList<>(); + for (Field f: TimeSeries.Record.class.getDeclaredFields()) { + JsonProperty field = f.getAnnotation(JsonProperty.class); + if (field != null) { + String fieldName = !field.value().isEmpty() ? field.value() : f.getName(); + int fieldIndex = field.index(); + columns.add(new TimeSeries.Column(fieldName, fieldIndex + 1, f.getType())); + } + } + for (Field f: Record.class.getDeclaredFields()) { + JsonProperty field = f.getAnnotation(JsonProperty.class); + if (field != null) { + String fieldName = !field.value().isEmpty() ? field.value() : f.getName(); + int fieldIndex = field.index(); + columns.add(new TimeSeries.Column(fieldName, fieldIndex + 1, f.getType())); + } + } + + return columns; + } + + public static final class Record extends TimeSeries.Record { + @JsonProperty(value = "data-entry-date", index = 3) + @Schema(implementation = Long.class, description = "Milliseconds since 1970-01-01 (Unix Epoch), always UTC") + @JsonInclude(JsonInclude.Include.NON_DEFAULT) + Timestamp dataEntryDate; + + // Default constructor for Jackson Deserialization + public Record() { + super(null, null, 0); + } + + public Record(Timestamp dateTime, Double value, int qualityCode, Timestamp dataEntryDate) { + super(dateTime, value, qualityCode); + this.dataEntryDate = dataEntryDate; + } + + public Timestamp getDataEntryDate() { + return dataEntryDate; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } + Record that = (Record) o; + return Objects.equals(getDataEntryDate(), that.getDataEntryDate()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), getDataEntryDate()); + } + } +} diff --git a/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesControllerTest.java b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesControllerTest.java index d528fb93e..58de111bc 100644 --- a/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesControllerTest.java +++ b/cwms-data-api/src/test/java/cwms/cda/api/TimeSeriesControllerTest.java @@ -1,8 +1,6 @@ package cwms.cda.api; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.ArgumentMatchers.isNotNull; import static org.mockito.ArgumentMatchers.isNull; @@ -16,19 +14,19 @@ import com.fasterxml.jackson.databind.ObjectMapper; import cwms.cda.data.dao.TimeSeriesDao; import cwms.cda.data.dto.TimeSeries; +import cwms.cda.data.dto.TimeSeriesWithDate; import cwms.cda.formatters.ContentType; import cwms.cda.formatters.Formats; import cwms.cda.formatters.json.JsonV2; import io.javalin.core.util.Header; import io.javalin.http.Context; import java.io.ByteArrayInputStream; -import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.sql.Timestamp; import java.time.Duration; +import java.time.Instant; import java.time.ZonedDateTime; -import java.time.temporal.ChronoUnit; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -43,9 +41,6 @@ import 
org.junit.jupiter.params.provider.ValueSource; class TimeSeriesControllerTest extends ControllerTest { - - - @Test void testDaoMock() throws JsonProcessingException { String officeId = "LRL"; @@ -67,7 +62,7 @@ void testDaoMock() throws JsonProcessingException { when( dao.getTimeseries(eq(""), eq(500), eq(tsId), eq(officeId), eq("EN"), - isNotNull(), isNotNull(), isNull(), eq(true) )).thenReturn(expected); + isNotNull(), isNotNull(), isNull(), eq(true), eq(false))).thenReturn(expected); // build mock request and response @@ -113,7 +108,7 @@ protected TimeSeriesDao getTimeSeriesDao(DSLContext dsl) { // Check that the controller accessed our mock dao in the expected way verify(dao, times(1)). getTimeseries(eq(""), eq(500), eq(tsId), eq(officeId), eq("EN"), - isNotNull(), isNotNull(), isNull(), eq(true));// + isNotNull(), isNotNull(), isNull(), eq(true), eq(false));// // Make sure controller thought it was happy verify(response).setStatus(200); @@ -134,11 +129,69 @@ private void assertSimilar(TimeSeries expected, TimeSeries actual) { // Make sure ts we got back resembles the fakeTS our mock dao was supposed to return. assertEquals(expected.getOfficeId(), actual.getOfficeId(), "offices did not match"); assertEquals(expected.getName(), actual.getName(), "names did not match"); - assertEquals(expected.getValues(), actual.getValues(), "values did not match"); + assertRecordsMatch(expected.getValues(), actual.getValues()); + assertTrue(expected.getBegin().isEqual(actual.getBegin()), "begin dates not equal"); + assertTrue(expected.getEnd().isEqual(actual.getEnd()), "end dates not equal"); + } + + private void assertSimilarWithDate(TimeSeriesWithDate expected, TimeSeriesWithDate actual) + { + assertEquals(expected.getOfficeId(), actual.getOfficeId(), "offices did not match"); + assertEquals(expected.getName(), actual.getName(), "names did not match"); + assertDateRecordsMatch(expected.getValues(), actual.getValues()); assertTrue(expected.getBegin().isEqual(actual.getBegin()), "begin dates not equal"); assertTrue(expected.getEnd().isEqual(actual.getEnd()), "end dates not equal"); } + private void assertRecordsMatch(List expected, List actual) { + for (int i = 0; i < expected.size(); i++) { + assertEquals(expected.get(i).getDateTime(), actual.get(i).getDateTime(), "Timestamps did not match"); + assertEquals(expected.get(i).getValue(), actual.get(i).getValue(), "Values did not match"); + assertEquals(expected.get(i).getQualityCode(), actual.get(i).getQualityCode(), "Quality codes did not match"); + } + } + + private void assertDateRecordsMatch(List expected, List actual) { + for (int i = 0; i < expected.size(); i++) { + assertEquals(((TimeSeriesWithDate.Record) expected.get(i)).getDataEntryDate(), + ((TimeSeriesWithDate.Record) actual.get(i)).getDataEntryDate(), "Entry dates did not match"); + assertEquals(expected.get(i).getDateTime(), actual.get(i).getDateTime(), "Timestamps did not match"); + assertEquals(expected.get(i).getValue(), actual.get(i).getValue(), "Values did not match"); + assertEquals(expected.get(i).getQualityCode(), actual.get(i).getQualityCode(), "Quality codes did not match"); + } + } + + @ParameterizedTest + @ValueSource(strings = {Formats.XMLV2, Formats.JSONV2}) + void testSerializeTimeSeries(String format) { + String officeId = "LRL"; + String tsId = "RYAN3.Stage.Inst.5Minutes.0.ZSTORE_TS_TEST"; + TimeSeries fakeTs = buildTimeSeries(officeId, tsId); + ContentType contentType = Formats.parseHeader(format, TimeSeries.class); + String formatted = Formats.format(contentType, fakeTs); 
+ assertNotNull(formatted); + TimeSeries ts2 = Formats.parseContent(contentType, formatted, TimeSeries.class); + assertNotNull(ts2); + assertSimilar(fakeTs, ts2); + } + + @ParameterizedTest + @ValueSource(strings = {Formats.XMLV2, Formats.JSONV2}) + void testSerializeTimeSeriesWithDataEntryDate(String format) { + String officeId = "LRL"; + String tsId = "RYAN3.Stage.Inst.5Minutes.0.ZSTORE_TS_TEST"; + TimeSeriesWithDate fakeTs = buildTimeSeriesWithEntryDate(officeId, tsId); + assertEquals(4, fakeTs.getValueColumnsJSON().size()); + assertInstanceOf(TimeSeriesWithDate.Record.class, fakeTs.getValues().get(0)); + ContentType contentType = Formats.parseHeader(format, TimeSeriesWithDate.class); + String formatted = Formats.format(contentType, fakeTs); + assertNotNull(formatted); + TimeSeriesWithDate ts2 = Formats.parseContent(contentType, formatted, TimeSeriesWithDate.class); + assertNotNull(ts2); + assertSimilarWithDate(fakeTs, ts2); + } + + @ParameterizedTest @ValueSource(strings = {Formats.XMLV2, Formats.JSONV2}) void testDeserializeTimeSeries(String format) { @@ -152,6 +205,35 @@ void testDeserializeTimeSeries(String format) { assertSimilar(fakeTs, ts2); } + @ParameterizedTest + @ValueSource(strings = {Formats.XMLV2, Formats.JSONV2}) + void testDeserializeTimeSeriesWithEntryDate(String format) { + String officeId = "LRL"; + String tsId = "RYAN3.Stage.Inst.5Minutes.0.ZSTORE_TS_TEST"; + TimeSeriesWithDate fakeTs = buildTimeSeriesWithEntryDate(officeId, tsId); + ContentType contentType = Formats.parseHeader(format, TimeSeriesWithDate.class); + String formatted = Formats.format(contentType, fakeTs); + TimeSeriesWithDate ts2 = Formats.parseContent(contentType, formatted, TimeSeriesWithDate.class); + assertNotNull(ts2); + assertSimilarWithDate(fakeTs, ts2); + } + + @Test + void testXMLSerializeDeserializeTimeSeries() + { + String format = Formats.XMLV2; + String officeId = "LRL"; + String tsId = "RYAN3.Stage.Inst.5Minutes.0.ZSTORE_TS_TEST"; + TimeSeriesWithDate fakeTs = buildTimeSeriesWithEntryDate(officeId, tsId); + ContentType contentType = Formats.parseHeader(format, TimeSeriesWithDate.class); + String formatted = Formats.format(contentType, fakeTs); + assertTrue(formatted.contains("quality-code")); + assertTrue(formatted.contains("data-entry-date")); + TimeSeriesWithDate ts2 = Formats.parseContent(contentType, formatted, TimeSeriesWithDate.class); + assertNotNull(ts2); + assertSimilarWithDate(fakeTs, ts2); + } + @Test void testDeserializeTimeSeriesXmlUTC() { TimeZone aDefault = TimeZone.getDefault(); @@ -228,7 +310,38 @@ private TimeSeries buildTimeSeries(String officeId, String tsId) { for(int i = 0; i < count; i++) { Timestamp dateTime = Timestamp.from(next.toInstant()); ts.addValue(dateTime, (double) i, 0); - next = next.plus(minutes, ChronoUnit.MINUTES); + next = next.plusMinutes(minutes); + } + return ts; + } + + @NotNull + private TimeSeriesWithDate buildTimeSeriesWithEntryDate(String officeId, String tsId) { + ZonedDateTime start = ZonedDateTime.parse("2021-06-21T08:00:00-07:00[PST8PDT]"); + ZonedDateTime end = ZonedDateTime.parse("2021-06-21T09:00:00-07:00[PST8PDT]"); + + long diff = end.toEpochSecond() - start.toEpochSecond(); + assertEquals(3600, diff); // just to make sure I've got the date parsing thing right. + + int minutes = 15; + int count = 60/15 ; // do I need a +1? ie should this be 12 or 13? + // Also, should end be the last point or the next interval? 
+ + TimeSeriesWithDate ts = new TimeSeriesWithDate(null, + -1, + 0, + tsId, + officeId, + start, + end, + "m", + Duration.ofMinutes(minutes)); + + ZonedDateTime next = start; + for(int i = 0; i < count; i++) { + Timestamp dateTime = Timestamp.from(next.toInstant()); + ts.addValue(dateTime, (double) i, 0, Timestamp.from(Instant.now())); + next = next.plusMinutes(minutes); } return ts; } diff --git a/cwms-data-api/src/test/java/cwms/cda/api/TimeseriesControllerTestIT.java b/cwms-data-api/src/test/java/cwms/cda/api/TimeseriesControllerTestIT.java index cfd95daa0..f79fdc9ac 100644 --- a/cwms-data-api/src/test/java/cwms/cda/api/TimeseriesControllerTestIT.java +++ b/cwms-data-api/src/test/java/cwms/cda/api/TimeseriesControllerTestIT.java @@ -21,6 +21,7 @@ import io.restassured.filter.log.LogDetail; import io.restassured.path.json.config.JsonPathConfig; import java.io.InputStream; +import java.nio.charset.StandardCharsets; import java.sql.PreparedStatement; import java.sql.SQLException; import java.time.ZonedDateTime; @@ -42,7 +43,7 @@ void test_lrl_timeseries_psuedo_reg1hour() throws Exception { InputStream resource = this.getClass().getResourceAsStream( "/cwms/cda/api/lrl/pseudo_reg_1hour.json"); assertNotNull(resource); - String tsData = IOUtils.toString(resource, "UTF-8"); + String tsData = IOUtils.toString(resource, StandardCharsets.UTF_8); JsonNode ts = mapper.readTree(tsData); String location = ts.get("name").asText().split("\\.")[0]; @@ -75,7 +76,6 @@ void test_lrl_timeseries_psuedo_reg1hour() throws Exception { .config(RestAssured.config().jsonConfig(jsonConfig().numberReturnType(JsonPathConfig.NumberReturnType.DOUBLE))) .log().ifValidationFails(LogDetail.ALL,true) .accept(Formats.JSONV2) -// .body(tsData) .header("Authorization",user.toHeaderValue()) .queryParam("office",officeId) .queryParam("units","cfs") @@ -109,7 +109,7 @@ void test_lrl_1day() throws Exception { InputStream resource = this.getClass().getResourceAsStream( "/cwms/cda/api/lrl/1day_offset.json"); assertNotNull(resource); - String tsData = IOUtils.toString(resource, "UTF-8"); + String tsData = IOUtils.toString(resource, StandardCharsets.UTF_8); JsonNode ts = mapper.readTree(tsData); String location = ts.get("name").asText().split("\\.")[0]; @@ -143,7 +143,6 @@ void test_lrl_1day() throws Exception { .config(RestAssured.config().jsonConfig(jsonConfig().numberReturnType(JsonPathConfig.NumberReturnType.DOUBLE))) .log().ifValidationFails(LogDetail.ALL, true) .accept(Formats.JSONV2) -// .body(tsData) .header("Authorization", user.toHeaderValue()) .queryParam("office", officeId) .queryParam("units", "F") @@ -173,7 +172,7 @@ void test_lrl_1day_bad_units() throws Exception { InputStream resource = this.getClass().getResourceAsStream( "/cwms/cda/api/lrl/1day_offset_bad_units.json"); assertNotNull(resource); - String tsData = IOUtils.toString(resource, "UTF-8"); + String tsData = IOUtils.toString(resource, StandardCharsets.UTF_8); JsonNode ts = mapper.readTree(tsData); String location = ts.get("name").asText().split("\\.")[0]; @@ -211,7 +210,7 @@ void test_lrl_1day_malicious_units() throws Exception { InputStream resource = this.getClass().getResourceAsStream( "/cwms/cda/api/lrl/1day_offset_malicious_units.json"); assertNotNull(resource); - String tsData = IOUtils.toString(resource, "UTF-8"); + String tsData = IOUtils.toString(resource, StandardCharsets.UTF_8); ObjectMapper mapper = new ObjectMapper(); JsonNode ts = mapper.readTree(tsData); @@ -242,6 +241,91 @@ void test_lrl_1day_malicious_units() throws Exception { } + @Test + 
void test_include_data_entry_date() throws Exception { + ObjectMapper mapper = new ObjectMapper(); + final String includeDataEntryDate = "include-entry-date"; + + InputStream resource = this.getClass().getResourceAsStream( + "/cwms/cda/api/spk/num_ts_create2.json"); + assertNotNull(resource); + + String tsData = IOUtils.toString(resource, StandardCharsets.UTF_8); + + JsonNode ts = mapper.readTree(tsData); + String location = ts.get("name").asText().split("\\.")[0]; + String officeId = ts.get("office-id").asText(); + createLocation(location, true, officeId); + + TestAccounts.KeyUser user = TestAccounts.KeyUser.SPK_NORMAL; + + // inserting the time series + given() + .log().ifValidationFails(LogDetail.ALL, true) + .accept(Formats.JSONV2) + .contentType(Formats.JSONV2) + .body(tsData) + .header("Authorization", user.toHeaderValue()) + .queryParam("office", officeId) + .when() + .redirects().follow(true) + .redirects().max(3) + .post("/timeseries/") + .then() + .log().ifValidationFails(LogDetail.ALL, true) + .assertThat() + .statusCode(is(HttpServletResponse.SC_OK)); + + // 1675335600000 is Thursday, February 2, 2023 11:00:00 AM + // fyi 1675422000000 is Friday, February 3, 2023 11:00:00 AM + + // get it back with the data entry date + given() + .log().ifValidationFails(LogDetail.ALL, true) + .accept(Formats.JSONV2) + .header("Authorization", user.toHeaderValue()) + .queryParam(Controllers.OFFICE, officeId) + .queryParam(Controllers.UNIT, "CFS") + .queryParam(Controllers.NAME, ts.get("name").asText()) + .queryParam(Controllers.BEGIN, "2007-02-02T11:00:00Z") + .queryParam(Controllers.END, "2010-02-03T11:00:00Z") + .queryParam(Controllers.VERSION_DATE, "2021-06-20T08:00:00-0000[UTC]") + .queryParam(includeDataEntryDate, true) + .when() + .redirects().follow(true) + .redirects().max(3) + .get("/timeseries/") + .then() + .log().ifValidationFails(LogDetail.ALL, true) + .assertThat() + .statusCode(is(HttpServletResponse.SC_OK)) + .body("values.size()", equalTo(4)) + .body("values[0][1]", equalTo(4.0F)) + .body("values[0].size()", equalTo(4)); + + // get it back without the data entry date + given() + .log().ifValidationFails(LogDetail.ALL, true) + .accept(Formats.JSONV2) + .header("Authorization", user.toHeaderValue()) + .queryParam(Controllers.OFFICE, officeId) + .queryParam(Controllers.UNIT, "CFS") + .queryParam(Controllers.NAME, ts.get("name").asText()) + .queryParam(Controllers.BEGIN, "2007-02-02T11:00:00Z") + .queryParam(Controllers.END, "2010-02-03T11:00:00Z") + .queryParam(Controllers.VERSION_DATE, "2021-06-20T08:00:00-0000[UTC]") + .when() + .redirects().follow(true) + .redirects().max(3) + .get("/timeseries/") + .then() + .log().ifValidationFails(LogDetail.ALL, true) + .assertThat() + .statusCode(is(HttpServletResponse.SC_OK)) + .body("values.size()", equalTo(4)) + .body("values[0][1]", equalTo(4.0F)) + .body("values[0].size()", equalTo(3)); + } @Test void test_delete_ts() throws Exception { @@ -251,7 +335,7 @@ void test_delete_ts() throws Exception { "/cwms/cda/api/lrl/1day_offset.json"); assertNotNull(resource); - String tsData = IOUtils.toString(resource, "UTF-8"); + String tsData = IOUtils.toString(resource, StandardCharsets.UTF_8); JsonNode ts = mapper.readTree(tsData); String location = ts.get("name").asText().split("\\.")[0]; @@ -330,7 +414,7 @@ void test_no_office_permissions() throws Exception { InputStream resource = this.getClass().getResourceAsStream( "/cwms/cda/api/timeseries/no_office_perms.json"); assertNotNull(resource); - String tsData = IOUtils.toString(resource, "UTF-8"); 
+ String tsData = IOUtils.toString(resource, StandardCharsets.UTF_8); JsonNode ts = mapper.readTree(tsData); String location = ts.get("name").asText().split("\\.")[0]; @@ -365,7 +449,7 @@ void test_v1_cant_trim() throws Exception { "/cwms/cda/api/lrl/1day_offset.json"); assertNotNull(resource); - String tsData = IOUtils.toString(resource, "UTF-8"); + String tsData = IOUtils.toString(resource, StandardCharsets.UTF_8); ObjectMapper mapper = new ObjectMapper(); JsonNode ts = mapper.readTree(tsData); @@ -402,7 +486,7 @@ void test_v1_cant_version() throws Exception { "/cwms/cda/api/lrl/1day_offset.json"); assertNotNull(resource); - String tsData = IOUtils.toString(resource, "UTF-8"); + String tsData = IOUtils.toString(resource, StandardCharsets.UTF_8); ObjectMapper mapper = new ObjectMapper(); JsonNode ts = mapper.readTree(tsData); @@ -441,7 +525,7 @@ void test_v2_cant_datum() throws Exception { "/cwms/cda/api/lrl/1day_offset.json"); assertNotNull(resource); - String tsData = IOUtils.toString(resource, "UTF-8"); + String tsData = IOUtils.toString(resource, StandardCharsets.UTF_8); ObjectMapper mapper = new ObjectMapper(); JsonNode ts = mapper.readTree(tsData); @@ -479,7 +563,7 @@ void test_lrl_trim() throws Exception { InputStream resource = this.getClass().getResourceAsStream( "/cwms/cda/api/lrl/1day_offset.json"); assertNotNull(resource); - String tsData = IOUtils.toString(resource, "UTF-8"); + String tsData = IOUtils.toString(resource, StandardCharsets.UTF_8); JsonNode ts = mapper.readTree(tsData); String location = ts.get("name").asText().split("\\.")[0]; @@ -522,7 +606,6 @@ void test_lrl_trim() throws Exception { .config(RestAssured.config().jsonConfig(jsonConfig().numberReturnType(JsonPathConfig.NumberReturnType.DOUBLE))) .log().ifValidationFails(LogDetail.ALL, true) .accept(Formats.JSONV2) -// .body(tsData) .header("Authorization", user.toHeaderValue()) .queryParam("office", officeId) .queryParam("units", "F") @@ -549,7 +632,6 @@ void test_lrl_trim() throws Exception { .config(RestAssured.config().jsonConfig(jsonConfig().numberReturnType(JsonPathConfig.NumberReturnType.DOUBLE))) .log().ifValidationFails(LogDetail.ALL, true) .accept(Formats.JSONV2) -// .body(tsData) .header("Authorization", user.toHeaderValue()) .queryParam("office", officeId) .queryParam("units", "F") @@ -581,7 +663,7 @@ void test_big_create() throws Exception { InputStream resource = this.getClass().getResourceAsStream( "/cwms/cda/api/lrl/1day_offset.json"); assertNotNull(resource); - String tsData = IOUtils.toString(resource, "UTF-8"); + String tsData = IOUtils.toString(resource, StandardCharsets.UTF_8); String giantString = buildBigString(tsData, 200000); // 200k points looked like about 6MB. 
@@ -661,7 +743,7 @@ private String buildBigString(String tsData, int count) throws JsonProcessingExc StringBuilder sb = new StringBuilder(); for (int i = 0; i < count; i++) { long time = start2 + (diff * (i+1)); - sb.append(String.format(",\n [ %d, %d, %d]", time, count, 0)); + sb.append(String.format(",%n [ %d, %d, %d]", time, count, 0)); } return prefix + sb + "\n ]\n}"; @@ -674,7 +756,7 @@ void test_daylight_saving_retrieve()throws Exception { InputStream resource = this.getClass().getResourceAsStream( "/cwms/cda/api/lrl/1hour.json"); assertNotNull(resource); - String tsData = IOUtils.toString(resource, "UTF-8"); + String tsData = IOUtils.toString(resource, StandardCharsets.UTF_8); int count = 365 * 24 * 5; // 5 years of hourly data (43.8k points) @@ -794,7 +876,7 @@ void test_lrl_1day_content_type_aliasing(GetAllTest test) throws Exception InputStream resource = this.getClass().getResourceAsStream( "/cwms/cda/api/lrl/1day_offset.json"); assertNotNull(resource); - String tsData = IOUtils.toString(resource, "UTF-8"); + String tsData = IOUtils.toString(resource, StandardCharsets.UTF_8); JsonNode ts = mapper.readTree(tsData); String location = ts.get("name").asText().split("\\.")[0]; @@ -850,13 +932,13 @@ enum GetAllTest XMLV2(Formats.XMLV2, Formats.XMLV2), ; - final String _accept; - final String _expectedContentType; + final String accept; + final String expectedContentType; GetAllTest(String accept, String expectedContentType) { - _accept = accept; - _expectedContentType = expectedContentType; + this.accept = accept; + this.expectedContentType = expectedContentType; } } } diff --git a/cwms-data-api/src/test/java/cwms/cda/data/dao/TimeSeriesDaoTest.java b/cwms-data-api/src/test/java/cwms/cda/data/dao/TimeSeriesDaoTest.java index 46e96baf5..dc68ee4f8 100644 --- a/cwms-data-api/src/test/java/cwms/cda/data/dao/TimeSeriesDaoTest.java +++ b/cwms-data-api/src/test/java/cwms/cda/data/dao/TimeSeriesDaoTest.java @@ -1,6 +1,5 @@ package cwms.cda.data.dao; -import java.math.BigDecimal; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; @@ -14,6 +13,7 @@ import java.util.logging.Level; import java.util.logging.Logger; +import cwms.cda.data.dto.TimeSeriesWithDate; import org.jooq.DSLContext; import org.jooq.Record1; import org.jooq.impl.DSL; @@ -24,7 +24,6 @@ import usace.cwms.db.dao.util.CwmsDatabaseVersionInfo; import usace.cwms.db.dao.util.TimeValueQuality; import usace.cwms.db.jooq.JooqCwmsDatabaseVersionInfoFactory; -import usace.cwms.db.jooq.codegen.tables.AV_CWMS_TS_ID2; import usace.cwms.db.jooq.codegen.tables.AV_LOC; import usace.cwms.db.jooq.dao.CwmsDbLocJooq; import usace.cwms.db.jooq.dao.CwmsDbTsJooq; @@ -114,7 +113,7 @@ public class TimeSeriesDaoTest @Test - public void testCreateEmpty() throws Exception + void testCreateEmpty() throws Exception { String officeId = "LRL"; @@ -144,7 +143,7 @@ public void testCreateEmpty() throws Exception } @Test - public void testCreateWithData() throws Exception + void testCreateWithData() throws Exception { String officeId = "LRL"; @@ -153,7 +152,6 @@ public void testCreateWithData() throws Exception DSLContext lrl = getDslContext(connection, officeId); TimeSeriesDao dao = new TimeSeriesDaoImpl(lrl); - Calendar instance = Calendar.getInstance(); String tsId = TIME_SERIES_ID; // Do I need to somehow check whether the location exists? Its not going to exist if I add the millis to it... 
if(!locationExists(connection, "RYAN3")) @@ -171,7 +169,7 @@ public void testCreateWithData() throws Exception int count = 60 / 15; // do I need a +1? ie should this be 12 or 13? // Also, should end be the last point or the next interval? - TimeSeries ts = new TimeSeries(null, -1, 0, tsId, officeId, start, end, "m", Duration.ofMinutes(minutes)); + TimeSeriesWithDate ts = new TimeSeriesWithDate(null, -1, 0, tsId, officeId, start, end, "m", Duration.ofMinutes(minutes)); ZonedDateTime next = start; for(int i = 0; i < count; i++) @@ -209,17 +207,8 @@ private void storeLocation(Connection connection, String officeId, String locati locationId, null, null, true, true); } - private BigDecimal retrieveTsCode(Connection connection, String tsId) throws Exception - { - BigDecimal bigD = DSL.using(connection).select(AV_CWMS_TS_ID2.AV_CWMS_TS_ID2.TS_CODE).from( - AV_CWMS_TS_ID2.AV_CWMS_TS_ID2).where(AV_CWMS_TS_ID2.AV_CWMS_TS_ID2.CWMS_TS_ID.eq(tsId)).fetchOptional( - AV_CWMS_TS_ID2.AV_CWMS_TS_ID2.TS_CODE).orElse(null); - - return bigD; - } - @Test - public void testTimeSeriesStoreRetrieve() throws Exception + void testTimeSeriesStoreRetrieve() throws Exception { Connection connection = getConnection(); @@ -265,15 +254,15 @@ private void createTs(CwmsDbTsJooq cwmsTsJdbc, Connection connection) throws SQL } catch(Exception e) { + LOGGER.log(Level.CONFIG, "Unable to create TimeSeries: " + e.getMessage()); } } @Test - public void testVersion() throws SQLException + void testVersion() throws SQLException { JooqCwmsDatabaseVersionInfoFactory fac = new JooqCwmsDatabaseVersionInfoFactory(); - String officeId = "LRL"; try(Connection connection = getConnection()) { CwmsDatabaseVersionInfo info = fac.retrieveVersionInfo(connection);
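// A minimal usage sketch of the include-entry-date behavior added in this diff,
// mirroring the test helper buildTimeSeriesWithEntryDate. The class name
// IncludeEntryDateSketch and the literal values below are illustrative
// assumptions; the TimeSeriesWithDate constructor, the four-argument addValue
// overload, and the Formats calls are the ones introduced or exercised above.
import cwms.cda.data.dto.TimeSeriesWithDate;
import cwms.cda.formatters.ContentType;
import cwms.cda.formatters.Formats;
import java.sql.Timestamp;
import java.time.Duration;
import java.time.Instant;
import java.time.ZonedDateTime;

public final class IncludeEntryDateSketch {
    public static void main(String[] args) {
        ZonedDateTime start = ZonedDateTime.parse("2021-06-21T08:00:00-07:00[PST8PDT]");
        ZonedDateTime end = ZonedDateTime.parse("2021-06-21T09:00:00-07:00[PST8PDT]");

        // Same shape as the unit-test helper: page=null, pageSize=-1, total=0.
        TimeSeriesWithDate ts = new TimeSeriesWithDate(null, -1, 0,
                "RYAN3.Stage.Inst.5Minutes.0.ZSTORE_TS_TEST", "LRL",
                start, end, "m", Duration.ofMinutes(15));

        // Each record now carries a fourth column, the data entry date.
        ts.addValue(Timestamp.from(start.toInstant()), 1.0, 0, Timestamp.from(Instant.now()));
        ts.addValue(Timestamp.from(start.plusMinutes(15).toInstant()), 2.0, 0, Timestamp.from(Instant.now()));

        // Records serialize as [date-time, value, quality-code, data-entry-date];
        // when include-entry-date is false (the default) the controller returns the
        // plain TimeSeries type and each record has only three columns, as checked
        // by test_include_data_entry_date above.
        ContentType contentType = Formats.parseHeader(Formats.JSONV2, TimeSeriesWithDate.class);
        System.out.println(Formats.format(contentType, ts));
    }
}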