Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fixes #634 - Implemented data entry date option for TS data retrieval #927

Open
wants to merge 15 commits into
base: develop
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 10 additions & 6 deletions cwms-data-api/src/main/java/cwms/cda/api/TimeSeriesController.java
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Post/Patch should throw an error if the client is sending a time series with a data-entry-date as that field isn't editable

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Added a check for data entry date values

Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,6 @@
import io.javalin.plugin.openapi.annotations.OpenApiParam;
import io.javalin.plugin.openapi.annotations.OpenApiRequestBody;
import io.javalin.plugin.openapi.annotations.OpenApiResponse;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
Expand All @@ -84,7 +83,7 @@

public class TimeSeriesController implements CrudHandler {
private static final Logger logger = Logger.getLogger(TimeSeriesController.class.getName());

private static final String INCLUDE_ENTRY_DATE = "include-entry-date";
adamkorynta marked this conversation as resolved.
Show resolved Hide resolved
public static final String TAG = "TimeSeries";
public static final String STORE_RULE_DESC = "The business rule to use "
+ "when merging the incoming with existing data\n"
Expand Down Expand Up @@ -204,7 +203,7 @@ public void create(@NotNull Context ctx) {
TimeSeries timeSeries = deserializeTimeSeries(ctx);
dao.create(timeSeries, createAsLrts, storeRule, overrideProtection);
ctx.status(HttpServletResponse.SC_OK);
} catch (IOException | DataAccessException ex) {
} catch (DataAccessException ex) {
CdaError re = new CdaError("Internal Error");
logger.log(Level.SEVERE, re.toString(), ex);
ctx.status(HttpServletResponse.SC_INTERNAL_SERVER_ERROR).json(re);
Expand Down Expand Up @@ -382,6 +381,8 @@ public void delete(@NotNull Context ctx, @NotNull String timeseries) {
+ "\n* `xml`"
+ "\n* `wml2` (only if name field is specified)"
+ "\n* `json` (default)"),
@OpenApiParam(name = INCLUDE_ENTRY_DATE, type = Boolean.class, description = "Specifies "
+ "whether to include the data entry date in the response. Default is false."),
adamkorynta marked this conversation as resolved.
Show resolved Hide resolved
@OpenApiParam(name = PAGE, description = "This end point can return large amounts "
+ "of data as a series of pages. This parameter is used to describes the "
+ "current location in the response stream. This is an opaque "
Expand Down Expand Up @@ -431,6 +432,9 @@ public void getAll(@NotNull Context ctx) {

ZonedDateTime versionDate = queryParamAsZdt(ctx, VERSION_DATE);

boolean includeEntryDate = ctx.queryParamAsClass(INCLUDE_ENTRY_DATE, Boolean.class)
.getOrDefault(false);

// The following parameters are only used for jsonv2 and xmlv2
String cursor = queryParamAsClass(ctx, new String[]{PAGE, CURSOR},
String.class, "", metrics, name(TimeSeriesController.class.getName(),
Expand Down Expand Up @@ -463,7 +467,7 @@ public void getAll(@NotNull Context ctx) {

String office = requiredParam(ctx, OFFICE);
TimeSeries ts = dao.getTimeseries(cursor, pageSize, names, office, unit,
beginZdt, endZdt, versionDate, trim.getOrDefault(true));
beginZdt, endZdt, versionDate, trim.getOrDefault(true), includeEntryDate);

results = Formats.format(contentType, ts);

Expand Down Expand Up @@ -573,14 +577,14 @@ public void update(@NotNull Context ctx, @NotNull String id) {
dao.store(timeSeries, createAsLrts, storeRule, overrideProtection);

ctx.status(HttpServletResponse.SC_OK);
} catch (IOException | DataAccessException ex) {
} catch (DataAccessException ex) {
CdaError re = new CdaError("Internal Error");
logger.log(Level.SEVERE, re.toString(), ex);
ctx.status(HttpServletResponse.SC_INTERNAL_SERVER_ERROR).json(re);
}
}

private TimeSeries deserializeTimeSeries(Context ctx) throws IOException {
private TimeSeries deserializeTimeSeries(Context ctx) {
String contentTypeHeader = ctx.req.getContentType();
ContentType contentType = Formats.parseHeader(contentTypeHeader, TimeSeries.class);
return Formats.parseContent(contentType, ctx.bodyAsInputStream(), TimeSeries.class);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,6 @@
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.TimeZone;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
Expand Down Expand Up @@ -623,7 +622,7 @@ private static TimeSeries buildTimeSeries(ILocationLevelRef levelRef, Interval i
if (qualityCode != null) {
quality = qualityCode.intValue();
}
timeSeries.addValue(dateTime, value, quality);
timeSeries.addValue(dateTime, value, quality, null);
adamkorynta marked this conversation as resolved.
Show resolved Hide resolved
}
return timeSeries;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ void store(TimeSeries timeSeries, boolean createAsLrts,

TimeSeries getTimeseries(String cursor, int pageSize, String names, String office,
String unit, ZonedDateTime begin, ZonedDateTime end,
ZonedDateTime versionDate, boolean trim);
ZonedDateTime versionDate, boolean trim, boolean includeEntryDate);

String getTimeseries(String format, String names, String office, String unit, String datum,
ZonedDateTime begin, ZonedDateTime end, ZoneId timezone);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,7 @@
import org.jooq.Record;
import org.jooq.Record1;
import org.jooq.Record3;
import org.jooq.Record4;
import org.jooq.Record7;
import org.jooq.Result;
import org.jooq.SQL;
Expand Down Expand Up @@ -165,7 +166,7 @@ public String getTimeseries(String format, String names, String office, String u
public TimeSeries getTimeseries(String page, int pageSize, String names, String office,
String units,
ZonedDateTime beginTime, ZonedDateTime endTime,
ZonedDateTime versionDate, boolean shouldTrim) {
ZonedDateTime versionDate, boolean shouldTrim, boolean includeEntryDate) {
TimeSeries retVal = null;
String cursor = null;
Timestamp tsCursor = null;
Expand Down Expand Up @@ -237,7 +238,7 @@ public TimeSeries getTimeseries(String page, int pageSize, String names, String

// put all those columns together as "valid"
CommonTableExpression<Record7<BigDecimal, String, String, String, String, BigDecimal,
String>> valid =
String>> valid =
name("valid").fields("tscode", "tsid", "office_id", "loc_part", "units",
"interval", "parm_part")
.as(
Expand All @@ -249,7 +250,6 @@ public TimeSeries getTimeseries(String page, int pageSize, String names, String
unit.as("units"),
ival.as("interval"),
param.as("parm_part")

).from(validTs)
);

Expand Down Expand Up @@ -369,6 +369,8 @@ public TimeSeries getTimeseries(String page, int pageSize, String names, String
);
});

Field<Timestamp> dataEntryDate = field("DATA_ENTRY_DATE", Timestamp.class).as("data_entry_date");

if (pageSize != 0) {
SelectConditionStep<Record3<Timestamp, Double, BigDecimal>> query =
dsl.select(
Expand All @@ -391,14 +393,55 @@ public TimeSeries getTimeseries(String page, int pageSize, String names, String
query.limit(DSL.val(pageSize + 1));
}

logger.fine(() -> query.getSQL(ParamType.INLINED));
SelectConditionStep<Record3<Timestamp, Double, BigDecimal>> finalQuery = query;
logger.fine(() -> finalQuery.getSQL(ParamType.INLINED));

query.forEach(tsRecord -> timeseries.addValue(
tsRecord.getValue(dateTimeCol),
tsRecord.getValue(valueCol),
tsRecord.getValue(qualityNormCol).intValue()
)
);
if (includeEntryDate) {
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This query is doubling the time it takes to retrieve time series. Can this replace the retrieve_ts_out_tab calls?

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

While it could replace the retrieve_ts_out_tab call above, doing so would require implementing trim support in the query, as trimming is currently handled by the retrieve_ts_out_tab call. I haven't quite figured out the best way to do that, so maybe we can discuss this in more detail.

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If the schema needs to provide more, we can update the schema call.

SelectConditionStep<Record4<Timestamp, Double, BigDecimal, Timestamp>> query2 = dsl.select(
dateTimeCol,
valueCol,
qualityNormCol,
dataEntryDate
)
.from(AV_TSV_DQU.AV_TSV_DQU)
.where(dateTimeCol
.greaterOrEqual(CWMS_UTIL_PACKAGE.call_TO_TIMESTAMP__2(
DSL.nvl(DSL.val(tsCursor == null ? null :
tsCursor.toInstant().toEpochMilli()),
DSL.val(beginTime.toInstant().toEpochMilli())))))
.and(dateTimeCol
.lessOrEqual(CWMS_UTIL_PACKAGE.call_TO_TIMESTAMP__2(
DSL.val(endTime.toInstant().toEpochMilli())))
.and(AV_TSV_DQU.AV_TSV_DQU.CWMS_TS_ID.equalIgnoreCase(names))
.and(AV_TSV_DQU.AV_TSV_DQU.OFFICE_ID.eq(office))
.and(AV_TSV_DQU.AV_TSV_DQU.UNIT_ID.equalIgnoreCase(unit))
.and(AV_TSV_DQU.AV_TSV_DQU.VERSION_DATE.eq(versionDate == null ? null :
Timestamp.from(versionDate.toInstant())))
);

if (pageSize > 0) {
query2.limit(DSL.val(pageSize + 1));
}
query2.forEach(tsRecord -> {
assert timeseries != null;
timeseries.addValue(
tsRecord.getValue(dateTimeCol),
tsRecord.getValue(valueCol),
tsRecord.getValue(qualityNormCol).intValue(),
tsRecord.getValue(dataEntryDate)
);
});
} else {
query.forEach(tsRecord -> {
assert timeseries != null;
timeseries.addValue(
tsRecord.getValue(dateTimeCol),
tsRecord.getValue(valueCol),
tsRecord.getValue(qualityNormCol).intValue(),
null
);
});
}

retVal = timeseries;
}
Expand Down
Loading
Loading