Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Error logging limited #112

Merged
merged 10 commits into from
Oct 16, 2018
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ private void unRegisterJob () {
/**
* This method must be overridden by subclasses to perform the core steps of the job.
*/
public abstract void jobLogic();
public abstract void jobLogic() throws Exception;

/**
* This method may be overridden in the event that you want to perform a special final step after this job and
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
import java.util.Date;

import static com.conveyal.datatools.common.utils.SparkUtils.haltWithMessage;
import static spark.Spark.halt;

/**
* Created by landon on 8/2/16.
Expand All @@ -40,7 +39,7 @@ public static String uploadBranding(Request req, String key) throws IOException,

String s3Bucket = DataManager.getConfigPropertyAsText("application.data.gtfs_s3_bucket");
if (s3Bucket == null) {
halt(400);
haltWithMessage(req, 400, "s3bucket is incorrectly configured on server");
}

// Get file from request
Expand All @@ -58,7 +57,7 @@ public static String uploadBranding(Request req, String key) throws IOException,
IOUtils.copy(inputStream, out);
} catch (Exception e) {
e.printStackTrace();
haltWithMessage(400, "Unable to read uploaded file");
haltWithMessage(req, 400, "Unable to read uploaded file");
}

try {
Expand All @@ -73,7 +72,7 @@ public static String uploadBranding(Request req, String key) throws IOException,
return url;
} catch (AmazonServiceException ase) {
ase.printStackTrace();
haltWithMessage(400, "Error uploading file to S3");
haltWithMessage(req, 400, "Error uploading file to S3");
return null;
} finally {
boolean deleted = tempFile.delete();
Expand Down
140 changes: 126 additions & 14 deletions src/main/java/com/conveyal/datatools/common/utils/SparkUtils.java
Original file line number Diff line number Diff line change
@@ -1,17 +1,25 @@
package com.conveyal.datatools.common.utils;

import com.conveyal.datatools.manager.auth.Auth0UserProfile;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.io.ByteStreams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import spark.HaltException;
import spark.Request;
import spark.Response;

import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Arrays;

import static com.conveyal.datatools.manager.DataManager.getConfigPropertyAsText;
import static spark.Spark.halt;

/**
Expand All @@ -20,12 +28,14 @@
public class SparkUtils {
private static final Logger LOG = LoggerFactory.getLogger(SparkUtils.class);
private static final ObjectMapper mapper = new ObjectMapper();
private static final String BASE_URL = getConfigPropertyAsText("application.public_url");
private static final int DEFAULT_LINES_TO_PRINT = 10;

/**
* Write out the supplied file to the Spark response as an octet-stream.
*/
public static HttpServletResponse downloadFile(File file, String filename, Response res) {
if (file == null) haltWithMessage(404, "File is null");
public static HttpServletResponse downloadFile(File file, String filename, Request req, Response res) {
if (file == null) haltWithMessage(req, 404, "File is null");
HttpServletResponse raw = res.raw();
raw.setContentType("application/octet-stream");
raw.setHeader("Content-Disposition", "attachment; filename=" + filename);
Expand All @@ -41,7 +51,8 @@ public static HttpServletResponse downloadFile(File file, String filename, Respo
outputStream.flush();
} catch (Exception e) {
LOG.error("Could not write file to output stream", e);
haltWithMessage(500, "Error serving GTFS file");
e.printStackTrace();
haltWithMessage(req, 500, "Error serving GTFS file", e);
}
return raw;
}
Expand All @@ -56,31 +67,51 @@ public static String formatJSON (String key, String value) {
}

/**
* Constructs a JSON string with a result (i.e., OK or ERR), message, code, and if the exception argument is
* Constructs an object node with a result (i.e., OK or ERR), message, code, and, if the exception argument is
* supplied, details about the exception encountered.
*/
public static String formatJSON(String message, int code, Exception e) {
public static ObjectNode getObjectNode(String message, int code, Exception e) {
String detail = e != null ? e.getMessage() : null;
return mapper.createObjectNode()
.put("result", code >= 400 ? "ERR" : "OK")
.put("message", message)
.put("code", code)
.put("detail", detail)
.toString();
.put("result", code >= 400 ? "ERR" : "OK")
.put("message", message)
.put("code", code)
.put("detail", detail);
}

/**
 * Constructs a JSON string with a result (i.e., OK or ERR), message, code, and, if the exception argument is
 * supplied, details about the exception encountered. Serializes the node built by {@link #getObjectNode}.
 */
public static String formatJSON(String message, int code, Exception e) {
    return getObjectNode(message, code, e).toString();
}

/**
* Wrapper around Spark halt method that formats message as JSON using {@link SparkUtils#formatJSON}.
*/
public static void haltWithMessage(int statusCode, String message) throws HaltException {
halt(statusCode, formatJSON(message, statusCode));
// Convenience overload: delegates to the four-argument version with no exception details.
public static void haltWithMessage(Request request, int statusCode, String message) throws HaltException {
    haltWithMessage(request, statusCode, message, null);
}

/**
* Wrapper around Spark halt method that formats the message as JSON using {@link SparkUtils#formatJSON}. If an
* exception is supplied, its details are included in the JSON response.
*/
public static void haltWithMessage(int statusCode, String message, Exception e) throws HaltException {
halt(statusCode, formatJSON(message, statusCode, e));
public static void haltWithMessage(Request request, int statusCode, String message, Exception e) throws HaltException {
    // Build the error payload once; it is both logged and sent back to the client.
    JsonNode errorJson = getObjectNode(message, statusCode, e);
    String logString;
    try {
        // Leading newline so the pretty-printed JSON starts on its own line in the log output.
        logString = "\n" + mapper.writerWithDefaultPrettyPrinter().writeValueAsString(errorJson);
    } catch (JsonProcessingException jpe) {
        // Fall back to the bare message if the payload cannot be serialized for logging.
        logString = message;
    }
    // Log as a response (logRequest=false) before halting.
    logRequestOrResponse(false, request, logString, statusCode);
    halt(statusCode, errorJson.toString());
}

/**
Expand All @@ -99,4 +130,85 @@ public static String formatJobMessage (String jobId, String message) {
.put("message", message)
.toString();
}

/**
 * Log an incoming Spark request. Delegates to {@link #logRequestOrResponse(boolean, Request, Response)}
 * with logRequest=true.
 */
public static void logRequest(Request request, Response response) {
    logRequestOrResponse(true, request, response);
}

/**
 * Log an outgoing Spark response. Delegates to {@link #logRequestOrResponse(boolean, Request, Response)}
 * with logRequest=false.
 */
public static void logResponse(Request request, Response response) {
    logRequestOrResponse(false, request, response);
}

/**
 * Log a request or response, pretty-printing the body when its content-type is JSON.
 *
 * @param logRequest true to log the request side, false to log the response side
 */
public static void logRequestOrResponse(boolean logRequest, Request request, Response response) {
    // NOTE: the body must not be read into a string until the content-type has been confirmed to be JSON.
    HttpServletResponse rawResponse = response.raw();
    String body = "";
    try {
        // Requests expose their content type directly; responses only via the raw servlet header.
        String contentType = logRequest
            ? request.contentType()
            : rawResponse.getHeader("content-type");
        if ("application/json".equals(contentType)) {
            body = logRequest ? request.body() : response.body();
            if (body == null) {
                body = "{body content is null}";
            } else {
                // Pretty print non-empty JSON; leading newline for legibility when printing.
                body = "\n" + mapper.writerWithDefaultPrettyPrinter().writeValueAsString(mapper.readTree(body));
            }
        } else if (contentType != null) {
            body = String.format("\nnon-JSON body type: %s", contentType);
        }
    } catch (IOException e) {
        LOG.warn("Could not parse JSON", e);
        // body still holds the raw (unparseable) text at this point.
        body = "\nBad JSON:\n" + body;
    }
    logRequestOrResponse(logRequest, request, body, rawResponse.getStatus());
}

/**
 * Log a request or response with user email, HTTP method, full URL, and a (possibly trimmed) body string.
 */
public static void logRequestOrResponse(boolean logRequest, Request request, String bodyString, int statusCode) {
    // "user" attribute is set by the auth filter; absent for unauthenticated requests.
    Auth0UserProfile userProfile = request.attribute("user");
    String userEmail = (userProfile == null) ? "no-auth" : userProfile.getEmail();
    String label = logRequest ? "req" : String.format("res (%s)", statusCode);
    String queryString = request.queryParams().size() > 0 ? "?" + request.queryString() : "";
    LOG.info(
        "{} {} {}: {}{}{}{}",
        label,
        userEmail,
        request.requestMethod(),
        BASE_URL,
        request.pathInfo(),
        queryString,
        trimLines(bodyString)
    );
}

/**
 * Truncate a multi-line string to at most {@link #DEFAULT_LINES_TO_PRINT} lines, appending a note stating how
 * many lines were omitted. Returns the string unchanged if it is short enough, or "" if it is null.
 */
private static String trimLines(String str) {
    if (str == null) return "";
    String[] lines = str.split("\n");
    if (lines.length <= DEFAULT_LINES_TO_PRINT) return str;
    // copyOfRange's end index is exclusive, so use DEFAULT_LINES_TO_PRINT (not DEFAULT_LINES_TO_PRINT - 1) to
    // actually keep DEFAULT_LINES_TO_PRINT lines; otherwise the "...and %d more lines" count is one short of
    // the number of lines really omitted.
    return String.format(
        "%s \n...and %d more lines",
        String.join("\n", Arrays.copyOfRange(lines, 0, DEFAULT_LINES_TO_PRINT)),
        lines.length - DEFAULT_LINES_TO_PRINT
    );
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -69,12 +69,12 @@ private static String lockFeed (Request req, Response res) {
} else if (!currentSession.userId.equals(userProfile.getUser_id())) {
// If the session has not expired, and another user has the active session.
LOG.warn("Edit session {} for user {} in progress for feed {}. User {} not permitted to lock feed for {} minutes.", currentSession.sessionId, currentSession.userEmail, currentSession.feedId, userProfile.getEmail(), minutesUntilExpiration);
haltWithMessage(400, getLockedFeedMessage(currentSession, minutesUntilExpiration));
haltWithMessage(req, 400, getLockedFeedMessage(currentSession, minutesUntilExpiration));
return null;
} else {
String sessionId = req.session().id();
LOG.warn("User {} is editing feed {} in another session {}. Cannot create lock for session {}", userProfile.getEmail(), feedId, currentSession.sessionId, sessionId);
haltWithMessage(400, "Warning! You are editing this feed in another session/browser tab!");
haltWithMessage(req, 400, "Warning! You are editing this feed in another session/browser tab!");
return null;
}
}
Expand Down Expand Up @@ -109,7 +109,7 @@ private static String maintainLock(Request req, Response res) {
if (currentSession == null) {
// If there is no current session to maintain, request that user reloads browser.
LOG.warn("No active editor session to maintain {}.", sessionId);
haltWithMessage(400, "No active session for feedId. Please refresh your browser and try editing later.");
haltWithMessage(req, 400, "No active session for feedId. Please refresh your browser and try editing later.");
return null;
} else if (!currentSession.sessionId.equals(sessionId)) {
long secondsSinceLastCheckIn = TimeUnit.MILLISECONDS.toSeconds (System.currentTimeMillis() - currentSession.lastCheckIn);
Expand All @@ -122,10 +122,10 @@ private static String maintainLock(Request req, Response res) {
// If the new current session is held by this user, give them the option to evict the current session /
// unlock the feed.
LOG.warn("User {} already has an active editor session () for feed {}.", userProfile.getEmail(), currentSession.sessionId, currentSession.feedId);
haltWithMessage(400, "Warning! You have an active editing session for this feed underway in a different browser tab.");
haltWithMessage(req, 400, "Warning! You have an active editing session for this feed underway in a different browser tab.");
} else {
LOG.warn("User {} attempted editor session for feed {} while active session underway for user {}.", userProfile.getEmail(), currentSession.feedId, currentSession.userEmail);
haltWithMessage(400, getLockedFeedMessage(currentSession, minutesUntilExpiration));
haltWithMessage(req, 400, getLockedFeedMessage(currentSession, minutesUntilExpiration));
}
return null;
} else {
Expand Down
Loading