Create Bugsnag reports when errors are encountered #108

Merged Jan 7, 2019 (12 commits)

2 changes: 1 addition & 1 deletion pom.xml
@@ -336,7 +336,7 @@
<!-- Error reporting -->
<dependency>
<groupId>com.bugsnag</groupId>
<version>[3.0,4.0)</version>
<version>3.3.0</version>
<artifactId>bugsnag</artifactId>
</dependency>
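
Pinning the client to 3.3.0 (instead of the open-ended [3.0,4.0) range) keeps builds reproducible, since a Maven version range resolves to whatever release is newest at build time. For orientation, a minimal sketch of the 3.x client API the rest of this diff relies on; the API key string is a placeholder, not project configuration:

import com.bugsnag.Bugsnag;
import com.bugsnag.Report;

public class BugsnagClientSketch {
    public static void main(String[] args) {
        // Construct the client with a project API key (placeholder value here).
        Bugsnag bugsnag = new Bugsnag("YOUR-BUGSNAG-API-KEY");
        // Build a report from an exception, attach context, and send it,
        // mirroring the calls added to SparkUtils below.
        Report report = bugsnag.buildReport(new RuntimeException("example error"));
        report.setUserEmail("user@example.com");
        bugsnag.notify(report);
    }
}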

28 changes: 17 additions & 11 deletions src/main/java/com/conveyal/datatools/common/utils/S3Utils.java
@@ -24,7 +24,7 @@
import java.net.URL;
import java.util.Date;

import static com.conveyal.datatools.common.utils.SparkUtils.haltWithMessage;
import static com.conveyal.datatools.common.utils.SparkUtils.logMessageAndHalt;

/**
* Created by landon on 8/2/16.
@@ -34,30 +34,37 @@ public class S3Utils {
private static final Logger LOG = LoggerFactory.getLogger(S3Utils.class);
private static final int REQUEST_TIMEOUT_MSEC = 30 * 1000;

public static String uploadBranding(Request req, String key) throws IOException, ServletException {
public static String uploadBranding(Request req, String key) {
String url;

String s3Bucket = DataManager.getConfigPropertyAsText("application.data.gtfs_s3_bucket");
if (s3Bucket == null) {
haltWithMessage(req, 400, "s3bucket is incorrectly configured on server");
logMessageAndHalt(
req,
500,
"s3bucket is incorrectly configured on server",
new Exception("s3bucket is incorrectly configured on server")
);
}

// Get file from request
if (req.raw().getAttribute("org.eclipse.jetty.multipartConfig") == null) {
MultipartConfigElement multipartConfigElement = new MultipartConfigElement(System.getProperty("java.io.tmpdir"));
req.raw().setAttribute("org.eclipse.jetty.multipartConfig", multipartConfigElement);
}
Part part = req.raw().getPart("file");
String extension = "." + part.getContentType().split("/", 0)[1];
File tempFile = File.createTempFile(key + "_branding", extension);
InputStream inputStream;
String extension = null;
File tempFile = null;
try {
Part part = req.raw().getPart("file");
extension = "." + part.getContentType().split("/", 0)[1];
tempFile = File.createTempFile(key + "_branding", extension);
InputStream inputStream;
inputStream = part.getInputStream();
FileOutputStream out = new FileOutputStream(tempFile);
IOUtils.copy(inputStream, out);
} catch (Exception e) {
} catch (IOException | ServletException e) {
e.printStackTrace();
haltWithMessage(req, 400, "Unable to read uploaded file");
logMessageAndHalt(req, 400, "Unable to read uploaded file");
}

try {
@@ -71,8 +78,7 @@ public static String uploadBranding(Request req, String key) throws IOException,
.withCannedAcl(CannedAccessControlList.PublicRead));
return url;
} catch (AmazonServiceException ase) {
ase.printStackTrace();
haltWithMessage(req, 400, "Error uploading file to S3");
logMessageAndHalt(req, 500, "Error uploading file to S3", ase);
return null;
} finally {
boolean deleted = tempFile.delete();
40 changes: 31 additions & 9 deletions src/main/java/com/conveyal/datatools/common/utils/SparkUtils.java
@@ -1,5 +1,7 @@
package com.conveyal.datatools.common.utils;

import com.bugsnag.Bugsnag;
import com.bugsnag.Report;
import com.conveyal.datatools.manager.auth.Auth0UserProfile;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
@@ -19,6 +21,7 @@
import java.io.IOException;
import java.util.Arrays;

import static com.conveyal.datatools.manager.DataManager.getBugsnag;
import static com.conveyal.datatools.manager.DataManager.getConfigPropertyAsText;
import static spark.Spark.halt;

@@ -36,7 +39,7 @@ public class SparkUtils {
* Write out the supplied file to the Spark response as an octet-stream.
*/
public static HttpServletResponse downloadFile(File file, String filename, Request req, Response res) {
if (file == null) haltWithMessage(req, 404, "File is null");
if (file == null) logMessageAndHalt(req, 404, "File is null");
HttpServletResponse raw = res.raw();
raw.setContentType("application/octet-stream");
raw.setHeader("Content-Disposition", "attachment; filename=" + filename);
@@ -50,10 +53,8 @@ public static HttpServletResponse downloadFile(File file, String filename, Reque
ByteStreams.copy(fileInputStream, outputStream);
// TODO: Is flushing the stream necessary?
outputStream.flush();
} catch (Exception e) {
LOG.error("Could not write file to output stream", e);
e.printStackTrace();
haltWithMessage(req, 500, "Error serving GTFS file", e);
} catch (IOException e) {
logMessageAndHalt(req, 500, "Could not write file to output stream", e);
}
return raw;
}
@@ -91,19 +92,40 @@ public static String formatJSON(String message, int code, Exception e) {
/**
* Wrapper around Spark halt method that formats message as JSON using {@link SparkUtils#formatJSON}.
*/
public static void haltWithMessage(Request request, int statusCode, String message) throws HaltException {
haltWithMessage(request, statusCode, message, null);
public static void logMessageAndHalt(Request request, int statusCode, String message) throws HaltException {
logMessageAndHalt(request, statusCode, message, null);
}

/**
* Wrapper around Spark halt method that formats message as JSON using {@link SparkUtils#formatJSON}. Exception
* Wrapper around Spark halt method that formats message as JSON using {@link SparkUtils#formatJSON}.
* Extra logic occurs for when the status code is >= 500. A Bugsnag report is created if
* Bugsnag is configured.
*/
public static void haltWithMessage(
public static void logMessageAndHalt(
Request request,
int statusCode,
String message,
Exception e
) throws HaltException {
// Note that halting occurred, also print error stacktrace if applicable
if (e != null) e.printStackTrace();
LOG.info("Halting with status code {}. Error message: {}.", statusCode, message);

if (statusCode >= 500) {
LOG.error(message);

// create report to notify bugsnag if configured
Bugsnag bugsnag = getBugsnag();
if (bugsnag != null && e != null) {
// create report to send to bugsnag
Report report = bugsnag.buildReport(e);
Auth0UserProfile userProfile = request.attribute("user");
String userEmail = userProfile != null ? userProfile.getEmail() : "no-auth";
report.setUserEmail(userEmail);
bugsnag.notify(report);
}
}

JsonNode json = getObjectNode(message, statusCode, e);
String logString = null;
try {
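
The new logMessageAndHalt obtains the client through DataManager.getBugsnag(), which is imported above but not shown in this diff. A minimal sketch of how that helper might be wired up, assuming the API key is read via getConfigPropertyAsText (the property name BUGSNAG_KEY and the fallback lookup are assumptions, not the project's actual configuration):

import com.bugsnag.Bugsnag;

public class DataManagerBugsnagSketch {
    private static Bugsnag bugsnag;

    // Hypothetical stand-in for DataManager.getBugsnag(); returns null when no key is configured,
    // which is why logMessageAndHalt null-checks the client before building a report.
    public static Bugsnag getBugsnag() {
        if (bugsnag == null) {
            String apiKey = getConfigPropertyAsText("BUGSNAG_KEY"); // assumed property name
            if (apiKey != null) {
                bugsnag = new Bugsnag(apiKey);
            }
        }
        return bugsnag;
    }

    // Placeholder for DataManager.getConfigPropertyAsText(String); the real method reads server config.
    private static String getConfigPropertyAsText(String name) {
        return System.getenv(name);
    }
}
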
src/main/java/com/conveyal/datatools/editor/controllers/EditorLockController.java
@@ -17,7 +17,7 @@
import java.util.Map;
import java.util.concurrent.TimeUnit;

import static com.conveyal.datatools.common.utils.SparkUtils.haltWithMessage;
import static com.conveyal.datatools.common.utils.SparkUtils.logMessageAndHalt;
import static spark.Spark.delete;
import static spark.Spark.post;
import static spark.Spark.put;
@@ -69,12 +69,12 @@ private static String lockFeed (Request req, Response res) {
} else if (!currentSession.userId.equals(userProfile.getUser_id())) {
// If the session has not expired, and another user has the active session.
LOG.warn("Edit session {} for user {} in progress for feed {}. User {} not permitted to lock feed for {} minutes.", currentSession.sessionId, currentSession.userEmail, currentSession.feedId, userProfile.getEmail(), minutesUntilExpiration);
haltWithMessage(req, 400, getLockedFeedMessage(currentSession, minutesUntilExpiration));
logMessageAndHalt(req, 400, getLockedFeedMessage(currentSession, minutesUntilExpiration));
return null;
} else {
String sessionId = req.session().id();
LOG.warn("User {} is editing feed {} in another session {}. Cannot create lock for session {}", userProfile.getEmail(), feedId, currentSession.sessionId, sessionId);
haltWithMessage(req, 400, "Warning! You are editing this feed in another session/browser tab!");
logMessageAndHalt(req, 400, "Warning! You are editing this feed in another session/browser tab!");
return null;
}
}
@@ -109,7 +109,7 @@ private static String maintainLock(Request req, Response res) {
if (currentSession == null) {
// If there is no current session to maintain, request that user reloads browser.
LOG.warn("No active editor session to maintain {}.", sessionId);
haltWithMessage(req, 400, "No active session for feedId. Please refresh your browser and try editing later.");
logMessageAndHalt(req, 400, "No active session for feedId. Please refresh your browser and try editing later.");
return null;
} else if (!currentSession.sessionId.equals(sessionId)) {
long secondsSinceLastCheckIn = TimeUnit.MILLISECONDS.toSeconds (System.currentTimeMillis() - currentSession.lastCheckIn);
@@ -122,10 +122,10 @@ private static String maintainLock(Request req, Response res) {
// If the new current session is held by this user, give them the option to evict the current session /
// unlock the feed.
LOG.warn("User {} already has an active editor session () for feed {}.", userProfile.getEmail(), currentSession.sessionId, currentSession.feedId);
haltWithMessage(req, 400, "Warning! You have an active editing session for this feed underway in a different browser tab.");
logMessageAndHalt(req, 400, "Warning! You have an active editing session for this feed underway in a different browser tab.");
} else {
LOG.warn("User {} attempted editor session for feed {} while active session underway for user {}.", userProfile.getEmail(), currentSession.feedId, currentSession.userEmail);
haltWithMessage(req, 400, getLockedFeedMessage(currentSession, minutesUntilExpiration));
logMessageAndHalt(req, 400, getLockedFeedMessage(currentSession, minutesUntilExpiration));
}
return null;
} else {
src/main/java/com/conveyal/datatools/editor/controllers/api/EditorController.java
@@ -10,6 +10,7 @@
import com.conveyal.gtfs.loader.JdbcTableWriter;
import com.conveyal.gtfs.loader.Table;
import com.conveyal.gtfs.model.Entity;
import com.conveyal.gtfs.util.InvalidNamespaceException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.dbutils.DbUtils;
@@ -20,12 +21,13 @@
import spark.Response;

import javax.sql.DataSource;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

import static com.conveyal.datatools.common.utils.SparkUtils.formatJSON;
import static com.conveyal.datatools.common.utils.SparkUtils.haltWithMessage;
import static com.conveyal.datatools.common.utils.SparkUtils.logMessageAndHalt;
import static com.conveyal.datatools.editor.controllers.EditorLockController.sessionsForFeedIds;
import static spark.Spark.delete;
import static spark.Spark.options;
@@ -97,15 +99,17 @@ private String deleteTripsForPattern(Request req, Response res) {
// NOTE: This is a string pattern ID, not the integer ID that all other HTTP endpoints use.
String patternId = req.params("id");
if (patternId == null) {
haltWithMessage(req, 400, "Must provide valid pattern_id");
logMessageAndHalt(req, 400, "Must provide valid pattern_id");
}
try {
JdbcTableWriter tableWriter = new JdbcTableWriter(Table.TRIPS, datasource, namespace);
int deletedCount = tableWriter.deleteWhere("pattern_id", patternId, true);
return formatJSON(String.format("Deleted %d.", deletedCount), 200);
} catch (InvalidNamespaceException e) {
logMessageAndHalt(req, 400, "Invalid namespace");
return null;
} catch (Exception e) {
e.printStackTrace();
haltWithMessage(req, 400, "Error deleting entity", e);
logMessageAndHalt(req, 500, "Error deleting entity", e);
return null;
} finally {
LOG.info("Delete operation took {} msec", System.currentTimeMillis() - startTime);
@@ -134,9 +138,10 @@ private String deleteMultipleTrips(Request req, Response res) {
// Commit the transaction after iterating over trip IDs (because the deletes where made without autocommit).
tableWriter.commit();
LOG.info("Deleted {} trips", tripIds.length);
} catch (InvalidNamespaceException e) {
logMessageAndHalt(req, 400, "Invalid namespace");
} catch (Exception e) {
e.printStackTrace();
haltWithMessage(req, 400, "Error deleting entity", e);
logMessageAndHalt(req, 500, "Error deleting entity", e);
} finally {
LOG.info("Delete operation took {} msec", System.currentTimeMillis() - startTime);
}
@@ -157,8 +162,7 @@ private String deleteOne(Request req, Response res) {
return formatJSON(String.valueOf("Deleted one."), 200);
}
} catch (Exception e) {
e.printStackTrace();
haltWithMessage(req, 400, "Error deleting entity", e);
logMessageAndHalt(req, 400, "Error deleting entity", e);
} finally {
LOG.info("Delete operation took {} msec", System.currentTimeMillis() - startTime);
}
@@ -178,13 +182,7 @@ private String uploadEntityBranding (Request req, Response res) {
url = S3Utils.uploadBranding(req, String.join("_", classToLowercase, idAsString));
} catch (HaltException e) {
// Do not re-catch halts thrown for exceptions that have already been caught.
LOG.error("Halt encountered", e);
throw e;
} catch (Exception e) {
String message = String.format("Could not upload branding for %s id=%d", classToLowercase, id);
LOG.error(message);
e.printStackTrace();
haltWithMessage(req, 400, message, e);
}
String namespace = getNamespaceAndValidateSession(req);
// Prepare json object for response. (Note: this is not the full entity object, but just the URL field).
@@ -201,9 +199,8 @@ private String uploadEntityBranding (Request req, Response res) {
preparedStatement.executeUpdate();
connection.commit();
return jsonObject.toString();
} catch (SQLException e) {
e.printStackTrace();
haltWithMessage(req, 400, "Could not update branding url", e);
} catch (Exception e) {
logMessageAndHalt(req, 500, "Could not update branding url", e);
return null;
} finally {
DbUtils.closeQuietly(connection);
@@ -220,22 +217,26 @@ private String createOrUpdate(Request req, Response res) {
// Check if an update or create operation depending on presence of id param
// This needs to be final because it is used in a lambda operation below.
if (req.params("id") == null && req.requestMethod().equals("PUT")) {
haltWithMessage(req, 400, "Must provide id");
logMessageAndHalt(req, 400, "Must provide id");
}
final boolean isCreating = req.params("id") == null;
String namespace = getNamespaceAndValidateSession(req);
Integer id = getIdFromRequest(req);
// Get the JsonObject
// Save or update to database
try {
JdbcTableWriter tableWriter = new JdbcTableWriter(table, datasource, namespace);
String jsonBody = req.body();
if (isCreating) {
return tableWriter.create(req.body(), true);
return tableWriter.create(jsonBody, true);
} else {
return tableWriter.update(id, req.body(), true);
return tableWriter.update(id, jsonBody, true);
}
} catch (InvalidNamespaceException e) {
logMessageAndHalt(req, 400, "Invalid namespace");
} catch (IOException e) {
logMessageAndHalt(req, 400, "Invalid json", e);
} catch (Exception e) {
e.printStackTrace();
haltWithMessage(req, 400, "Operation failed.", e);
logMessageAndHalt(req, 500, "An error was encountered while trying to save to the database", e);
} finally {
String operation = isCreating ? "Create" : "Update";
LOG.info("{} operation took {} msec", operation, System.currentTimeMillis() - startTime);
@@ -252,31 +253,31 @@ private static String getNamespaceAndValidateSession(Request req) {
String sessionId = req.queryParams("sessionId");
FeedSource feedSource = Persistence.feedSources.getById(feedId);
if (feedSource == null) {
haltWithMessage(req, 400, "Feed ID is invalid");
logMessageAndHalt(req, 400, "Feed ID is invalid");
}
// FIXME: Switch to using spark session IDs rather than query parameter?
// String sessionId = req.session().id();
EditorLockController.EditorSession currentSession = sessionsForFeedIds.get(feedId);
if (currentSession == null) {
haltWithMessage(req, 400, "There is no active editing session for user.");
logMessageAndHalt(req, 400, "There is no active editing session for user.");
}
if (!currentSession.sessionId.equals(sessionId)) {
// This session does not match the current active session for the feed.
Auth0UserProfile userProfile = req.attribute("user");
if (currentSession.userEmail.equals(userProfile.getEmail())) {
LOG.warn("User {} already has editor session {} for feed {}. Same user cannot make edits on session {}.", currentSession.userEmail, currentSession.sessionId, feedId, req.session().id());
haltWithMessage(req, 400, "You have another editing session open for " + feedSource.name);
logMessageAndHalt(req, 400, "You have another editing session open for " + feedSource.name);
} else {
LOG.warn("User {} already has editor session {} for feed {}. User {} cannot make edits on session {}.", currentSession.userEmail, currentSession.sessionId, feedId, userProfile.getEmail(), req.session().id());
haltWithMessage(req, 400, "Somebody else is editing the " + feedSource.name + " feed.");
logMessageAndHalt(req, 400, "Somebody else is editing the " + feedSource.name + " feed.");
}
} else {
currentSession.lastEdit = System.currentTimeMillis();
LOG.info("Updating session {} last edit time to {}", sessionId, currentSession.lastEdit);
}
String namespace = feedSource.editorNamespace;
if (namespace == null) {
haltWithMessage(req, 400, "Cannot edit feed that has not been snapshotted (namespace is null).");
logMessageAndHalt(req, 400, "Cannot edit feed that has not been snapshotted (namespace is null).");
}
return namespace;
}
@@ -294,7 +295,7 @@ private Integer getIdFromRequest(Request req) {
id = Integer.valueOf(req.params("id"));
} catch (NumberFormatException e) {
LOG.error("ID provided must be an integer", e);
haltWithMessage(req, 400, "ID provided is not a number");
logMessageAndHalt(req, 400, "ID provided is not a number");
}
}
return id;