Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Experiments with user auth/db access #811

Draft
wants to merge 5 commits into
base: dev
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import com.conveyal.analysis.controllers.BrokerController;
import com.conveyal.analysis.controllers.BundleController;
import com.conveyal.analysis.controllers.DataSourceController;
import com.conveyal.analysis.controllers.DatabaseController;
import com.conveyal.analysis.controllers.GtfsController;
import com.conveyal.analysis.controllers.HttpController;
import com.conveyal.analysis.controllers.OpportunityDatasetController;
Expand Down Expand Up @@ -96,6 +97,7 @@ public List<HttpController> standardHttpControllers () {
new BrokerController(broker, eventBus),
new UserActivityController(taskScheduler),
new DataSourceController(fileStorage, database, taskScheduler, censusExtractor),
new DatabaseController(database),
new WorkerProxyController(broker)
);
}
Expand Down
45 changes: 30 additions & 15 deletions src/main/java/com/conveyal/analysis/components/HttpApi.java
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,21 @@ private spark.Service configureSparkService () {
LOG.info("Analysis server will listen for HTTP connections on port {}.", config.serverPort());
spark.Service sparkService = spark.Service.ignite();
sparkService.port(config.serverPort());
//sparkService.threadPool(1000);

// Set up TLS (HTTPS). Unfortunately Spark HTTP only accepts String paths to keystore files.
// We want to build a Keystore instance programmatically from PEM files.
// Digging through the Spark source code it seems extremely convoluted to directly inject a Keystore instance.
// sparkService.secure();
// Usage examples at:
// https://github.com/Hakky54/sslcontext-kickstart/blob/master/sslcontext-kickstart-for-pem/src/test/java/nl/altindag/ssl/util/PemUtilsShould.java
// Dependency:
// Tools to load PEM files into Java Keystore (so we don't have to use arcane Java keytool)
// implementation 'io.github.hakky54:sslcontext-kickstart-for-pem:7.4.1'

// Serve up UI files. staticFileLocation("vector-client") inside classpath will not see changes to files.
// Note that this eliminates the need for CORS headers and eliminates CORS preflight request latency.
sparkService.externalStaticFileLocation("../r5/src/main/resources/vector-client");

// Specify actions to take before the main logic of handling each HTTP request.
sparkService.before((req, res) -> {
Expand All @@ -87,10 +102,10 @@ private spark.Service configureSparkService () {
// Set CORS headers to allow requests to this API server from a frontend hosted on a different domain.
// This used to be hardwired to Access-Control-Allow-Origin: * but that leaves the server open to XSRF
// attacks when authentication is disabled (e.g. when running locally).
res.header("Access-Control-Allow-Origin", config.allowOrigin());
// For caching, signal to the browser that responses may be different based on origin.
// TODO clarify why this is important, considering that normally all requests come from the same origin.
res.header("Vary", "Origin");
// res.header("Access-Control-Allow-Origin", config.allowOrigin());
// // For caching, signal to the browser that responses may be different based on origin.
// // TODO clarify why this is important, considering that normally all requests come from the same origin.
// res.header("Vary", "Origin");

// The default MIME type is JSON. This will be overridden by the few controllers that do not return JSON.
res.type("application/json");
Expand Down Expand Up @@ -121,17 +136,17 @@ private spark.Service configureSparkService () {

// Handle CORS preflight requests (which are OPTIONS requests).
// See comment above about Access-Control-Allow-Origin
sparkService.options("/*", (req, res) -> {
// Cache the preflight response for up to one day (the maximum allowed by browsers)
res.header("Access-Control-Max-Age", "86400");
res.header("Access-Control-Allow-Methods", "GET,PUT,POST,DELETE,OPTIONS");
// Allowing credentials is necessary to send an Authorization header
res.header("Access-Control-Allow-Credentials", "true");
res.header("Access-Control-Allow-Headers", "Accept,Authorization,Content-Type,Origin," +
"X-Requested-With,Content-Length,X-Conveyal-Access-Group"
);
return "OK";
});
// sparkService.options("/*", (req, res) -> {
// // Cache the preflight response for up to one day (the maximum allowed by browsers)
// res.header("Access-Control-Max-Age", "86400");
// res.header("Access-Control-Allow-Methods", "GET,PUT,POST,DELETE,OPTIONS");
// // Allowing credentials is necessary to send an Authorization header
// res.header("Access-Control-Allow-Credentials", "true");
// res.header("Access-Control-Allow-Headers", "Accept,Authorization,Content-Type,Origin," +
// "X-Requested-With,Content-Length,X-Conveyal-Access-Group"
// );
// return "OK";
// });

// Allow client to fetch information about the backend build version.
sparkService.get(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import com.conveyal.analysis.components.broker.Broker;
import com.conveyal.analysis.components.eventbus.ErrorLogger;
import com.conveyal.analysis.components.eventbus.EventBus;
import com.conveyal.analysis.controllers.AuthTokenController;
import com.conveyal.analysis.controllers.HttpController;
import com.conveyal.analysis.controllers.LocalFilesController;
import com.conveyal.analysis.grids.SeamlessCensusGridExtractor;
Expand Down Expand Up @@ -31,14 +32,16 @@ public LocalBackendComponents () {
// New (October 2019) DB layer, this should progressively replace the Persistence class
database = new AnalysisDB(config);
eventBus = new EventBus(taskScheduler);
authentication = new LocalAuthentication();
final TokenAuthentication tokenAuthentication = new TokenAuthentication(database);
authentication = tokenAuthentication;
// TODO add nested LocalWorkerComponents here, to reuse some components, and pass it into the LocalWorkerLauncher?
workerLauncher = new LocalWorkerLauncher(config, fileStorage, gtfsCache, osmCache);
broker = new Broker(config, fileStorage, eventBus, workerLauncher);
censusExtractor = new SeamlessCensusGridExtractor(config);
// Instantiate the HttpControllers last, when all the components except the HttpApi are already created.
List<HttpController> httpControllers = standardHttpControllers();
httpControllers.add(new LocalFilesController(fileStorage));
httpControllers.add(new AuthTokenController(tokenAuthentication));
httpApi = new HttpApi(fileStorage, authentication, eventBus, config, httpControllers);
// compute = new LocalCompute();
// persistence = persistence(local_Mongo)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,165 @@
package com.conveyal.analysis.components;

import com.conveyal.analysis.AnalysisServerException;
import com.conveyal.analysis.UserPermissions;
import com.conveyal.analysis.persistence.AnalysisDB;
import com.mongodb.client.MongoCollection;
import org.bson.Document;
import org.bson.types.Binary;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import spark.Request;

import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import java.lang.invoke.MethodHandles;
import java.security.MessageDigest;
import java.security.SecureRandom;
import java.security.spec.KeySpec;
import java.util.Arrays;
import java.util.Base64;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;

import static com.conveyal.analysis.AnalysisServerException.Type.UNAUTHORIZED;
import static com.mongodb.client.model.Filters.eq;

/**
 * Simple bearer token authentication storing hashed passwords in database.
 * Allows direct management of users and permissions.
 * Passwords are salted and hashed with PBKDF2; live tokens are held only in memory,
 * so all tokens are invalidated when the server restarts.
 */
public class TokenAuthentication implements Authentication {

    private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

    // Mongo collection of user documents keyed on email (_id), each holding group, salt, and password hash.
    private final MongoCollection<Document> users;

    /**
     * Bundles UserPermissions together with a last read time to allow expiry.
     */
    private static class TokenValue {
        long lastUsed = System.currentTimeMillis();
        final UserPermissions userPermissions;
        public TokenValue(UserPermissions userPermissions) {
            this.userPermissions = userPermissions;
        }
    }

    // In-memory map from active bearer tokens to their users. All access must synchronize on the map itself.
    private Map<String, TokenValue> userForToken = new HashMap<>();

    public TokenAuthentication (AnalysisDB database) {
        // TODO verify that sharing a MongoCollection across threads is safe
        this.users = database.getBsonCollection("users");
    }

    /**
     * Resolve the bearer token on the incoming request to a user.
     * @throws AnalysisServerException with code 401 if the token is absent or not recognized.
     */
    @Override
    public UserPermissions authenticate(Request request) {
        String token = request.headers("authorization");
        // Some places such as MapboxGL do not make it easy to add headers, so also accept token in query parameter.
        // The MapboxGL transformUrl setting seems to be missing from recent versions of the library.
        if (token == null) {
            token = request.queryParams("token");
        }
        if (token == null) {
            throw new AnalysisServerException(UNAUTHORIZED, "Authorization token missing.", 401);
        }
        // FIXME(review): hard-coded backdoor token grants admin permissions.
        // Acceptable only for local experimentation; must be removed before any real deployment.
        if ("sesame".equalsIgnoreCase(token)) {
            return new UserPermissions("local", true, "local");
        }
        UserPermissions userPermissions = userForToken(token);
        if (userPermissions == null) {
            throw new AnalysisServerException(UNAUTHORIZED, "Invalid authorization token.", 401);
        } else {
            return userPermissions;
        }
    }

    /**
     * TODO is SecureRandom a sufficiently secure source of randomness when used this way?
     * Should we be creating a new instance of SecureRandom each time or reusing it?
     * Do not use basic Base64 encoding since it contains some characters that are invalid in URLs.
     * @return A url-safe representation of 32 random bytes
     */
    public static String generateToken () {
        Random random = new SecureRandom();
        byte[] tokenBytes = new byte[32];
        random.nextBytes(tokenBytes);
        String token = Base64.getUrlEncoder().encodeToString(tokenBytes);
        return token;
    }

    /**
     * Look up the user associated with an active token, refreshing its last-used time.
     * @return the user's permissions, or null if the token is not recognized.
     */
    public UserPermissions userForToken (String token) {
        TokenValue tokenValue = null;
        synchronized (userForToken) {
            tokenValue = userForToken.get(token);
            if (tokenValue == null) {
                return null;
            } else {
                tokenValue.lastUsed = System.currentTimeMillis();
                return tokenValue.userPermissions;
            }
        }
    }

    /**
     * @return byte[] representing the supplied password hashed with the supplied salt.
     */
    private byte[] hashWithSalt (String password, byte[] salt) {
        try {
            // Note Java char is 16-bit Unicode (not byte, which requires a specific encoding like UTF8).
            // 256 bit key length is 32 bytes.
            // NOTE(review): HmacSHA1 is retained for compatibility with already-stored hashes;
            // consider PBKDF2WithHmacSHA256 before this schema has real users.
            KeySpec keySpec = new PBEKeySpec(password.toCharArray(), salt, 65536, 256);
            SecretKeyFactory keyFactory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1");
            byte[] hash = keyFactory.generateSecret(keySpec).getEncoded();
            return hash;
        } catch (Exception e) {
            throw new RuntimeException("Failed to hash password with PBKDF2.", e);
        }
    }

    /**
     * Create a user with the specified password. Stores the random salt and hashed password in the database.
     */
    public void createUser (String email, String group, String password) {
        // TODO validate password entropy
        // Use a cryptographically strong generator for the salt; plain java.util.Random is predictable.
        Random random = new SecureRandom();
        byte[] salt = new byte[32];
        random.nextBytes(salt);
        byte[] hash = hashWithSalt(password, salt);
        // Due to Mongo's nature it may not be possible to verify whether the user already exists.
        // Once the write is finalized though, this will produce E11000 duplicate key error.
        // We may want to allow updating a user by simply calling this HTTP API method more than once.
        users.insertOne(new Document("_id", email)
                .append("group", group)
                .append("salt", new Binary(salt))
                .append("hash", new Binary(hash))
        );
    }

    /**
     * Create a new token, replacing any existing one for the same user (email) as long as the password is correct.
     * @return a new token, or null if the supplied password is incorrect.
     */
    public String makeToken (String email, String password) {
        Document userDocument = users.find(eq("_id", email)).first();
        if (userDocument == null) {
            throw new IllegalArgumentException("User unknown: " + email);
        }
        Binary salt = (Binary) userDocument.get("salt");
        Binary hash = (Binary) userDocument.get("hash");
        String group = userDocument.getString("group");
        byte[] hashForComparison = hashWithSalt(password, salt.getData());
        // Compare hashes in constant time to avoid leaking information through timing side channels.
        if (MessageDigest.isEqual(hash.getData(), hashForComparison)) {
            // Maybe invalidation is pointless and we can continue to return the same key indefinitely.
            String token = generateToken();
            synchronized (userForToken) {
                userForToken.put(token, new TokenValue(new UserPermissions(email, false, group)));
                return token;
            }
        } else {
            return null;
        }
    }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
package com.conveyal.analysis.controllers;

import com.conveyal.analysis.AnalysisServerException;
import com.conveyal.analysis.UserPermissions;
import com.conveyal.analysis.components.TokenAuthentication;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import spark.Request;
import spark.Response;

import java.lang.invoke.MethodHandles;
import java.util.Map;

import static com.conveyal.analysis.AnalysisServerException.Type.UNAUTHORIZED;
import static com.conveyal.analysis.util.JsonUtil.toJson;

/**
 * HTTP API Controller that handles user accounts and authentication.
 * Serve up tokens for valid users. Allow admin users to create new users and set their passwords.
 * TODO add rate limiting and map size limiting (limit number of concurrent users in case of attacks).
 */
public class AuthTokenController implements HttpController {

    private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

    private final TokenAuthentication tokenAuthentication;

    public AuthTokenController (TokenAuthentication tokenAuthentication) {
        this.tokenAuthentication = tokenAuthentication;
    }

    /**
     * Create a user with the specified password. Stores the random salt and hashed password in the database.
     * Restricted to admin users. Responds 201 on success.
     */
    private Object createUser (Request req, Response res) {
        if (!UserPermissions.from(req).admin) {
            throw new AnalysisServerException(UNAUTHORIZED, "Only admin users can create new users.", 401);
        }
        String email = req.queryParams("email");
        String group = req.queryParams("group");
        String password = req.queryParams("password");
        // Fail fast with a clear message rather than letting a null propagate into hashing or Mongo.
        if (email == null || email.isBlank() || group == null || group.isBlank()
                || password == null || password.isBlank()) {
            throw new IllegalArgumentException("Query parameters email, group, and password are all required.");
        }
        tokenAuthentication.createUser(email, group, password);
        res.status(201);
        return "CREATED"; // alternatively UPDATED or FAILED
    }

    /**
     * Create a new token, replacing any existing one for the same user (email).
     * @return a single-entry map {"token": <token>} serialized to JSON by the route's transformer.
     */
    private Map<String, String> getTokenForEmail (Request req, Response res) {
        // These should probably be in the body not URL, to prevent them from appearing as plaintext in history.
        String email = req.queryParams("email");
        String password = req.queryParams("password");
        // Crude rate limiting, might just lead to connections piling up in event of attack.
        // sleepSeconds(2);
        // TODO clear out any expired tokens, limiting to one or two per email
        String token = tokenAuthentication.makeToken(email, password);
        if (token == null) {
            throw new AnalysisServerException(UNAUTHORIZED, "Incorrect email/password combination.", 401);
        } else {
            return Map.of("token", token);
        }
    }

    // Testing with Apache bench shows some stalling
    // -k keepalive connections fails immediately

    // Example usage:
    // curl -H "authorization: sesame" -X POST "localhost:7070/api/user?email=abyrd@conveyal.com&group=local&password=testpass"
    // 201 CREATED
    // curl "localhost:7070/token?email=abyrd@conveyal.com&password=testpass"
    // 200 {"token":"LHKUz6weI32mEk3SXBfGZFvPP3P9FZq8xboJdPPBIdo="}
    // curl -H "authorization: abyrd@conveyal.com Jx5Re2/fl1AAISeeMzaCJOy8OCRO6MVOAJLSN7/tkSg=" "localhost:7070/api/activity"
    // 200 {"systemStatusMessages":[],"taskBacklog":0,"taskProgress":[]}

    @Override
    public void registerEndpoints (spark.Service sparkService) {
        // Token endpoint is outside authenticated /api prefix because it's the means to get authentication tokens.
        sparkService.get("/token", this::getTokenForEmail, toJson);
        // User endpoint is inside the authenticated /api prefix because it is only accessible to admin users.
        sparkService.post("/api/user", this::createUser);
    }

}
Loading