From b3a8648b12fcc576e1361ad99814cfcf8460320a Mon Sep 17 00:00:00 2001 From: "alex.hill@gmail.com" Date: Thu, 26 Sep 2024 15:39:34 +0100 Subject: [PATCH 01/10] add full api spec --- DESCRIPTION | 1 + R/api.R | 10 +- R/router.R | 8 + inst/schema/ErrorDetail.schema.json | 2 +- inst/schema/ResponseFailure.schema.json | 2 +- inst/spec.yaml | 384 ++++++++++++++++++++++++ 6 files changed, 400 insertions(+), 7 deletions(-) create mode 100644 inst/spec.yaml diff --git a/DESCRIPTION b/DESCRIPTION index 376e3c3..ecb7262 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -24,6 +24,7 @@ Imports: plotly, plumber, porcelain, + redoc, rlang, stringr, tibble diff --git a/R/api.R b/R/api.R index 4986229..15da882 100644 --- a/R/api.R +++ b/R/api.R @@ -96,12 +96,12 @@ get_xcol <- function(parsed) { target_delete_dataset <- function(name, req) { session_id <- get_or_create_session_id(req) path <- file.path("uploads", session_id, name) - if (!file.exists(path)) { - porcelain::porcelain_stop(paste("Did not find dataset with name:", name), - code = "DATASET_NOT_FOUND", status_code = 404L) + if (file.exists(path)) { + logger::log_info(paste("Deleting dataset:", name)) + fs::dir_delete(path) + } else { + logger::log_info(paste("No dataset found with name", name)) } - logger::log_info(paste("Deleting dataset: ", name)) - fs::dir_delete(path) jsonlite::unbox(name) } diff --git a/R/router.R b/R/router.R index dc41e75..7e77cf4 100644 --- a/R/router.R +++ b/R/router.R @@ -50,6 +50,14 @@ build_routes <- function(cookie_key = plumber::random_cookie_key(), pr$handle(get_datasets()) pr$handle(get_trace()) pr$handle(get_individual()) + api <- yaml::read_yaml(file.path(system.file("spec.yaml", + package = "serovizr")), + eval.expr = FALSE) + pr$setApiSpec(api) + pr$setDocs("redoc") + pr$mount("/schema", PlumberStatic$new(file.path(system.file("schema", + package = "serovizr")))) + pr } get_root <- function() { diff --git a/inst/schema/ErrorDetail.schema.json b/inst/schema/ErrorDetail.schema.json index 
b476490..8494da7 100644 --- a/inst/schema/ErrorDetail.schema.json +++ b/inst/schema/ErrorDetail.schema.json @@ -9,6 +9,6 @@ "type": ["string", "null"] } }, - "additionalProperties": true, + "additionalProperties": false, "required": [ "error", "detail" ] } diff --git a/inst/schema/ResponseFailure.schema.json b/inst/schema/ResponseFailure.schema.json index 71e9a79..828030a 100644 --- a/inst/schema/ResponseFailure.schema.json +++ b/inst/schema/ResponseFailure.schema.json @@ -16,5 +16,5 @@ } }, "required": ["status", "data", "errors"], - "additionalProperties": true + "additionalProperties": false } diff --git a/inst/spec.yaml b/inst/spec.yaml new file mode 100644 index 0000000..0f46dad --- /dev/null +++ b/inst/spec.yaml @@ -0,0 +1,384 @@ +openapi: "3.1.0" +info: + version: 0.0.0 + title: serovizr + description: R API for SeroViz app + license: + name: GPL (>= 3) + url: https://www.gnu.org/licenses/gpl-3.0.en.html +servers: + - url: https://seroviz.seroanalytics.org/api + - url: http://localhost:8888 +basePath: "/api" +paths: + /: + get: + description: Returns welcome message + responses: + '200': + description: Welcome message + content: + application/json: + schema: + type: object + properties: + status: + type: string + const: 'success' + data: + type: string + errors: + type: null + required: + - status + - data + - errors + additionalProperties: false + examples: + one: + value: + status: 'success' + data: 'Welcome to serovizr' + /version/: + get: + description: Returns current version of the API package + responses: + '200': + description: Version number + content: + application/json: + schema: + type: object + properties: + status: + type: string + const: 'success' + data: + $ref: 'schema/Version.schema.json' + errors: + type: null + required: + - status + - data + - errors + additionalProperties: false + examples: + one: + value: + status: 'success' + data: '1.0.0' + /datasets/: + get: + description: List available datasets for the given session id + 
responses: + '200': + description: List of dataset names + content: + application/json: + schema: + type: object + properties: + status: + type: string + const: 'success' + data: + $ref: 'schema/DatasetNames.schema.json' + errors: + type: null + required: + - status + - data + - errors + additionalProperties: false + examples: + one: + value: + status: 'success' + data: + - dataset_one + - another_dataset + /dataset/{name}: + get: + description: Returns metadata about the named dataset + parameters: + - in: path + name: name + schema: + type: string + required: true + responses: + '200': + description: Dataset metadata + content: + application/json: + schema: + type: object + properties: + status: + type: string + const: 'success' + data: + $ref: 'schema/DatasetMetadata.schema.json' + errors: + type: null + required: + - status + - data + - errors + additionalProperties: false + examples: + one: + value: + status: 'success' + data: + variables: + - name: sex + levels: + - M + - F + biomarkers: + - ab_unit + - ab_spike + xcol: day_of_study + '404': + description: Dataset with the given name not found + content: + application/json: + schema: + $ref: 'schema/ResponseFailure.schema.json' + delete: + description: Delete a dataset if it exists + parameters: + - in: path + name: name + schema: + type: string + required: true + responses: + '200': + description: Name of deleted dataset + content: + application/json: + schema: + type: object + properties: + status: + type: string + const: 'success' + data: + type: string + errors: + type: null + required: + - status + - data + - errors + additionalProperties: false + examples: + one: + value: + status: 'success' + data: 'mydataset' + errors: null + /dataset/: + post: + description: Upload a new dataset + requestBody: + description: Name of dataset and column to use to index time series + content: + application/json: + schema: + type: object + properties: + xcol: + type: string + name: + type: string + file: + type: file + 
additionalProperties: false + responses: + '200': + description: Returns name of uploaded dataset + content: + application/json: + schema: + type: object + properties: + status: + type: string + const: 'success' + data: + $ref: 'schema/UploadResult.schema.json' + errors: + type: null + required: + - status + - data + - errors + additionalProperties: false + examples: + one: + value: + status: 'success' + data: 'mynewdataset' + errors: null + '400': + description: Returns dataset validation errors + content: + application/json: + schema: + $ref: 'schema/ResponseFailure.schema.json' + examples: + duplicate: + value: + status: 'failure' + data: null + errors: + - error: 'BAD_REQUEST' + detail: 'Missing required columns: biomarker, time' + /dataset/{name}/trace/{biomarker}/: + get: + description: Returns data series for a biomarker + parameters: + - in: path + name: name + schema: + type: string + required: true + description: Dataset name + - in: path + name: biomarker + schema: + type: string + required: true + description: Biomarker name + - in: query + schema: + type: string + name: disaggregate + description: Column name to disaggregate by + - in: query + name: filter + schema: + type: string + description: Column name(s) and value(s) to filter by, in the format 'col:val+col2:val2' + - in: query + name: scale + schema: + type: string + description: One of 'log', 'log2', or 'natural' + - in: query + name: method + schema: + type: string + description: Specify a spline function; one of 'loess' or 'gam'. 
By default, loess will be used if n < 1000, gam otherwise + - in: query + name: span + schema: + type: number + description: Argument to be passed to 'loess' + - in: query + name: k + schema: + type: number + description: Argument to be passed to 'gam' + responses: + '200': + description: Raw and spline data series, with any warnings generated by running the spline model + content: + application/json: + schema: + type: object + properties: + status: + type: string + const: 'success' + data: + $ref: 'schema/DataSeries.schema.json' + errors: + type: null + required: + - status + - data + - errors + additionalProperties: false + /dataset/{name}/individual/{pidcol}/: + get: + description: Returns plotly config for individual trajectories + parameters: + - in: path + name: name + schema: + type: string + required: true + description: Dataset name + - in: path + name: pidcol + schema: + type: string + required: true + description: Name of the column containing individual ids + - in: query + schema: + type: string + name: scale + description: One of 'log', 'log2' or 'natural' + - in: query + schema: + type: string + name: color + description: Column name to color returned traces by + - in: query + schema: + type: string + name: linetype + description: Column name to determine the trace linetypes + - in: query + schema: + type: string + name: filter + description: Column name(s) and value(s) to filter by, in the format 'col:val+col2:val2' + - in: query + schema: + type: number + name: page + description: Page of results to return (there are 20 individuals to a page) + responses: + '200': + description: Plotly data and config objects for displaying multi-facet plot of individual trajectories + content: + application/json: + schema: + type: object + properties: + status: + type: string + const: 'success' + data: + $ref: 'schema/Plotly.schema.json' + errors: + type: null + required: + - status + - data + - errors + additionalProperties: false + '400': + description: 
Validation errors + content: + application/json: + schema: + $ref: 'schema/ResponseFailure.schema.json' + examples: + id: + value: + errors: + - error: 'BAD_REQUEST' + detail: Id column 'person_id' not found. + '404': + description: Dataset not found + content: + application/json: + schema: + $ref: 'schema/ResponseFailure.schema.json' From 891237a1871acdf6aa7c8e102cc47771f185de6e Mon Sep 17 00:00:00 2001 From: "alex.hill@gmail.com" Date: Thu, 26 Sep 2024 15:49:33 +0100 Subject: [PATCH 02/10] update readme --- README.md | 30 ++++++++++++++++++++++++++---- 1 file changed, 26 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index aefc9c9..8f71acc 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,29 @@ ![GitHub License](https://img.shields.io/github/license/seroanalytics/serovizr) -R API for the SeroViz app. Based on the [porcelain](https://github.com/reside-ic/porcelain) framework. +R API for the SeroViz app. Based on the [porcelain](https://github.com/reside-ic/porcelain) and [plumber](https://github.com/rstudio/plumber) frameworks. + +## API Specification +Docs are available when running the API locally on port 8888, via +``` +http://127.0.0.1:8888/__docs__/ +``` + +The easiest way to run the API locally is via Docker: + +``` + docker run -p 8888:8888 seroanalytics/serovizr:main +``` + +Alternatively, to run from R, first clone this repo and then from this directory run: + +```r + devtools::load_all() + serovizr:::main() +``` + +The docs are maintained via an [openapi](https://www.openapis.org/) specification +contained in `inst/spec.yaml`, and [JSON Schema](https://json-schema.org/) files in `inst/schema`. 
## Developing Install dependencies with: @@ -35,20 +57,20 @@ devtools::test() To build a Docker image: -``` r +``` ./docker/build ``` To push to Dockerhub: -``` r +``` ./docker/push ``` To run a built image: -``` r +``` docker run -p 8888:8888 seroanalytics/serovizr: ``` From 3abfbe52e7fb7e59f2c5492728a0d2894450a8b5 Mon Sep 17 00:00:00 2001 From: "alex.hill@gmail.com" Date: Thu, 26 Sep 2024 15:51:09 +0100 Subject: [PATCH 03/10] add redoc to dockerfile --- docker/Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/Dockerfile b/docker/Dockerfile index 097c1c6..22da199 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -23,6 +23,7 @@ RUN install_packages --repo=https://mrc-ide.r-universe.dev \ jsonvalidate \ plotly \ plumber \ + redoc \ remotes \ Rook \ stringr \ From f1ad4ab0441c405c40224b6aedb874f9258fc1e4 Mon Sep 17 00:00:00 2001 From: "alex.hill@gmail.com" Date: Thu, 26 Sep 2024 15:54:10 +0100 Subject: [PATCH 04/10] require redoc --- R/router.R | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/R/router.R b/R/router.R index 7e77cf4..0f69f37 100644 --- a/R/router.R +++ b/R/router.R @@ -54,8 +54,9 @@ build_routes <- function(cookie_key = plumber::random_cookie_key(), package = "serovizr")), eval.expr = FALSE) pr$setApiSpec(api) + require(redoc) pr$setDocs("redoc") - pr$mount("/schema", PlumberStatic$new(file.path(system.file("schema", + pr$mount("/schema", plumber::PlumberStatic$new(file.path(system.file("schema", package = "serovizr")))) pr } From e0760dcea8490d66f8aabafe5ffc775e508aeaa9 Mon Sep 17 00:00:00 2001 From: "alex.hill@gmail.com" Date: Thu, 26 Sep 2024 15:58:51 +0100 Subject: [PATCH 05/10] fix test --- R/router.R | 2 +- tests/testthat/test-router.R | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/R/router.R b/R/router.R index 0f69f37..a2db984 100644 --- a/R/router.R +++ b/R/router.R @@ -54,7 +54,7 @@ build_routes <- function(cookie_key = plumber::random_cookie_key(), package = "serovizr")), 
eval.expr = FALSE) pr$setApiSpec(api) - require(redoc) + library(redoc) pr$setDocs("redoc") pr$mount("/schema", plumber::PlumberStatic$new(file.path(system.file("schema", package = "serovizr")))) diff --git a/tests/testthat/test-router.R b/tests/testthat/test-router.R index 8577e7e..9ef95b3 100644 --- a/tests/testthat/test-router.R +++ b/tests/testthat/test-router.R @@ -58,12 +58,13 @@ test_that("DELETE /dataset", { expect_equal(body$data, "testdataset") }) -test_that("DELETE /dataset returns 404 if not found", { +test_that("DELETE /dataset returns 200 if dataset doesn't exist", { router <- build_routes(cookie_key) res <- router$call(make_req("DELETE", "/dataset/testdataset/", HTTP_COOKIE = cookie)) - expect_equal(res$status, 404) + expect_equal(res$status, 200) + expect_equal(body$data, "testdataset") }) test_that("GET /datasets", { From 18bc08b09ecd1b434a2b8e363d25a1addf8d1c8e Mon Sep 17 00:00:00 2001 From: "alex.hill@gmail.com" Date: Thu, 26 Sep 2024 16:07:48 +0100 Subject: [PATCH 06/10] tidy up router --- R/router.R | 134 ++++++++++------------------------- R/routes.R | 71 +++++++++++++++++++ tests/testthat/test-router.R | 1 + 3 files changed, 109 insertions(+), 97 deletions(-) create mode 100644 R/routes.R diff --git a/R/router.R b/R/router.R index a2db984..23d48f0 100644 --- a/R/router.R +++ b/R/router.R @@ -5,7 +5,37 @@ build_routes <- function(cookie_key = plumber::random_cookie_key(), } plumber::options_plumber(trailingSlash = TRUE) pr <- porcelain::porcelain$new(validate = TRUE) - pr$registerHook(stage = "preserialize", function(data, req, res, value) { + pr$registerHook(stage = "preserialize", preserialize_hook(cache)) + pr$registerHooks(plumber::session_cookie(cookie_key, + name = "serovizr", + path = "/")) + + pr$filter("logger", logging_filter) + + pr$handle(get_root()) + pr$handle(get_version()) + # porcelain doesn't support multipart form content yet; for now wire this + # endpoint up using plumber arguments instead + pr$handle("POST", 
"/api/dataset/", target_post_dataset, + serializer = plumber::serializer_unboxed_json(null = "null")) + pr$handle(options_dataset()) + pr$handle(delete_dataset()) + pr$handle(get_dataset()) + pr$handle(get_datasets()) + pr$handle(get_trace()) + pr$handle(get_individual()) + setup_docs(pr) +} + +logging_filter <- function(req, res) { + logger::log_info(paste(as.character(Sys.time()), "-", + req$REQUEST_METHOD, req$PATH_INFO, "-", + req$HTTP_USER_AGENT, "@", req$REMOTE_ADDR, "\n")) + plumber::forward() +} + +preserialize_hook <- function(cache) { + function(data, req, res, value) { if (!is.null(req$HTTP_ORIGIN) && req$HTTP_ORIGIN %in% c("http://localhost:3000", "http://localhost")) { # allow local app and integration tests to access endpoints @@ -26,113 +56,23 @@ build_routes <- function(cookie_key = plumber::random_cookie_key(), }, error = function(e) logger::log_error(conditionMessage(e))) value - }) - - pr$registerHooks(plumber::session_cookie(cookie_key, - name = "serovizr", - path = "/")) - - pr$filter("logger", function(req, res) { - logger::log_info(paste(as.character(Sys.time()), "-", - req$REQUEST_METHOD, req$PATH_INFO, "-", - req$HTTP_USER_AGENT, "@", req$REMOTE_ADDR, "\n")) - plumber::forward() - }) + } +} - pr$handle(get_root()) - pr$handle(get_version()) - pr$handle("POST", "/api/dataset/", - function(req, res) target_post_dataset(req, res), - serializer = plumber::serializer_unboxed_json(null = "null")) - pr$handle(options_dataset()) - pr$handle(delete_dataset()) - pr$handle(get_dataset()) - pr$handle(get_datasets()) - pr$handle(get_trace()) - pr$handle(get_individual()) +setup_docs <- function(pr) { api <- yaml::read_yaml(file.path(system.file("spec.yaml", package = "serovizr")), eval.expr = FALSE) pr$setApiSpec(api) + # this is a bit annoying, but setDocs fails if the package isn't + # already loaded library(redoc) pr$setDocs("redoc") pr$mount("/schema", plumber::PlumberStatic$new(file.path(system.file("schema", - package = "serovizr")))) + package = 
"serovizr")))) pr } -get_root <- function() { - porcelain::porcelain_endpoint$new( - "GET", - "/api/", - target_get_root, - returning = porcelain::porcelain_returning_json()) -} - -get_version <- function() { - porcelain::porcelain_endpoint$new( - "GET", - "/api/version/", - target_get_version, - returning = porcelain::porcelain_returning_json("Version")) -} - -get_dataset <- function() { - porcelain::porcelain_endpoint$new( - "GET", "/api/dataset//", - target_get_dataset, - returning = porcelain::porcelain_returning_json("DatasetMetadata")) -} - -delete_dataset <- function() { - porcelain::porcelain_endpoint$new( - "DELETE", "/api/dataset//", - target_delete_dataset, - returning = porcelain::porcelain_returning_json()) -} - -options_dataset <- function() { - porcelain::porcelain_endpoint$new( - "OPTIONS", "/api/dataset//", - function(name) "OK", - returning = porcelain::porcelain_returning_json()) -} - -get_datasets <- function() { - porcelain::porcelain_endpoint$new( - "GET", - "/api/datasets/", - target_get_datasets, - returning = porcelain::porcelain_returning_json("DatasetNames")) -} - -get_trace <- function() { - porcelain::porcelain_endpoint$new( - "GET", - "/api/dataset//trace//", - target_get_trace, - porcelain::porcelain_input_query(disaggregate = "string", - filter = "string", - scale = "string", - method = "string", - span = "numeric", - k = "numeric"), - returning = porcelain::porcelain_returning_json("DataSeries")) -} - -get_individual <- function() { - porcelain::porcelain_endpoint$new( - "GET", - "/api/dataset//individual//", - target_get_individual, - porcelain::porcelain_input_query(scale = "string", - color = "string", - filter = "string", - linetype = "string", - page = "numeric"), - returning = porcelain::porcelain_returning_json("Plotly")) -} - prune_inactive_sessions <- function(cache) { active_sessions <- cache$keys() subdirectories <- list.files("uploads") diff --git a/R/routes.R b/R/routes.R new file mode 100644 index 0000000..7527be6 --- 
/dev/null +++ b/R/routes.R @@ -0,0 +1,71 @@ +get_root <- function() { + porcelain::porcelain_endpoint$new( + "GET", + "/api/", + target_get_root, + returning = porcelain::porcelain_returning_json()) +} + +get_version <- function() { + porcelain::porcelain_endpoint$new( + "GET", + "/api/version/", + target_get_version, + returning = porcelain::porcelain_returning_json("Version")) +} + +get_dataset <- function() { + porcelain::porcelain_endpoint$new( + "GET", "/api/dataset//", + target_get_dataset, + returning = porcelain::porcelain_returning_json("DatasetMetadata")) +} + +delete_dataset <- function() { + porcelain::porcelain_endpoint$new( + "DELETE", "/api/dataset//", + target_delete_dataset, + returning = porcelain::porcelain_returning_json()) +} + +options_dataset <- function() { + porcelain::porcelain_endpoint$new( + "OPTIONS", "/api/dataset//", + function(name) "OK", + returning = porcelain::porcelain_returning_json()) +} + +get_datasets <- function() { + porcelain::porcelain_endpoint$new( + "GET", + "/api/datasets/", + target_get_datasets, + returning = porcelain::porcelain_returning_json("DatasetNames")) +} + +get_trace <- function() { + porcelain::porcelain_endpoint$new( + "GET", + "/api/dataset//trace//", + target_get_trace, + porcelain::porcelain_input_query(disaggregate = "string", + filter = "string", + scale = "string", + method = "string", + span = "numeric", + k = "numeric"), + returning = porcelain::porcelain_returning_json("DataSeries")) +} + +get_individual <- function() { + porcelain::porcelain_endpoint$new( + "GET", + "/api/dataset//individual//", + target_get_individual, + porcelain::porcelain_input_query(scale = "string", + color = "string", + filter = "string", + linetype = "string", + page = "numeric"), + returning = porcelain::porcelain_returning_json("Plotly")) +} diff --git a/tests/testthat/test-router.R b/tests/testthat/test-router.R index 9ef95b3..cd0e98f 100644 --- a/tests/testthat/test-router.R +++ b/tests/testthat/test-router.R @@ 
-64,6 +64,7 @@ test_that("DELETE /dataset returns 200 if dataset doesn't exist", { "/dataset/testdataset/", HTTP_COOKIE = cookie)) expect_equal(res$status, 200) + body <- jsonlite::fromJSON(res$body) expect_equal(body$data, "testdataset") }) From de1586f8c3780607a111e286ce56d49179fddd90 Mon Sep 17 00:00:00 2001 From: "alex.hill@gmail.com" Date: Thu, 26 Sep 2024 16:09:54 +0100 Subject: [PATCH 07/10] lint --- R/router.R | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/R/router.R b/R/router.R index 23d48f0..32369b2 100644 --- a/R/router.R +++ b/R/router.R @@ -68,8 +68,8 @@ setup_docs <- function(pr) { # already loaded library(redoc) pr$setDocs("redoc") - pr$mount("/schema", plumber::PlumberStatic$new(file.path(system.file("schema", - package = "serovizr")))) + pr$mount("/schema", plumber::PlumberStatic$new( + file.path(system.file("schema", package = "serovizr")))) pr } From 9dd8e7bd0591511dbd1f8ff1959ee0c45bb00093 Mon Sep 17 00:00:00 2001 From: "alex.hill@gmail.com" Date: Thu, 26 Sep 2024 16:18:39 +0100 Subject: [PATCH 08/10] declare yaml package, add comment --- DESCRIPTION | 3 ++- R/api.R | 9 +-------- R/dataset-validation.R | 9 +++++++++ docker/Dockerfile | 3 ++- 4 files changed, 14 insertions(+), 10 deletions(-) diff --git a/DESCRIPTION b/DESCRIPTION index ecb7262..f4a2aea 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -27,7 +27,8 @@ Imports: redoc, rlang, stringr, - tibble + tibble, + yaml Remotes: hillalex/porcelain@i39, Suggests: diff --git a/R/api.R b/R/api.R index 15da882..411f655 100644 --- a/R/api.R +++ b/R/api.R @@ -194,8 +194,6 @@ target_get_individual <- function(req, color = NULL, linetype = NULL, page = 1) { - .data <- value <- NULL - data <- read_dataset(req, name, scale) dat <- data$data xcol <- data$xcol @@ -244,6 +242,7 @@ get_paged_ids <- function(ids, current_page, page_length) { } get_aes <- function(color, linetype, xcol) { + .data <- value <- NULL if (is.null(color)) { if (is.null(linetype)) { aes <- ggplot2::aes(x = 
.data[[xcol]], y = value) @@ -347,12 +346,6 @@ apply_filter <- function(filter, dat, cols) { dat[dat[filter_var] == filter_level, ] } -bad_request_response <- function(msg) { - error <- list(error = "BAD_REQUEST", - detail = msg) - return(list(status = "failure", errors = list(error), data = NULL)) -} - get_or_create_session_id <- function(req) { if (is.null(req$session$id)) { logger::log_info("Creating new session id") diff --git a/R/dataset-validation.R b/R/dataset-validation.R index 07729b4..0eb0886 100644 --- a/R/dataset-validation.R +++ b/R/dataset-validation.R @@ -1,3 +1,12 @@ +# The POST /dataset endpoint isn't using Porcelain, so we can't use +# porcelain::porcelain_stop when something goes wrong. Instead we have +# to manually return failure responses with the desired error messages +bad_request_response <- function(msg) { + error <- list(error = "BAD_REQUEST", + detail = msg) + return(list(status = "failure", errors = list(error), data = NULL)) +} + invalid_file_type <- function(res) { res$status <- 400L msg <- "Invalid file type; please upload file of type text/csv." 
diff --git a/docker/Dockerfile b/docker/Dockerfile index 22da199..846af26 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -27,7 +27,8 @@ RUN install_packages --repo=https://mrc-ide.r-universe.dev \ remotes \ Rook \ stringr \ - tibble + tibble \ + yaml RUN Rscript -e "install.packages('remotes')" RUN Rscript -e 'remotes::install_github("hillalex/porcelain@i39")' From f830d508d4ed928806460b03768331ff7a3ce9b9 Mon Sep 17 00:00:00 2001 From: "alex.hill@gmail.com" Date: Thu, 26 Sep 2024 16:41:37 +0100 Subject: [PATCH 09/10] fix package check --- R/router.R | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/R/router.R b/R/router.R index 32369b2..55be7d7 100644 --- a/R/router.R +++ b/R/router.R @@ -66,7 +66,7 @@ setup_docs <- function(pr) { pr$setApiSpec(api) # this is a bit annoying, but setDocs fails if the package isn't # already loaded - library(redoc) + requireNamespace("redoc") pr$setDocs("redoc") pr$mount("/schema", plumber::PlumberStatic$new( file.path(system.file("schema", package = "serovizr")))) From 7b49e631e27553afbd905ebf694bfb9b914e3d27 Mon Sep 17 00:00:00 2001 From: "alex.hill@gmail.com" Date: Fri, 27 Sep 2024 14:44:10 +0100 Subject: [PATCH 10/10] more examples in spec --- inst/spec.yaml | 140 ++++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 122 insertions(+), 18 deletions(-) diff --git a/inst/spec.yaml b/inst/spec.yaml index 0f46dad..dd5bda4 100644 --- a/inst/spec.yaml +++ b/inst/spec.yaml @@ -39,6 +39,7 @@ paths: value: status: 'success' data: 'Welcome to serovizr' + errors: null /version/: get: description: Returns current version of the API package @@ -67,6 +68,7 @@ paths: value: status: 'success' data: '1.0.0' + errors: null /datasets/: get: description: List available datasets for the given session id @@ -97,6 +99,7 @@ paths: data: - dataset_one - another_dataset + errors: null /dataset/{name}: get: description: Returns metadata about the named dataset @@ -303,6 +306,56 @@ paths: - data - errors 
additionalProperties: false + examples: + hcw: + value: + status: 'success' + data: + - name: H3N2 + model: + x: + - 40 + - 41 + 'y': + - 2.8 + - 2.9 + raw: + x: + - 40 + - 40 + - 40 + - 41 + - 41 + - 41 + 'y': + - 2 + - 3 + - 3 + - 4 + - 2 + - 4 + warnings: + - span is too small + '400': + description: Bad request + content: + application/json: + schema: + $ref: 'schema/ResponseFailure.schema.json' + examples: + scale: + value: + status: 'failure' + data: null + errors: + - error: 'BAD_REQUEST' + detail: "'scale' must be one of 'log', 'log2', or 'natural'" + '404': + description: Dataset not found + content: + application/json: + schema: + $ref: 'schema/ResponseFailure.schema.json' /dataset/{name}/individual/{pidcol}/: get: description: Returns plotly config for individual trajectories @@ -364,21 +417,72 @@ paths: - data - errors additionalProperties: false - '400': - description: Validation errors - content: - application/json: - schema: - $ref: 'schema/ResponseFailure.schema.json' - examples: - id: - value: - errors: - - error: 'BAD_REQUEST' - details: Id column 'person_id' not found. - '404': - description: Dataset not found - content: - application/json: - schema: - $ref: 'schema/ResponseFailure.schema.json' + examples: + hcw: + value: + status: 'success' + data: + data: + - x: + - 1 + - 2 + y: + - 2 + - 3 + type: scatter + mode: lines + xaxis: x + yaxis: y + - x: + - 1 + - 2 + y: + - 5 + - 6 + type: scatter + mode: lines + xaxis: x2 + yaxis: y + layout: + xaxis: + type: linear + autorange: false + range: + - 1 + - 2 + xaxis2: + type: linear + autorange: false + range: + - 1 + - 2 + yaxis: + type: linear + autorange: false + range: + - 0 + - 6 + page: 1 + numPages: 1 + warnings: null + errors: null + '400': + description: Validation errors + content: + application/json: + schema: + $ref: 'schema/ResponseFailure.schema.json' + examples: + id: + value: + status: 'failure' + data: null + errors: + - error: 'BAD_REQUEST' + details: Id column 'person_id' not found. 
+ '404': + description: Dataset not found + content: + application/json: + schema: + $ref: 'schema/ResponseFailure.schema.json'