diff --git a/R/authenticate.R b/R/authenticate.R
index 36e67a4..568cbf6 100644
--- a/R/authenticate.R
+++ b/R/authenticate.R
@@ -1,6 +1,6 @@
-authenticate <- function(request, error_call = caller_env()){
+authenticate <- function(request, dry_run = FALSE, error_call = caller_env()){
   key <- Sys.getenv("MISTRAL_API_KEY")
-  if (identical(key, "")) {
+  if (!is_true(dry_run) && identical(key, "")) {
     cli_abort(call = error_call, c(
       "Please set the {.code MISTRAL_API_KEY} environment variable",
       i = "Get an API key from {.url https://console.mistral.ai/api-keys/}",
@@ -9,4 +9,3 @@ authenticate <- function(request, error_call = caller_env()){
   }
   req_auth_bearer_token(request, key)
 }
-
diff --git a/R/chat.R b/R/chat.R
index 1e9ade6..ba60d22 100644
--- a/R/chat.R
+++ b/R/chat.R
@@ -2,28 +2,33 @@
 #'
 #' @param text some text
 #' @param model which model to use. See [models()] for more information about which models are available
+#' @param dry_run if TRUE the request is not performed
 #' @param ... ignored
 #' @inheritParams httr2::req_perform
 #'
-#' @return Result text from Mistral
+#' @return A tibble with columns `role` and `content` with class `chat_tibble`, or a request
+#' if this is a `dry_run`
 #'
 #' @examples
-#' \dontrun{
-#' chat("Top 5 R packages")
-#' }
+#' chat("Top 5 R packages", dry_run = TRUE)
 #'
 #' @export
-chat <- function(text = "What are the top 5 R packages ?", model = "mistral-tiny", ..., error_call = current_env()) {
-  req_chat(text, model, error_call = error_call) |>
-    req_mistral_perform(error_call = error_call) |>
-    resp_chat(error_call = error_call)
+chat <- function(text = "What are the top 5 R packages ?", model = "mistral-tiny", dry_run = FALSE, ..., error_call = current_env()) {
+  req <- req_chat(text, model, error_call = error_call, dry_run = dry_run)
+  if (is_true(dry_run)) {
+    return(req)
+  }
+  resp <- req_mistral_perform(req, error_call = error_call)
+  resp_chat(resp, error_call = error_call)
 }
 
-req_chat <- function(text = "What are the top 5 R packages ?", model = "mistral-tiny", stream = FALSE, error_call = caller_env()) {
-  check_model(model, error_call = error_call)
+req_chat <- function(text = "What are the top 5 R packages ?", model = "mistral-tiny", stream = FALSE, dry_run = FALSE, error_call = caller_env()) {
+  if (!is_true(dry_run)) {
+    check_model(model, error_call = error_call)
+  }
   request(mistral_base_url) |>
     req_url_path_append("v1", "chat", "completions") |>
-    authenticate(error_call = error_call) |>
+    authenticate(error_call = error_call, dry_run = dry_run) |>
     req_body_json(
       list(
         model = model,
diff --git a/R/models.R b/R/models.R
index 43e52f8..1282b9a 100644
--- a/R/models.R
+++ b/R/models.R
@@ -1,24 +1,25 @@
 #' Retrieve all models available in the Mistral API
 #'
-#' @inheritParams httr2::req_perform
+#' @inheritParams chat
 #'
 #' @return A character vector with the models available in the Mistral API
 #'
 #' @examples
-#' \dontrun{
-#' models()
-#' }
+#' models(dry_run = TRUE)
 #'
 #' @export
-models <- function(error_call = current_env()) {
-
+models <- function(error_call = caller_env(), dry_run = FALSE) {
   req <- request(mistral_base_url) |>
     req_url_path_append("v1", "models") |>
-    authenticate(error_call = call) |>
+    authenticate(error_call = error_call, dry_run = dry_run) |>
     req_cache(tempdir(), use_on_error = TRUE, max_age = 2 * 60 * 60) # 2 hours
 
+  if (is_true(dry_run)) {
+    return(req)
+  }
+
   req_mistral_perform(req, error_call = error_call) |>
     resp_body_json(simplifyVector = TRUE) |>
     pluck("data","id")
diff --git a/R/stream.R b/R/stream.R
index 0524af2..aaf793b 100644
--- a/R/stream.R
+++ b/R/stream.R
@@ -1,11 +1,16 @@
 #' stream
 #'
 #' @inheritParams chat
+#'
 #' @export
-stream <- function(text, model = "mistral-tiny", ..., error_call = current_env()) {
+stream <- function(text, model = "mistral-tiny", dry_run = FALSE, ..., error_call = current_env()) {
   check_model(model, error_call = error_call)
-  req <- req_chat(text, model, stream = TRUE, error_call = error_call)
+  req <- req_chat(text, model, stream = TRUE, error_call = error_call, dry_run = dry_run)
+  if (is_true(dry_run)) {
+    return(req)
+  }
+
   resp <- req_perform_stream(req,
     callback = stream_callback,
     round = "line",
diff --git a/man/chat.Rd b/man/chat.Rd
index e76f9ea..67f8483 100644
--- a/man/chat.Rd
+++ b/man/chat.Rd
@@ -7,6 +7,7 @@
 chat(
   text = "What are the top 5 R packages ?",
   model = "mistral-tiny",
+  dry_run = FALSE,
   ...,
   error_call = current_env()
 )
@@ -16,6 +17,8 @@ chat(
 \item{model}{which model to use. See \code{\link[=models]{models()}} for more
 information about which models are available}
 
+\item{dry_run}{if TRUE the request is not performed}
+
 \item{...}{ignored}
 
 \item{error_call}{The execution environment of a currently
@@ -24,14 +27,13 @@ mentioned in error messages as the source of the error. See the
 \code{call} argument of \code{\link[rlang:abort]{abort()}} for more information.}
 }
 \value{
-Result text from Mistral
+A tibble with columns \code{role} and \code{content} with class \code{chat_tibble}, or a request
+if this is a \code{dry_run}
 }
 \description{
 Chat with the Mistral api
 }
 \examples{
-\dontrun{
-chat("Top 5 R packages")
-}
+chat("Top 5 R packages", dry_run = TRUE)
 }
 
diff --git a/man/models.Rd b/man/models.Rd
index 0caeefb..5427914 100644
--- a/man/models.Rd
+++ b/man/models.Rd
@@ -4,13 +4,15 @@
 \alias{models}
 \title{Retrieve all models available in the Mistral API}
 \usage{
-models(error_call = current_env())
+models(error_call = caller_env(), dry_run = FALSE)
 }
 \arguments{
 \item{error_call}{The execution environment of a currently
 running function, e.g. \code{caller_env()}. The function will be
 mentioned in error messages as the source of the error. See the
 \code{call} argument of \code{\link[rlang:abort]{abort()}} for more information.}
+
+\item{dry_run}{if TRUE the request is not performed}
 }
 \value{
 A character vector with the models available in the Mistral API
@@ -19,8 +21,6 @@ A character vector with the models available in the Mistral API
 Retrieve all models available in the Mistral API
 }
 \examples{
-\dontrun{
- models()
-}
+models(dry_run = TRUE)
 }
 
diff --git a/man/stream.Rd b/man/stream.Rd
index a5db2af..7e7ae64 100644
--- a/man/stream.Rd
+++ b/man/stream.Rd
@@ -4,13 +4,21 @@
 \alias{stream}
 \title{stream}
 \usage{
-stream(text, model = "mistral-tiny", ..., error_call = current_env())
+stream(
+  text,
+  model = "mistral-tiny",
+  dry_run = FALSE,
+  ...,
+  error_call = current_env()
+)
 }
 \arguments{
 \item{text}{some text}
 
 \item{model}{which model to use. See \code{\link[=models]{models()}} for more
 information about which models are available}
 
+\item{dry_run}{if TRUE the request is not performed}
+
 \item{...}{ignored}
 
 \item{error_call}{The execution environment of a currently
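Illustrative usage of the new `dry_run` argument (a sketch based on the signatures added in this patch, not part of the diff; the prompt strings are arbitrary):

# With dry_run = TRUE, chat() skips the MISTRAL_API_KEY check and returns
# the underlying httr2 request instead of performing it.
req <- chat("Top 5 R packages", dry_run = TRUE)
class(req)   # "httr2_request"
req$url      # .../v1/chat/completions

# models() gains the same escape hatch and also returns the request:
models(dry_run = TRUE)

# stream() accepts dry_run too, but note it still calls check_model() first:
stream("Tell me a joke", dry_run = TRUE)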