2 changes: 1 addition & 1 deletion .github/workflows/R-CMD-check.yaml
@@ -80,7 +80,7 @@ jobs:
- name: Install Keras backend
# The keras3 package is installed by the previous step (as a dependency)
# This step installs the required Python libraries (e.g., tensorflow).
run: keras3::install_keras()
run: keras3::install_keras(envname = "r-reticulate")
shell: Rscript {0}

- uses: r-lib/actions/check-r-package@v2
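For context on this one-line workflow change, here is a hedged sketch of the equivalent local setup, not part of the diff: pinning `envname = "r-reticulate"` installs the Python backend into the default virtualenv that reticulate discovers without extra configuration. The `reticulate` calls are illustrative checks only, not something the workflow runs.

# Hedged local sketch of what the CI step above amounts to.
keras3::install_keras(envname = "r-reticulate")
# Illustrative checks (assumption): confirm reticulate resolves the same
# environment and can import the keras module.
reticulate::use_virtualenv("r-reticulate", required = TRUE)
reticulate::py_module_available("keras")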
1 change: 1 addition & 0 deletions NAMESPACE
@@ -19,6 +19,7 @@ export(keras_losses)
export(keras_metrics)
export(keras_optimizers)
export(loss_function_keras)
export(model_exists)
export(optimizer_function)
export(process_x_functional)
export(process_x_sequential)
150 changes: 130 additions & 20 deletions R/compile_keras_grid.R
@@ -42,35 +42,49 @@
#' @examples
#' \donttest{
#' if (requireNamespace("keras3", quietly = TRUE)) {
#' library(keras3)
#' library(parsnip)
#' library(dials)
#'
#' # 1. Define a kerasnip model specification
#' # 1. Define layer blocks
#' input_block <- function(model, input_shape) {
#' keras_model_sequential(input_shape = input_shape)
#' }
#' hidden_block <- function(model, units = 32) {
#' model |> layer_dense(units = units, activation = "relu")
#' }
#' output_block <- function(model, num_classes) {
#' model |> layer_dense(units = num_classes, activation = "softmax")
#' }
#'
#' # 2. Define a kerasnip model specification
#' create_keras_sequential_spec(
#' model_name = "my_mlp",
#' model_name = "my_mlp_grid",
#' layer_blocks = list(
#' input_block,
#' hidden_block,
#' output_block
#' input = input_block,
#' hidden = hidden_block,
#' output = output_block
#' ),
#' mode = "classification"
#' )
#'
#' mlp_spec <- my_mlp(
#' mlp_spec <- my_mlp_grid(
#' hidden_units = tune(),
#' compile_loss = "categorical_crossentropy",
#' compile_optimizer = "adam"
#' )
#'
#' # 2. Create a hyperparameter grid
#' # 3. Create a hyperparameter grid
#' # Include an invalid value (-10) to demonstrate error handling
#' param_grid <- tibble::tibble(
#' hidden_units = c(32, 64, -10)
#' )
#'
#' # 3. Prepare dummy data
#' # 4. Prepare dummy data
#' x_train <- matrix(rnorm(100 * 10), ncol = 10)
#' y_train <- factor(sample(0:1, 100, replace = TRUE))
#'
#' # 4. Compile models over the grid
#' # 5. Compile models over the grid
#' compiled_grid <- compile_keras_grid(
#' spec = mlp_spec,
#' grid = param_grid,
@@ -79,8 +93,9 @@
#' )
#'
#' print(compiled_grid)
#' remove_keras_spec("my_mlp_grid")
#'
#' # 5. Inspect the results
#' # 6. Inspect the results
#' # The row with `hidden_units = -10` will show an error.
#' }
#' }
@@ -194,15 +209,61 @@ compile_keras_grid <- function(spec, grid, x, y) {
#'
#' @examples
#' \donttest{
#' # Continuing the example from `compile_keras_grid`:
#' if (requireNamespace("keras3", quietly = TRUE)) {
#' library(keras3)
#' library(parsnip)
#' library(dials)
#'
#' # 1. Define layer blocks
#' input_block <- function(model, input_shape) {
#' keras_model_sequential(input_shape = input_shape)
#' }
#' hidden_block <- function(model, units = 32) {
#' model |> layer_dense(units = units, activation = "relu")
#' }
#' output_block <- function(model, num_classes) {
#' model |> layer_dense(units = num_classes, activation = "softmax")
#' }
#'
#' # 2. Define a kerasnip model specification
#' create_keras_sequential_spec(
#' model_name = "my_mlp_grid_2",
#' layer_blocks = list(
#' input = input_block,
#' hidden = hidden_block,
#' output = output_block
#' ),
#' mode = "classification"
#' )
#'
#' mlp_spec <- my_mlp_grid_2(
#' hidden_units = tune(),
#' compile_loss = "categorical_crossentropy",
#' compile_optimizer = "adam"
#' )
#'
#' # 3. Create a hyperparameter grid
#' param_grid <- tibble::tibble(
#' hidden_units = c(32, 64, -10)
#' )
#'
#' # `compiled_grid` contains one row with an error.
#' valid_grid <- extract_valid_grid(compiled_grid)
#' # 4. Prepare dummy data
#' x_train <- matrix(rnorm(100 * 10), ncol = 10)
#' y_train <- factor(sample(0:1, 100, replace = TRUE))
#'
#' # `valid_grid` now only contains the rows that compiled successfully.
#' print(valid_grid)
#' # 5. Compile models over the grid
#' compiled_grid <- compile_keras_grid(
#' spec = mlp_spec,
#' grid = param_grid,
#' x = x_train,
#' y = y_train
#' )
#'
#' # This clean grid can now be passed to tune::tune_grid().
#' # 6. Extract the valid grid
#' valid_grid <- extract_valid_grid(compiled_grid)
#' print(valid_grid)
#' remove_keras_spec("my_mlp_grid_2")
#' }
#' }
#' @export
extract_valid_grid <- function(compiled_grid) {
@@ -242,11 +303,60 @@ extract_valid_grid <- function(compiled_grid) {
#'
#' @examples
#' \donttest{
#' # Continuing the example from `compile_keras_grid`:
#' if (requireNamespace("keras3", quietly = TRUE)) {
#' library(keras3)
#' library(parsnip)
#' library(dials)
#'
#' # 1. Define layer blocks
#' input_block <- function(model, input_shape) {
#' keras_model_sequential(input_shape = input_shape)
#' }
#' hidden_block <- function(model, units = 32) {
#' model |> layer_dense(units = units, activation = "relu")
#' }
#' output_block <- function(model, num_classes) {
#' model |> layer_dense(units = num_classes, activation = "softmax")
#' }
#'
#' # `compiled_grid` contains one row with an error.
#' # This will print a formatted summary of that error.
#' inform_errors(compiled_grid)
#' # 2. Define a kerasnip model specification
#' create_keras_sequential_spec(
#' model_name = "my_mlp_grid_3",
#' layer_blocks = list(
#' input = input_block,
#' hidden = hidden_block,
#' output = output_block
#' ),
#' mode = "classification"
#' )
#'
#' mlp_spec <- my_mlp_grid_3(
#' hidden_units = tune(),
#' compile_loss = "categorical_crossentropy",
#' compile_optimizer = "adam"
#' )
#'
#' # 3. Create a hyperparameter grid
#' param_grid <- tibble::tibble(
#' hidden_units = c(32, 64, -10)
#' )
#'
#' # 4. Prepare dummy data
#' x_train <- matrix(rnorm(100 * 10), ncol = 10)
#' y_train <- factor(sample(0:1, 100, replace = TRUE))
#'
#' # 5. Compile models over the grid
#' compiled_grid <- compile_keras_grid(
#' spec = mlp_spec,
#' grid = param_grid,
#' x = x_train,
#' y = y_train
#' )
#'
#' # 6. Inform about errors
#' inform_errors(compiled_grid)
#' remove_keras_spec("my_mlp_grid_3")
#' }
#' }
#' @export
inform_errors <- function(compiled_grid, n = 10) {
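The removed example comment above noted that the cleaned grid "can now be passed to tune::tune_grid()". Below is a hedged sketch of that downstream step, not part of the diff; the workflow and resampling objects are illustrative assumptions, reusing the spec, grid, and training data created in the examples in this file.

library(tune)
library(rsample)
library(workflows)

# Assumes mlp_spec, valid_grid, x_train, and y_train from the examples above.
train_df <- data.frame(x = I(x_train), y = y_train)
folds <- vfold_cv(train_df, v = 2)

wf <- workflow() |>
  add_model(mlp_spec) |>
  add_formula(y ~ x)

# Only the parameter combinations that compiled cleanly are tuned.
results <- tune_grid(wf, resamples = folds, grid = valid_grid)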
1 change: 1 addition & 0 deletions R/create_keras_functional_spec.R
@@ -119,6 +119,7 @@
#' # model_spec <- my_resnet_spec(num_dense_path = 2, dense_path_units = 32)
#'
#' print(model_spec)
#' remove_keras_spec("my_resnet_spec")
#' # tune::tunable(model_spec)
#' }
#' }
5 changes: 3 additions & 2 deletions R/create_keras_sequential_spec.R
@@ -84,7 +84,7 @@
#'
#' # 2. Create the spec, providing blocks in the correct order.
#' create_keras_sequential_spec(
#' model_name = "my_mlp",
#' model_name = "my_mlp_seq_spec",
#' layer_blocks = list(
#' input = input_block,
#' hidden = hidden_block,
@@ -95,14 +95,15 @@
#'
#' # 3. Use the newly created specification function!
#' # Note the new arguments `num_hidden` and `hidden_units`.
#' model_spec <- my_mlp(
#' model_spec <- my_mlp_seq_spec(
#' num_hidden = 2,
#' hidden_units = 64,
#' epochs = 10,
#' learn_rate = 0.01
#' )
#'
#' print(model_spec)
#' remove_keras_spec("my_mlp_seq_spec")
#' }
#' }
create_keras_sequential_spec <- function(
9 changes: 5 additions & 4 deletions R/generic_functional_fit.R
@@ -57,13 +57,14 @@
#' # It is called internally by `parsnip::fit()`.
#' # For example:
#' \donttest{
#' library(parsnip)
#' # create_keras_functional_spec(...) defines my_functional_model
#'
#' spec <- my_functional_model(hidden_units = 128, fit_epochs = 10) |>
#' set_engine("keras")
#' # spec <- my_functional_model(hidden_units = 128, fit_epochs = 10) |>
#' # set_engine("keras")
#'
#' # This call to fit() would invoke generic_functional_fit() internally
#' fitted_model <- fit(spec, y ~ x, data = training_data)
#' # # This call to fit() would invoke generic_functional_fit() internally
#' # fitted_model <- fit(spec, y ~ x, data = training_data)
#' }
#' @keywords internal
#' @export
9 changes: 5 additions & 4 deletions R/generic_sequential_fit.R
@@ -57,13 +57,14 @@
#' # It is called internally by `parsnip::fit()`.
#' # For example:
#' \donttest{
#' library(parsnip)
#' # create_keras_sequential_spec(...) defines my_sequential_model
#'
#' spec <- my_sequential_model(hidden_1_units = 128, fit_epochs = 10) |>
#' set_engine("keras")
#' # spec <- my_sequential_model(hidden_1_units = 128, fit_epochs = 10) |>
#' # set_engine("keras")
#'
#' # This call to fit() would invoke generic_sequential_fit() internally
#' fitted_model <- fit(spec, y ~ x, data = training_data)
#' # # This call to fit() would invoke generic_sequential_fit() internally
#' # fitted_model <- fit(spec, y ~ x, data = training_data)
#' }
#' @keywords internal
#' @export
32 changes: 25 additions & 7 deletions R/keras_tools.R
@@ -20,15 +20,32 @@
#' @examples
#' \donttest{
#' if (requireNamespace("keras3", quietly = TRUE)) {
#' library(keras3)
#' library(parsnip)
#'
#' # 1. Define and fit a model ----
#' # 1. Define layer blocks
#' input_block <- function(model, input_shape) {
#' keras_model_sequential(input_shape = input_shape)
#' }
#' hidden_block <- function(model, units = 32) {
#' model |> layer_dense(units = units, activation = "relu")
#' }
#' output_block <- function(model, num_classes) {
#' model |> layer_dense(units = num_classes, activation = "softmax")
#' }
#'
#' # 2. Define and fit a model ----
#' create_keras_sequential_spec(
#' model_name = "my_mlp",
#' layer_blocks = list(input_block, hidden_block, output_block),
#' model_name = "my_mlp_tools",
#' layer_blocks = list(
#' input = input_block,
#' hidden = hidden_block,
#' output = output_block
#' ),
#' mode = "classification"
#' )
#'
#' mlp_spec <- my_mlp(
#' mlp_spec <- my_mlp_tools(
#' hidden_units = 32,
#' compile_loss = "categorical_crossentropy",
#' compile_optimizer = "adam",
@@ -42,20 +59,21 @@
#'
#' fitted_mlp <- fit(mlp_spec, y ~ x, data = train_df)
#'
#' # 2. Evaluate the model on new data ----
#' # 3. Evaluate the model on new data ----
#' x_test <- matrix(rnorm(50 * 10), ncol = 10)
#' y_test <- factor(sample(0:1, 50, replace = TRUE))
#'
#' eval_metrics <- keras_evaluate(fitted_mlp, x_test, y_test)
#' print(eval_metrics)
#'
#' # 3. Extract the Keras model object ----
#' # 4. Extract the Keras model object ----
#' keras_model <- extract_keras_model(fitted_mlp)
#' summary(keras_model)
#'
#' # 4. Extract the training history ----
#' # 5. Extract the training history ----
#' history <- extract_keras_history(fitted_mlp)
#' plot(history)
#' remove_keras_spec("my_mlp_tools")
#' }
#' }
#' @export
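As a hedged follow-on to the keras_tools.R example above, and not part of the diff, the fitted model can also be used for prediction through the standard parsnip interface; the `new_data` construction mirrors the `y ~ x` formula used when fitting.

# Assumes fitted_mlp and x_test from the example above.
test_df <- data.frame(x = I(x_test))
class_preds <- predict(fitted_mlp, new_data = test_df)
# Assumption: the classification engine also supports class probabilities.
prob_preds <- predict(fitted_mlp, new_data = test_df, type = "prob")
head(class_preds)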
2 changes: 1 addition & 1 deletion R/remove_keras_spec.R
@@ -55,7 +55,7 @@
#'
#' # Check it's gone
#' !exists("my_temp_model")
#' !"my_temp_model" %in% parsnip::show_engines(NULL)$model
#' !model_exists("my_temp_model")
#' }
#' }
remove_keras_spec <- function(model_name, env = parent.frame()) {
27 changes: 27 additions & 0 deletions R/utils.R
@@ -416,3 +416,30 @@ get_model_env <- function() {
current <- utils::getFromNamespace("parsnip", ns = "parsnip")
current
}

#' Check if a Kerasnip Model Specification Exists
#'
#' @description
#' This is an internal helper function to check if a model specification has been
#' registered in the `parsnip` model environment.
#'
#' @param model_name A character string giving the name of the model
#' specification function to check (e.g., "my_mlp").
#' @return A logical value, `TRUE` if the model exists, `FALSE` otherwise.
#' @examples
#' \donttest{
#' if (requireNamespace("parsnip", quietly = TRUE)) {
#' library(parsnip)
#'
#' # Check for a model that exists in parsnip
#' model_exists("mlp")
#'
#' # Check for a model that does not exist
#' model_exists("non_existent_model")
#' }
#' }
#' @keywords internal
#' @export
model_exists <- function(model_name) {
model_name %in% ls(get_model_env())
}
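
A short hedged sketch of how the newly exported `model_exists()` pairs with `remove_keras_spec()` to keep example code idempotent; the spec name and layer blocks below are illustrative, reusing the block definitions from the keras_tools.R example above.

# Register the spec only if it is not already registered.
if (!model_exists("my_mlp_tools")) {
  create_keras_sequential_spec(
    model_name = "my_mlp_tools",
    layer_blocks = list(
      input = input_block,
      hidden = hidden_block,
      output = output_block
    ),
    mode = "classification"
  )
}

# Clean up afterwards so repeated runs start from a known state.
if (model_exists("my_mlp_tools")) {
  remove_keras_spec("my_mlp_tools")
}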