
Commit cfe833f

test with new paradox (#109)
* test with new paradox
* code quality
* more checks
* test with new paradox
* trigger actions
* dev cmd check with paradox master
* news
1 parent a44f804 commit cfe833f

5 files changed: +13 -10 lines changed

.github/workflows/dev-cmd-check.yml

Lines changed: 2 additions & 1 deletion
@@ -27,6 +27,7 @@ jobs:
           - {os: ubuntu-latest, r: 'release', dev-package: 'mlr-org/bbotk'}
           - {os: ubuntu-latest, r: 'release', dev-package: 'mlr-org/mlr3'}
           - {os: ubuntu-latest, r: 'release', dev-package: 'mlr-org/mlr3tuning'}
+          - {os: ubuntu-latest, r: 'release', dev-package: "mlr-org/mlr3tuning', 'mlr-org/mlr3learners', 'mlr-org/mlr3pipelines', 'mlr-org/bbotk', 'mlr-org/paradox"}

     steps:
       - uses: actions/checkout@v3

@@ -41,7 +42,7 @@ jobs:
       needs: check

       - name: Install dev versions
-        run: pak::pkg_install('${{ matrix.config.dev-package }}')
+        run: pak::pkg_install(c('${{ matrix.config.dev-package }}'))
         shell: Rscript {0}

       - uses: r-lib/actions/check-r-package@v2
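The install step is templated on the matrix value; because the new dev-package entry already contains quote-comma-quote separators, wrapping the interpolation in c('…') turns the single string into a character vector of packages. A sketch of what the step expands to for the new entry (reconstructed from the matrix value above, not taken from an actual workflow run):

# Expansion of `run: pak::pkg_install(c('${{ matrix.config.dev-package }}'))`
# for the new matrix entry; pak resolves each GitHub slug as a dev dependency.
pak::pkg_install(c('mlr-org/mlr3tuning', 'mlr-org/mlr3learners', 'mlr-org/mlr3pipelines', 'mlr-org/bbotk', 'mlr-org/paradox'))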

NEWS.md

Lines changed: 2 additions & 0 deletions
@@ -1,5 +1,7 @@
 # mlr3hyperband (development version)

+* Compatibility with upcoming 'paradox' release.
+
 # mlr3hyperband 0.4.5

 * fix: Unloading `mlr3hyperband` removes optimizers and tuners from the dictionaries.

R/OptimizerHyperband.R

Lines changed: 1 addition & 1 deletion
@@ -80,7 +80,7 @@ OptimizerHyperband = R6Class("OptimizerHyperband",
     #' Creates a new instance of this [R6][R6::R6Class] class.
     initialize = function() {
       param_set = ps(
-        eta = p_dbl(lower = 1.0001, tags = "required", default = 2),
+        eta = p_dbl(lower = 1.0001, tags = "required"),
         sampler = p_uty(custom_check = function(x) check_r6(x, "Sampler", null.ok = TRUE)),
         repetitions = p_int(lower = 1L, default = 1, special_vals = list(Inf))
       )
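Dropping default = 2 from eta reads as a compatibility fix: the parameter is tagged "required", and declaring a default alongside that tag is presumably rejected by the upcoming paradox release (an inference from this diff, not stated in the commit). In practice eta is supplied explicitly, as the tests in this commit already do, for example:

# Hypothetical usage: eta is set explicitly when constructing the tuner;
# the value 2 mirrors the previously declared default.
tuner = tnr("hyperband", eta = 2)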

tests/testthat/test_TunerHyperband.R

Lines changed: 4 additions & 4 deletions
@@ -74,7 +74,7 @@ test_that("TunerHyperband works with custom sampler", {
     iter = to_tune(p_int(1, 4, tags = "budget"))
   )

-  sampler = Sampler1DRfun$new(learner$param_set$search_space()$params[["x"]], function(n) rbeta(n, 2, 5))
+  sampler = Sampler1DRfun$new(learner$param_set$search_space()$params[["x"]] %??% learner$param_set$search_space()$subset("x"), function(n) rbeta(n, 2, 5))

   test_tuner_hyperband(eta = 2, learner, sampler = sampler)
 })

@@ -86,7 +86,7 @@ test_that("TunerHyperband errors if not enough parameters are sampled", {
     iter = to_tune(p_int(1, 4, tags = "budget"))
   )

-  sampler = Sampler1DRfun$new(learner$param_set$search_space()$params[["x"]], function(n) rbeta(n, 2, 5))
+  sampler = Sampler1DRfun$new(learner$param_set$search_space()$params[["x"]] %??% learner$param_set$search_space()$subset("x"), function(n) rbeta(n, 2, 5))

   expect_error(tune(
     tnr( "hyperband", sampler = sampler),

@@ -106,8 +106,8 @@ test_that("TunerHyperband errors if budget parameter is sampled", {
   )

   sampler = SamplerJointIndep$new(list(
-    Sampler1DRfun$new(learner$param_set$search_space()$params[["x"]], function(n) rbeta(n, 2, 5)),
-    Sampler1D$new(learner$param_set$search_space()$params[["iter"]])
+    Sampler1DRfun$new(learner$param_set$search_space()$params[["x"]] %??% learner$param_set$search_space()$subset("x"), function(n) rbeta(n, 2, 5)),
+    Sampler1D$new(learner$param_set$search_space()$params[["iter"]] %??% learner$param_set$search_space()$subset("iter"))
   ))

   expect_error(tune(
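The %??% pattern is what lets these tests run against both paradox generations: the old release exposes search_space()$params as a named list of Param objects, so [["x"]] returns the parameter, while under the upcoming release that lookup is expected to yield NULL, so the right-hand side falls back to $subset("x"), a one-parameter ParamSet that the Sampler1D classes accept. This is inferred from the diff; a minimal sketch of the idiom, assuming %??% is mlr3misc's null-default operator:

# Sketch of the fallback used above (assumes mlr3misc's %??%: return the
# left-hand side unless it is NULL, otherwise evaluate the right-hand side).
library(mlr3misc)

param_or_subset = function(search_space, id) {
  # old paradox: a Param object; new paradox: NULL, so use the one-parameter ParamSet
  search_space$params[[id]] %??% search_space$subset(id)
}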

tests/testthat/test_TunerSuccessiveHalving.R

Lines changed: 4 additions & 4 deletions
@@ -91,7 +91,7 @@ test_that("TunerSuccessiveHalving works with custom sampler", {
     iter = to_tune(p_int(1, 4, tags = "budget"))
   )

-  sampler = Sampler1DRfun$new(learner$param_set$search_space()$params[["x"]], function(n) rbeta(n, 2, 5))
+  sampler = Sampler1DRfun$new(learner$param_set$search_space()$params[["x"]] %??% learner$param_set$search_space()$subset("x"), function(n) rbeta(n, 2, 5))

   test_tuner_successive_halving(n = 16, eta = 2, learner, sampler = sampler)
 })

@@ -103,7 +103,7 @@ test_that("TunerSuccessiveHalving errors if not enough parameters are sampled",
     iter = to_tune(p_int(1, 4, tags = "budget"))
   )

-  sampler = Sampler1DRfun$new(learner$param_set$search_space()$params[["x"]], function(n) rbeta(n, 2, 5))
+  sampler = Sampler1DRfun$new(learner$param_set$search_space()$params[["x"]] %??% learner$param_set$search_space()$subset("x"), function(n) rbeta(n, 2, 5))

   expect_error(tune(
     tnr( "successive_halving", sampler = sampler),

@@ -123,8 +123,8 @@ test_that("TunerSuccessiveHalving errors if budget parameter is sampled", {
   )

   sampler = SamplerJointIndep$new(list(
-    Sampler1DRfun$new(learner$param_set$search_space()$params[["x"]], function(n) rbeta(n, 2, 5)),
-    Sampler1D$new(learner$param_set$search_space()$params[["iter"]])
+    Sampler1DRfun$new(learner$param_set$search_space()$params[["x"]] %??% learner$param_set$search_space()$subset("x"), function(n) rbeta(n, 2, 5)),
+    Sampler1D$new(learner$param_set$search_space()$params[["iter"]] %??% learner$param_set$search_space()$subset("iter"))
   ))

   expect_error(tune(
