Commit 693fae1

Update docs
1 parent 5c6ab4c commit 693fae1

File tree

    Project.toml
    docs/src/usage.md
    src/DictOptimization/interface.jl
    src/DictOptimization/types.jl

4 files changed: +168 -2 lines changed

Project.toml

Lines changed: 1 addition & 1 deletion

@@ -1,7 +1,7 @@
 name = "Jutul"
 uuid = "2b460a1a-8a2b-45b2-b125-b5c536396eb9"
 authors = ["Olav Møyner <olav.moyner@gmail.com>"]
-version = "0.4.2"
+version = "0.4.3"

 [deps]
 AlgebraicMultigrid = "2169fc97-5a83-5252-b627-83903c6c433c"

docs/src/usage.md

Lines changed: 17 additions & 0 deletions

@@ -84,6 +84,23 @@ Jutul.solve_numerical_sensitivities
 setup_parameter_optimization
 ```

+### Generic optimization interface
+
+```@docs
+DictParameters
+```
+
+```@docs
+free_optimization_parameter!
+freeze_optimization_parameter!
+set_optimization_parameter!
+```
+
+```@docs
+optimize
+parameters_gradient
+```
+
 ## Linear solvers

 ```@docs

src/DictOptimization/interface.jl

Lines changed: 125 additions & 0 deletions

@@ -1,3 +1,55 @@
+"""
+    optimized_dict = optimize(dopt, objective)
+    optimize(dopt::DictParameters, objective, setup_fn = dopt.setup_function;
+        grad_tol = 1e-6,
+        obj_change_tol = 1e-6,
+        max_it = 25,
+        opt_fun = missing,
+        maximize = false,
+        simulator = missing,
+        config = missing,
+        solution_history = false,
+        backend_arg = (
+            use_sparsity = false,
+            di_sparse = true,
+            single_step_sparsity = false,
+            do_prep = true,
+        ),
+        kwarg...
+    )
+
+Optimize parameters defined in a [`DictParameters`](@ref) object using the
+provided objective function. At least one variable has to be declared to be free
+using `free_optimization_parameter!` prior to calling the optimizer.
+
+# Arguments
+- `dopt::DictParameters`: Container with parameters to optimize
+- `objective`: The objective function to minimize (or maximize)
+- `setup_fn`: Function to set up the optimization problem. Defaults to `dopt.setup_function`
+
+# Keyword Arguments
+- `grad_tol`: Gradient tolerance for stopping criterion
+- `obj_change_tol`: Objective function change tolerance for stopping criterion
+- `max_it`: Maximum number of iterations
+- `opt_fun`: Optional custom optimization function. If missing, L-BFGS will be used
+- `maximize`: Set to `true` to maximize the objective instead of minimizing
+- `simulator`: Optional simulator object used in forward simulations
+- `config`: Optional configuration for the setup
+- `solution_history`: If `true`, stores all intermediate solutions
+- `backend_arg`: Options for the autodiff backend:
+    - `use_sparsity`: Enable sparsity detection for the objective function
+    - `di_sparse`: Use sparse differentiation
+    - `single_step_sparsity`: Enable single step sparsity detection (if sparsity does not change during timesteps)
+    - `do_prep`: Perform preparation step
+
+# Returns
+The optimized parameters as a dictionary.
+
+# Notes
+- The function stores the optimization history and optimized parameters in the input `dopt` object.
+- If `solution_history` is `true`, intermediate solutions are stored in `dopt.history.solutions`.
+- The default optimization algorithm is L-BFGS with box constraints.
+"""
 function optimize(dopt::DictParameters, objective, setup_fn = dopt.setup_function;
         grad_tol = 1e-6,
         obj_change_tol = 1e-6,
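
A minimal end-to-end sketch of how this new interface is meant to be used. The dictionary keys, `setup_case` and `my_objective` below are illustrative placeholders, not part of the commit:

```julia
using Jutul

# Illustrative nested parameter dictionary; keys and values are placeholders.
prm = Dict("rock" => Dict("perm" => fill(1e-13, 100)))

# setup_case and my_objective are hypothetical user-defined functions:
# setup_case(prm, step_info) must return a JutulCase built from prm, and
# my_objective must be a Jutul-compatible objective to minimize.
dopt = DictParameters(prm, setup_case)

# At least one parameter must be freed before calling optimize.
free_optimization_parameter!(dopt, ["rock", "perm"], rel_min = 0.1, rel_max = 10.0)

optimized_prm = optimize(dopt, my_objective, max_it = 25)
```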
@@ -81,6 +133,15 @@ function optimize(dopt::DictParameters, objective, setup_fn = dopt.setup_functio
     return prm_out
 end

+"""
+    parameters_gradient(dopt::DictParameters, objective, setup_fn = dopt.setup_function)
+
+Compute the gradient of the objective function with respect to the parameters
+defined in the `DictParameters` object. This function will return the gradient
+as a dictionary with the same structure as the input parameters, where each
+entry is a vector of gradients for each parameter. Only gradients with respect
+to free parameters will be computed.
+"""
 function parameters_gradient(dopt::DictParameters, objective, setup_fn = dopt.setup_function;
         simulator = missing,
         config = missing,
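
Continuing the illustrative sketch above, the gradient could be queried without running the optimizer; the nested indexing shown is an assumption based on the docstring's "same structure as the input parameters":

```julia
# Returns a Dict mirroring dopt.parameters; only entries for free parameters
# are populated with gradient values.
g = parameters_gradient(dopt, my_objective)
g["rock"]["perm"]  # gradient entries for the freed parameter
```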
@@ -118,6 +179,16 @@ function parameters_gradient(dopt::DictParameters, objective, setup_fn = dopt.se
     return out
 end

+"""
+    freeze_optimization_parameter!(dopt, "parameter_name")
+    freeze_optimization_parameter!(dopt, ["dict_name", "parameter_name"])
+    freeze_optimization_parameter!(dopt::DictParameters, parameter_name, val = missing)
+
+Freeze an optimization parameter in the `DictParameters` object. This will
+remove the parameter from the optimization targets and set its value to `val`
+if provided. Any limits/lumping/scaling settings for this parameter will be
+removed.
+"""
 function freeze_optimization_parameter!(dopt::DictParameters, parameter_name, val = missing)
     parameter_name = convert_key(parameter_name)
     if !ismissing(val)
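
For example, continuing with the hypothetical parameter key from the sketch above:

```julia
# Remove the parameter from the optimization targets, keeping its current value.
freeze_optimization_parameter!(dopt, ["rock", "perm"])

# Freeze and simultaneously overwrite the stored value (value is illustrative).
freeze_optimization_parameter!(dopt, ["rock", "perm"], fill(5e-14, 100))
```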
@@ -126,6 +197,54 @@ function freeze_optimization_parameter!(dopt::DictParameters, parameter_name, va
     delete!(dopt.parameter_targets, parameter_name)
 end

+"""
+    free_optimization_parameter!(dopt, "parameter_name", rel_min = 0.01, rel_max = 100.0)
+    free_optimization_parameter!(dopt, ["dict_name", "parameter_name"], abs_min = -8.0, abs_max = 7.0)
+
+Free an existing parameter for optimization in the `DictParameters` object. This
+will allow the parameter to be optimized through a call to [`optimize`](@ref).
+
+# Nesting structures
+If your `DictParameters` has a nested structure, you can use a vector of
+strings or symbols to specify the parameter name, e.g. `["dict_name",
+"parameter_name"]` to access the parameter located at
+`["dict_name"]["parameter_name"]`.
+
+# Setting limits
+The limits can be set using the following keyword arguments:
+- `abs_min`: Absolute minimum value for the parameter. If not set, no absolute
+  minimum will be applied.
+- `abs_max`: Absolute maximum value for the parameter. If not set, no absolute
+  maximum will be applied.
+- `rel_min`: Relative minimum value for the parameter. If not set, no relative
+  minimum will be applied.
+- `rel_max`: Relative maximum value for the parameter. If not set, no relative
+  maximum will be applied.
+
+For any of these entries it is possible to pass either a scalar or an array.
+If an array is passed, it must have the same size as the parameter being set.
+
+Note that if `dopt.strict` is set to `true`, at least one of the upper or lower
+bounds must be set for free parameters. If `dopt.strict` is set to `false`, the
+bounds are optional and the `DictParameters` object can be used to compute
+sensitivities, but the built-in optimization routine assumes that finite limits
+are set for all parameters.
+
+# Other keyword arguments
+- `initial`: Initial value for the parameter. If not set, the current value in
+  `dopt.parameters` will be used.
+- `scaler=missing`: Optional scaler for the parameter. If not set, no scaling
+  will be applied. Available scalers are `:log` and `:exp`. The scaler will be
+  applied to the parameter before it is passed to the optimizer.
+- `lumping=missing`: Optional lumping array for the parameter. If not set, no
+  lumping will be applied. The lumping array should have the same size as the
+  parameter and contain positive integers. The lumping array defines groups of
+  indices that should be lumped together, i.e. the same value will be used for
+  all indices in the same group. The lumping array should contain all integers
+  from 1 to the maximum value in the array, and all indices in the same group
+  should have the same value in the initial parameter, otherwise an error will
+  be thrown.
+"""
 function free_optimization_parameter!(dopt::DictParameters, parameter_name;
         initial = missing,
         abs_min = -Inf,
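
A few hedged usage sketches for limits, scaling and lumping; the keys and array sizes are illustrative and assume a 100-element parameter:

```julia
# Relative bounds: the allowed range is a multiple of the initial value.
free_optimization_parameter!(dopt, ["rock", "perm"], rel_min = 0.01, rel_max = 100.0)

# Absolute bounds combined with a log scaler for a strictly positive parameter.
free_optimization_parameter!(dopt, ["rock", "perm"], abs_min = 1e-15, abs_max = 1e-11, scaler = :log)

# Lumping: the first 50 entries share one value and the last 50 another
# (each group must start out uniform in the initial parameter).
lump = vcat(fill(1, 50), fill(2, 50))
free_optimization_parameter!(dopt, ["rock", "perm"], rel_min = 0.1, rel_max = 10.0, lumping = lump)
```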
@@ -191,6 +310,12 @@ function free_optimization_parameters!(dopt::DictParameters, targets = all_keys(
     return dopt
 end

+"""
+    set_optimization_parameter!(dopt::DictParameters, parameter_name, value)
+
+Set a specific optimization parameter in the `DictParameters` object. This
+function will update the value of the parameter in the `dopt.parameters`
+dictionary.
+"""
 function set_optimization_parameter!(dopt::DictParameters, parameter_name, value)
     set_nested_dict_value!(dopt.parameters, parameter_name, value)
 end
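
For example, to overwrite a stored value before re-running the optimization (key and value are illustrative):

```julia
set_optimization_parameter!(dopt, ["rock", "perm"], fill(2e-13, 100))
```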

src/DictOptimization/types.jl

Lines changed: 25 additions & 1 deletion

@@ -22,7 +22,31 @@ mutable struct DictParameters
     active_type
     setup_function
     history
-    function DictParameters(parameters::AbstractDict, setup_function = missing;
+    @doc """
+        DictParameters(parameters)
+        DictParameters(parameters::AbstractDict, setup_function = missing;
+            strict = true,
+            verbose = true,
+            active_type = Float64
+        )
+
+    Set up a `DictParameters` object for optimization. Optionally, a setup
+    function can be provided that takes an instance with the same keys as
+    `parameters` together with a `step_info` dictionary. The setup function
+    should return a `JutulCase` set up from the parameters in the Dict.
+
+    Optional keyword arguments:
+    - `strict`: If true, the optimization will throw an error if any of the
+      parameters are not set with at least one of the upper or lower bounds.
+    - `verbose`: If true, the optimization will print information about the
+      optimization process.
+    - `active_type`: The type of the parameters that are considered active in
+      the optimization. Defaults to `Float64`. This is used to determine which
+      parameters are active and should be optimized. This means that all entries
+      (and entries in nested dictionaries) of the `parameters` dictionary must
+      be of this type or an array with this type as element type.
+    """
+    function DictParameters(parameters::AbstractDict, setup_function = missing;
         strict = true,
         verbose = true,
         active_type = Float64
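
A constructor sketch highlighting the keyword arguments; the keys and the `setup_case` function are hypothetical placeholders:

```julia
using Jutul

# All leaf entries must be of the active type (Float64 by default) or arrays of it.
prm = Dict(
    "rock" => Dict("perm" => fill(1e-13, 100), "poro" => fill(0.2, 100)),
    "well" => Dict("rate" => 1.0),
)

# strict = false allows computing sensitivities without bounds on every free
# parameter; setup_case is assumed to return a JutulCase built from prm.
dopt = DictParameters(prm, setup_case, strict = false, verbose = true, active_type = Float64)
```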
