From c6246f21c4b7103962a22e202e0cb00da71d42bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20Str=C3=B6mer?= Date: Mon, 3 Jun 2024 14:25:42 +0200 Subject: [PATCH 01/11] chore: add clean ARCHITECTURE file --- ARCHITECTURE.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 ARCHITECTURE.md diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md new file mode 100644 index 0000000..2cac681 --- /dev/null +++ b/ARCHITECTURE.md @@ -0,0 +1,3 @@ +# Architecture + +To be added (see [matklad.github.io](https://matklad.github.io//2021/02/06/ARCHITECTURE.md.html)). From 4f700c5067385c5d77cfd97888ad672788cdffed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20Str=C3=B6mer?= Date: Mon, 3 Jun 2024 17:14:44 +0200 Subject: [PATCH 02/11] refactor: add a dummy folder that we can point paths to This is used if IESoptLibrary is not available, to prevent extensive workarounds related to paths. --- src/utils/dummy/.gitignore | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 src/utils/dummy/.gitignore diff --git a/src/utils/dummy/.gitignore b/src/utils/dummy/.gitignore new file mode 100644 index 0000000..c96a04f --- /dev/null +++ b/src/utils/dummy/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore \ No newline at end of file From 488e21513016cda9cf24a128e2adc0572b5d1eea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20Str=C3=B6mer?= Date: Mon, 3 Jun 2024 17:16:36 +0200 Subject: [PATCH 03/11] feat: add initial version of core IESopt.jl, based on the internal commit 216607e3f7e61af5aae4e6fb1faf6470e7a5a385 (branch: open-source), with some slight modifications, and missing some functionality that has not been cleaned up yet --- src/IESopt.jl | 713 ++++++++++++++++++++++++- src/config/config.jl | 62 +++ src/config/files.jl | 7 + src/config/names.jl | 4 + src/config/optimization.jl | 104 ++++ src/config/paths.jl | 22 + src/config/results.jl | 36 ++ src/core.jl | 401 ++++++++++++++ src/core/carrier.jl | 45 ++ src/core/connection.jl | 274 ++++++++++ src/core/connection/con_flow_bounds.jl | 111 ++++ src/core/connection/exp_pf_flow.jl | 20 + src/core/connection/obj_cost.jl | 38 ++ src/core/connection/var_flow.jl | 83 +++ src/core/decision.jl | 159 ++++++ src/core/decision/con_fixed.jl | 28 + src/core/decision/con_sos1.jl | 43 ++ src/core/decision/con_sos2.jl | 23 + src/core/decision/con_sos_value.jl | 29 + src/core/decision/obj_fixed.jl | 35 ++ src/core/decision/obj_sos.jl | 27 + src/core/decision/obj_value.jl | 20 + src/core/decision/var_fixed.jl | 15 + src/core/decision/var_sos.jl | 32 ++ src/core/decision/var_value.jl | 34 ++ src/core/expression.jl | 155 ++++++ src/core/node.jl | 236 ++++++++ src/core/node/con_last_state.jl | 87 +++ src/core/node/con_nodalbalance.jl | 188 +++++++ src/core/node/con_state_bounds.jl | 57 ++ src/core/node/exp_injection.jl | 20 + src/core/node/var_pf_theta.jl | 35 ++ src/core/node/var_state.jl | 25 + src/core/profile.jl | 192 +++++++ src/core/profile/con_value_bounds.jl | 95 ++++ src/core/profile/exp_value.jl | 19 + src/core/profile/obj_cost.jl | 35 ++ src/core/profile/var_aux_value.jl | 63 +++ src/core/snapshot.jl | 132 +++++ src/core/unit.jl | 417 +++++++++++++++ src/core/unit/con_conversion_bounds.jl | 88 +++ src/core/unit/con_ison.jl | 22 + src/core/unit/con_min_onoff_time.jl | 145 +++++ src/core/unit/con_ramp.jl | 47 ++ src/core/unit/con_ramp_limit.jl | 51 ++ src/core/unit/con_startup.jl | 41 ++ src/core/unit/obj_marginal_cost.jl | 31 ++ src/core/unit/obj_ramp_cost.jl | 34 ++ src/core/unit/obj_startup_cost.jl | 26 + src/core/unit/var_conversion.jl | 188 +++++++ 
src/core/unit/var_ison.jl | 39 ++ src/core/unit/var_ramp.jl | 38 ++ src/core/unit/var_startup.jl | 44 ++ src/opt/benders.jl | 466 ++++++++++++++++ src/opt/opt.jl | 2 + src/opt/sddp.jl | 427 +++++++++++++++ src/opt/stochastic.jl | 352 ++++++++++++ src/parser.jl | 542 +++++++++++++++++++ src/precompile/precompile_tools.jl | 16 + src/results/extract.jl | 71 +++ src/results/jld2.jl | 111 ++++ src/results/results.jl | 2 + src/templates/functions/finalize.jl | 84 +++ src/templates/functions/functions.jl | 3 + src/templates/functions/prepare.jl | 39 ++ src/templates/functions/validate.jl | 97 ++++ src/templates/load.jl | 93 ++++ src/templates/parse.jl | 317 +++++++++++ src/templates/templates.jl | 125 +++++ src/texify/constraints.jl | 116 ++++ src/texify/print.jl | 352 ++++++++++++ src/texify/texify.jl | 99 ++++ src/texify/variables.jl | 77 +++ src/utils/docs.jl | 134 +++++ src/utils/general.jl | 364 +++++++++++++ src/utils/logging.jl | 90 ++++ src/utils/overview.jl | 51 ++ src/utils/packing.jl | 264 +++++++++ src/utils/utilities/Utilities.jl | 73 +++ src/utils/utilities/model_wrapper.jl | 25 + src/utils/utils.jl | 31 ++ src/validation/addons/addons.jl | 18 + src/validation/core/carrier.jl | 11 + src/validation/core/connection.jl | 11 + src/validation/core/core.jl | 34 ++ src/validation/core/decision.jl | 11 + src/validation/core/node.jl | 11 + src/validation/core/profile.jl | 11 + src/validation/core/snapshot.jl | 11 + src/validation/core/template.jl | 11 + src/validation/core/unit.jl | 11 + src/validation/validation.jl | 90 ++++ src/validation/yaml/iesopt.jl | 82 +++ src/validation/yaml/iesopt.param.jl | 18 + src/validation/yaml/iesopt.template.jl | 43 ++ src/validation/yaml/yaml.jl | 17 + 96 files changed, 9727 insertions(+), 1 deletion(-) create mode 100644 src/config/config.jl create mode 100644 src/config/files.jl create mode 100644 src/config/names.jl create mode 100644 src/config/optimization.jl create mode 100644 src/config/paths.jl create mode 100644 src/config/results.jl create mode 100644 src/core.jl create mode 100644 src/core/carrier.jl create mode 100644 src/core/connection.jl create mode 100644 src/core/connection/con_flow_bounds.jl create mode 100644 src/core/connection/exp_pf_flow.jl create mode 100644 src/core/connection/obj_cost.jl create mode 100644 src/core/connection/var_flow.jl create mode 100644 src/core/decision.jl create mode 100644 src/core/decision/con_fixed.jl create mode 100644 src/core/decision/con_sos1.jl create mode 100644 src/core/decision/con_sos2.jl create mode 100644 src/core/decision/con_sos_value.jl create mode 100644 src/core/decision/obj_fixed.jl create mode 100644 src/core/decision/obj_sos.jl create mode 100644 src/core/decision/obj_value.jl create mode 100644 src/core/decision/var_fixed.jl create mode 100644 src/core/decision/var_sos.jl create mode 100644 src/core/decision/var_value.jl create mode 100644 src/core/expression.jl create mode 100644 src/core/node.jl create mode 100644 src/core/node/con_last_state.jl create mode 100644 src/core/node/con_nodalbalance.jl create mode 100644 src/core/node/con_state_bounds.jl create mode 100644 src/core/node/exp_injection.jl create mode 100644 src/core/node/var_pf_theta.jl create mode 100644 src/core/node/var_state.jl create mode 100644 src/core/profile.jl create mode 100644 src/core/profile/con_value_bounds.jl create mode 100644 src/core/profile/exp_value.jl create mode 100644 src/core/profile/obj_cost.jl create mode 100644 src/core/profile/var_aux_value.jl create mode 100644 src/core/snapshot.jl create mode 
100644 src/core/unit.jl create mode 100644 src/core/unit/con_conversion_bounds.jl create mode 100644 src/core/unit/con_ison.jl create mode 100644 src/core/unit/con_min_onoff_time.jl create mode 100644 src/core/unit/con_ramp.jl create mode 100644 src/core/unit/con_ramp_limit.jl create mode 100644 src/core/unit/con_startup.jl create mode 100644 src/core/unit/obj_marginal_cost.jl create mode 100644 src/core/unit/obj_ramp_cost.jl create mode 100644 src/core/unit/obj_startup_cost.jl create mode 100644 src/core/unit/var_conversion.jl create mode 100644 src/core/unit/var_ison.jl create mode 100644 src/core/unit/var_ramp.jl create mode 100644 src/core/unit/var_startup.jl create mode 100644 src/opt/benders.jl create mode 100644 src/opt/opt.jl create mode 100644 src/opt/sddp.jl create mode 100644 src/opt/stochastic.jl create mode 100644 src/parser.jl create mode 100644 src/precompile/precompile_tools.jl create mode 100644 src/results/extract.jl create mode 100644 src/results/jld2.jl create mode 100644 src/results/results.jl create mode 100644 src/templates/functions/finalize.jl create mode 100644 src/templates/functions/functions.jl create mode 100644 src/templates/functions/prepare.jl create mode 100644 src/templates/functions/validate.jl create mode 100644 src/templates/load.jl create mode 100644 src/templates/parse.jl create mode 100644 src/templates/templates.jl create mode 100644 src/texify/constraints.jl create mode 100644 src/texify/print.jl create mode 100644 src/texify/texify.jl create mode 100644 src/texify/variables.jl create mode 100644 src/utils/docs.jl create mode 100644 src/utils/general.jl create mode 100644 src/utils/logging.jl create mode 100644 src/utils/overview.jl create mode 100644 src/utils/packing.jl create mode 100644 src/utils/utilities/Utilities.jl create mode 100644 src/utils/utilities/model_wrapper.jl create mode 100644 src/utils/utils.jl create mode 100644 src/validation/addons/addons.jl create mode 100644 src/validation/core/carrier.jl create mode 100644 src/validation/core/connection.jl create mode 100644 src/validation/core/core.jl create mode 100644 src/validation/core/decision.jl create mode 100644 src/validation/core/node.jl create mode 100644 src/validation/core/profile.jl create mode 100644 src/validation/core/snapshot.jl create mode 100644 src/validation/core/template.jl create mode 100644 src/validation/core/unit.jl create mode 100644 src/validation/validation.jl create mode 100644 src/validation/yaml/iesopt.jl create mode 100644 src/validation/yaml/iesopt.param.jl create mode 100644 src/validation/yaml/iesopt.template.jl create mode 100644 src/validation/yaml/yaml.jl diff --git a/src/IESopt.jl b/src/IESopt.jl index e403cde..5fd6239 100644 --- a/src/IESopt.jl +++ b/src/IESopt.jl @@ -1,5 +1,716 @@ +""" + IESopt + +A general purpose solver agnostic energy system optimization framework. +""" module IESopt -# Write your package code here. +# Required for installing/loading solvers, and proper precompilation. +import Pkg +using PrecompileTools: @setup_workload, @compile_workload + +_is_precompiling() = ccall(:jl_generating_output, Cint, ()) == 1 + +# Setup `IESoptLib.jl`, if available. +const Library = try + Base.require(IESopt, :IESoptLib) +catch + if _is_precompiling() + else + @warn "`IESoptLib` is not installed; install it manually if you want to use its functionality." + end + nothing +end + +# Constant paths that might be used somewhere. 
+const _dummy_path = normpath(@__DIR__, "utils", "dummy") +const _PATHS = Dict{Symbol, String}( + :src => normpath(@__DIR__), + :addons => isnothing(Library) ? _dummy_path : Library.get_path(:addons), + :examples => isnothing(Library) ? _dummy_path : Library.get_path(:examples), + :docs => normpath(@__DIR__, "..", "docs"), + :test => normpath(@__DIR__, "..", "test"), + :templates => isnothing(Library) ? _dummy_path : Library.get_path(:templates), +) + +# Currently we have a proper automatic resolver for the following solver interfaces: +const _ALL_SOLVER_INTERFACES = ["HiGHS", "Gurobi", "Cbc", "GLPK", "CPLEX", "Ipopt", "SCIP"] + +# Required for logging, validation, and suppressing unwanted output. +using Logging +import LoggingExtras +using Suppressor +import ArgCheck + +# Used to "hotload" code (e.g., addons, Core Templates). +using RuntimeGeneratedFunctions +RuntimeGeneratedFunctions.init(@__MODULE__) + +# Required during the "build" step, showing progress. +using ProgressMeter + +using OrderedCollections + +# Required to generate dynamic docs of Core Components. +import Base.Docs +import Markdown + +# Everything JuMP / optimization related. +import JuMP, JuMP.@variable, JuMP.@expression, JuMP.@constraint, JuMP.@objective +import MultiObjectiveAlgorithms as MOA +const MOI = JuMP.MOI + +# File (and filesystem/git) and data format handling. +import YAML +import JSON +import DataFrames +import CSV +import JLD2 +import LibGit2 +import ZipFile + +# Used in Benders/Stochastic. +import Printf +import Dates + +include("utils/utils.jl") +include("config/config.jl") +include("core.jl") +include("parser.jl") +include("opt/opt.jl") +include("results/results.jl") +include("validation/validation.jl") +include("templates/templates.jl") +include("texify/texify.jl") + +function _build_model!(model::JuMP.Model; callbacks::Union{Nothing, Dict}) + if _iesopt_config(model).optimization.high_performance + if model.set_string_names_on_creation + @info "Overwriting `string_names_on_creation` to `false` since `high_performance` is set" + end + JuMP.set_string_names_on_creation(model, false) + end + + # This specifies the order in which components are built. This ensures that model parts that are used later on, are + # already initialized (e.g. constructing a constraint may use expressions and variables). + build_order = [ + _setup!, + _construct_expressions!, + _after_construct_expressions!, + _construct_variables!, + _after_construct_variables!, + _construct_constraints!, + _after_construct_constraints!, + _construct_objective!, + ] + + # TODO: care about components/global addons returning false somewhere + + @info "Preparing components" + + # Place Decisions first, since those need to be built before everything else. + corder = Vector{_CoreComponent}( + collect(component for component in values(_iesopt(model).model.components) if component isa Decision), + ) + append!( + corder, + collect(component for component in values(_iesopt(model).model.components) if !(component isa Decision)), + ) + + @info "Start creating JuMP model" + components_with_addons = [] + for f in build_order + # Construct all components, building them in the necessary order. + progress_map( + corder; + mapfun=foreach, + progress=Progress(length(corder); enabled=_iesopt_config(model).progress, desc="$(Symbol(f)) ..."), + ) do component + _iesopt(model).debug = component.name + f(component) + + if f == _setup! + !isnothing(component.addon) && push!(components_with_addons, component) + end + + if f == _construct_objective! 
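+                # `_construct_objective!` is the last entry in `build_order`, so reaching this branch means the
+                # component has passed through every build step and can be marked as fully initialized.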
+ component.init_state[] = :initialized + end + end + + # Call global addons + if _has_addons(model) + for (name, prop) in _iesopt(model).input.addons + (f == _setup!) && Base.invokelatest(prop.addon.setup!, model, prop.config["__settings__"]) + (f == _construct_expressions!) && + Base.invokelatest(prop.addon.construct_expressions!, model, prop.config["__settings__"]) + (f == _construct_variables!) && + Base.invokelatest(prop.addon.construct_variables!, model, prop.config["__settings__"]) + (f == _construct_constraints!) && + Base.invokelatest(prop.addon.construct_constraints!, model, prop.config["__settings__"]) + (f == _construct_objective!) && + Base.invokelatest(prop.addon.construct_objective!, model, prop.config["__settings__"]) + end + end + + # Call any registered callbacks + if !isnothing(callbacks) + if f == _construct_expressions! + if haskey(callbacks, :post_expression) + callbacks[:post_expression](model) + end + end + end + end + + # Call relevant addons (if any). + for component in components_with_addons + # todo: check if `invokelatest` is the fastest method + # todo: check if there is something more performant to "dynamically load the code" + # todo: check the return value (false means errors!) + @profile model Base.invokelatest(_iesopt(model).input.files[component.addon].build, component) + end + + # Call finalization functions of all Core Templates. + for (name, entry) in _iesopt(model).results._templates + entry.finalize(model, name, entry.parameters) + end + + # Construct relevant ETDF constraints. + if !isempty(_iesopt(model).aux.etdf.groups) + @error "ETDF constraints are currently not supported" + # for (etdf_group, node_ids) in _iesopt(model).aux.etdf.groups + # _iesopt(model).aux.etdf.constr[etdf_group] = @constraint( + # model, + # [t = _iesopt(model).model.T], + # sum(_iesopt(model).model.components[id].exp.injection[t] for id in node_ids) == 0 + # ) + # end + end + + # Building the objective(s). + for (name, obj) in _iesopt(model).model.objectives + @info "Preparing objective" name + + # Add all terms that were added from within a component definition to the correct objective's terms. + for term in _iesopt(model).aux._obj_terms[name] + if term isa Number + push!(obj.constants, term) + else + comp, proptype, prop = rsplit(term, "."; limit=3) + field = getproperty(getproperty(component(model, comp), Symbol(proptype)), Symbol(prop)) + if field isa Vector + push!(obj.terms, sum(field)) + else + push!(obj.terms, field) + end + end + end + + # todo: is there a faster way to sum up a set of expressions? + @info "Building objective" name + for term in obj.terms + JuMP.add_to_expression!(obj.expr, term) + end + if !isempty(obj.constants) + JuMP.add_to_expression!(obj.expr, sum(obj.constants)) + end + end + + if !_is_multiobjective(model) + current_objective = _iesopt_config(model).optimization.objective.current + isnothing(current_objective) && @critical "Missing an active objective" + @objective(model, Min, _iesopt(model).model.objectives[current_objective].expr) + else + @objective( + model, + Min, + [ + _iesopt(model).model.objectives[obj].expr for + obj in _iesopt_config(model).optimization.multiobjective.terms + ] + ) + end +end + +function _prepare_model!(model::JuMP.Model) + # Potentially remove components that are tagged `conditional`, and violate some of their conditions. 
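+    # Sketch of the intent: the fallback `_check(cc)` returns `!cc.conditional`, so only components explicitly
+    # tagged `conditional` can be filtered out here (component types may override `_check` with their own checks).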
+ failed_components = [] + for (cname, component) in _iesopt(model).model.components + !(@profile model _check(component)) && push!(failed_components, cname) + end + if length(failed_components) > 0 + @warn "Some components are removed based on the `conditional` setting" n_components = length(failed_components) + for cname in failed_components + delete!(_iesopt(model).model.components, cname) + end + end + + # Init global addons before preparing components + if _has_addons(model) + for (name, prop) in _iesopt(model).input.addons + prop.config["__settings__"] = Base.invokelatest(prop.addon.initialize!, model, prop.config) + if isnothing(prop.config["__settings__"]) + @critical "Addon failed to set up" name + end + end + end + + # Fully prepare each component. + all_components_ok = true + for (id, component) in _iesopt(model).model.components + all_components_ok &= _prepare!(component) + end + if !all_components_ok + error("Some components did not pass the preparation step.") + end +end + +""" + run(filename::String; verbosity=nothing, kwargs...) + +Build, optimize, and return a model. + +# Arguments + +- `filename::String`: The path to the top-level configuration file. +- `verbosity`: The verbosity level to use. Supports `true` (= verbose mode), `"warning"` (= warnings and above), and + `false` (suppressing logs). + +If `verbosity = true`, the verbosity setting of the solver defaults to `true` as well, otherwise it defaults to `false` +(the verbosity setting of the solver can also be directly controled using the `verbosity_solve` setting in the top-level +config file). + +# Keyword Arguments + +Keyword arguments are passed to the [`generate!`](@ref) function. +""" +function run(filename::String; verbosity=nothing, kwargs...) + model = generate!(filename; verbosity=verbosity, kwargs...) + optimize!(model) + + return model +end + +""" + generate!(filename::String) + +Builds and returns a model using the IESopt framework. + +This loads the configuration file specified by `filename`. Requires full specification of the `solver` entry in config. +""" +function generate!(filename::String; verbosity=nothing, kwargs...) + model = JuMP.Model() + return generate!(model, filename; verbosity=verbosity, kwargs...) +end + +""" + generate!(model::JuMP.Model, filename::String) + +Builds a model using the IESopt framework, "into" the provided `model`. + +This loads the configuration file specified by `filename`. Be careful when creating your `model` in any other way than +in the provided examples, as this can conflict with IESopt internals (especially for model/optimizer combinations +that do not support bridges). Returns the model for convenience, even though it is modified in place. +""" +function generate!(model::JuMP.Model, filename::String; verbosity=nothing, kwargs...) + local stats_parse, stats_build, stats_total + + try + # Validate before parsing. + !validate(filename) && return nothing + + # Parse & build the model. + stats_total = @timed begin + stats_parse = @timed parse!(model, filename; verbosity, kwargs...) + !stats_parse.value && return nothing + if JuMP.mode(model) != JuMP.DIRECT && JuMP.MOIU.state(JuMP.backend(model)) == JuMP.MOIU.NO_OPTIMIZER + with_logger(_iesopt(model).logger) do + return _attach_optimizer(model) + end + end + stats_build = @timed with_logger(_iesopt(model).logger) do + return @profile build!(model) + end + end + catch + # Get debug information from model, if available. + debug = haskey(model.ext, :iesopt) ? 
_iesopt(model).debug : "not available" + debug = isnothing(debug) ? "not available" : debug + + # Get ALL current exceptions. + curr_ex = current_exceptions() + + # These modules are automatically removed from the backtrace that is shown. + remove_modules = [:VSCodeServer, :Base, :CoreLogging] + + # Prepare all exceptions. + _exceptions = [] + for (exception, backtrace) in curr_ex + trace = stacktrace(backtrace) + + # Debug log the full backtrace. + @debug "Details on error #$(length(_exceptions) + 1)" error = (exception, trace) + + # Error log the backtrace, but remove modules that only clutter the trace. + trace = [e for e in trace if !isnothing(parentmodule(e)) && !(nameof(parentmodule(e)) in remove_modules)] + push!( + _exceptions, + Symbol(" = = = = = = = = = [ Error #$(length(_exceptions) + 1) ] = = = = = = = =") => + (exception, trace), + ) + end + + @error "Error(s) during model generation" debug number_of_errors = length(curr_ex) _exceptions... + else + with_logger(_iesopt(model).logger) do + @info "Finished model generation" times = + (parse=stats_parse.time, build=stats_build.time, total=stats_total.time) + end + end + + return model +end + +function _attach_optimizer(model::JuMP.Model) + @info "Setting up Optimizer" + solver = get( + # note: when adding a solver here, add its import at the top + Dict{String, String}( + "highs" => "HiGHS", + "gurobi" => "Gurobi", + "cbc" => "Cbc", + "glpk" => "GLPK", + "cplex" => "CPLEX", + "ipopt" => "Ipopt", + "scip" => "SCIP", + ), + lowercase(_iesopt_config(model).optimization.solver.name), + nothing, + ) + + if isnothing(solver) + @critical "Can't determine proper solver" solver + end + + try + Base.require(IESopt, Symbol(solver)) + catch + @error "It seems the requested solver is not installed; trying to install and precompile it" solver + try + Pkg.add(solver) + Base.require(IESopt, Symbol(solver)) + catch + @critical "Could not install the requested solver" solver + end + @critical "Solver installed, but can not proceed from here; please execute your code again" + end + + # withpkg(f, pkgid::Base.PkgId) = Base.invokelatest(f, IESopt.require(pkgid)) + + # withpkg(Base.PkgId(Pkg.Types.Context().env.project.deps[solver], solver)) do s + let s = getfield(IESopt, Symbol(solver)) + if !_is_multiobjective(model) + if _iesopt_config(model).optimization.solver.mode == "direct" + @critical "Automatic direct mode is currently not supported" + # todo: we can use an "isempty" function that ignores :ext to check for an empty model, + # then create a new one and copy over the :ext dict + else + @info "Activating solver" solver + JuMP.set_optimizer(model, s.Optimizer) + end + else + if _iesopt_config(model).optimization.solver.mode == "direct" + @critical "Multi-objective optimization currently does not support direct mode" + else + @info "Activating solver in multi-objective mode" solver + JuMP.set_optimizer(model, () -> MOA.Optimizer(s.Optimizer)) + + moa_mode = _iesopt_config(model).optimization.multiobjective.mode + @info "Setting MOA mode" mode = moa_mode + JuMP.set_attribute(model, MOA.Algorithm(), eval(Meta.parse("MOA.$moa_mode()"))) + end + end + end + + # todo: we currently need to abort here, but there SHOULD be some way to continue without running into world-age + # related problems, etc. 
+ + for (attr, value) in _iesopt_config(model).optimization.solver.attributes + try + @suppress JuMP.set_attribute(model, attr, value) + @info "Setting attribute" attr value + catch + @error "Failed to set attribute" attr value + end + end + + if !isnothing(_iesopt_config(model).optimization.multiobjective) + for (attr, value) in _iesopt_config(model).optimization.multiobjective.settings + try + if value isa Vector + for i in eachindex(value) + JuMP.set_attribute(model, eval(Meta.parse("$attr($i)")), value[i]) + end + else + JuMP.set_attribute(model, eval(Meta.parse("$attr()")), value) + end + @info "Setting attribute" attr value + catch + @error "Failed to set attribute" attr value + end + end + end + + return nothing +end + +function parse!(model::JuMP.Model, filename::String; verbosity=nothing, kwargs...) + if !endswith(filename, ".iesopt.yaml") + @critical "Model entry config files need to respect the `.iesopt.yaml` file extension" filename + end + + # convert to `Pairs` with `Symbol` keys to `Dict{String, Any}` + global_parameters = Dict{String, Any}(String(k) => v for (k, v) in kwargs) + + # Load the model specified by `filename`. + _parse_model!(model, filename, global_parameters; verbosity) || (@critical "Error while parsing model" filename) + + return true +end + +function build!(model; callbacks::Union{Nothing, Dict}=nothing) + # Prepare the model, ensuring some conversions before consistency checks. + @profile _prepare_model!(model) + + # Perform conistency checks on all parsed components. + all_components_ok = true + for (id, component) in _iesopt(model).model.components + all_components_ok &= _isvalid(component) + end + if !all_components_ok + error("Some components did not pass the consistency check.") + end + + # Build the model. + @profile _build_model!(model; callbacks=callbacks) + + @info "Profiling results after `build` [time, top 5]" _profiling_format_top(model, 5)... +end + +""" + optimize!(model::JuMP.Model; save_results::Bool=true, kwargs...) + +Use `JuMP.optimize!` to optimize the given model, optionally serializing the model afterwards for later use. +""" +function optimize!(model::JuMP.Model; kwargs...) + with_logger(_iesopt(model).logger) do + return @profile _optimize!(model; kwargs...) + end +end + +function _optimize!(model::JuMP.Model; kwargs...) + if !isempty(_iesopt(model).aux.constraint_safety_penalties) + @info "Relaxing constraints based on constraint_safety" + _iesopt(model).aux.constraint_safety_expressions = @profile JuMP.relax_with_penalty!( + model, + Dict(k => v.penalty for (k, v) in _iesopt(model).aux.constraint_safety_penalties), + ) + end + + # Enable or disable solver output + if _iesopt_config(model).verbosity_solve + JuMP.unset_silent(model) + else + JuMP.set_silent(model) + end + + # Logging solver output. + if _iesopt_config(model).optimization.solver.log + # todo: replace this with a more general approach + log_file = abspath(_iesopt_config(model).paths.results, "$(_iesopt_config(model).names.scenario).solverlog") + rm(log_file; force=true) + if JuMP.solver_name(model) == "Gurobi" + @info "Logging solver output" log_file + JuMP.set_attribute(model, "LogFile", log_file) + elseif JuMP.solver_name(model) == "HiGHS" + @info "Logging solver output" log_file + JuMP.set_attribute(model, "log_file", log_file) + else + # todo: support MOA here + @error "Logging solver output is currently only supported for Gurobi and HiGHS" + end + end + + @info "Starting optimize ..." + @profile JuMP.optimize!(model; kwargs...) 
+ + # todo: make use of `is_solved_and_feasible`? if, make sure the version requirement of JuMP is correct + + if JuMP.result_count(model) == 1 + if JuMP.termination_status(model) == JuMP.MOI.OPTIMAL + @info "Finished optimizing, solution optimal" + else + @error "Finished optimizing, solution non-optimal" status_code = JuMP.termination_status(model) solver_status = + JuMP.raw_status(model) + end + elseif JuMP.result_count(model) == 0 + @error "No results returned after call to `optimize!`. This most likely indicates an infeasible or unbounded model. You can check with `IESopt.compute_IIS(model)` which constraints make your model infeasible. Note: this requires a solver that supports this (e.g. Gurobi)" + return nothing + else + if !isnothing(_iesopt_config(model).optimization.multiobjective) + if JuMP.termination_status(model) == JuMP.MOI.OPTIMAL + @info "Finished optimizing, solution(s) optimal" result_count = JuMP.result_count(model) + else + @error "Finished optimizing, solution non-optimal" status_code = JuMP.termination_status(model) solver_status = + JuMP.raw_status(model) + end + else + @warn "Unexpected result count after call to `optimize!`" result_count = JuMP.result_count(model) status_code = + JuMP.termination_status(model) solver_status = JuMP.raw_status(model) + end + end + + # Analyse constraint safety results + if !isempty(_iesopt(model).aux.constraint_safety_penalties) + relaxed_components = Vector{String}() + for (k, v) in _iesopt(model).aux.constraint_safety_penalties + # Skip components that we already know about being relaxed. + (v.component_name ∈ relaxed_components) && continue + + if JuMP.value(_iesopt(model).aux.constraint_safety_expressions[k]) > 0 + push!(relaxed_components, v.component_name) + end + end + + if !isempty(relaxed_components) + @warn "The safety constraint feature triggered" n_components = length(relaxed_components) components = "[$(relaxed_components[1]), ...]" + @info "You can further analyse the relaxed components by looking at the `constraint_safety_penalties` and `constraint_safety_expressions` entries in `model.ext`." + end + end + + if _iesopt_config(model).results.enabled + if !JuMP.is_solved_and_feasible(model) + @error "Extracting results is only possible for a solved and feasible model" + else + @profile _extract_results(model) + @profile _save_results(model) + end + end + + @info "Profiling results after `optimize` [time, top 5]" _profiling_format_top(model, 5)... + return nothing +end + +""" + function compute_IIS(model::JuMP.Model; filename::String = "") + +Compute the IIS and print it. If `filename` is specified it will instead write all constraints to the given file. This +will fail if the solver does not support IIS computation. +""" +function compute_IIS(model::JuMP.Model; filename::String="") + print = false + if filename === "" + print = true + end + + JuMP.compute_conflict!(model) + conflict_constraint_list = JuMP.ConstraintRef[] + for (F, S) in JuMP.list_of_constraint_types(model) + for con in JuMP.all_constraints(model, F, S) + if JuMP.MOI.get(model, JuMP.MOI.ConstraintConflictStatus(), con) == JuMP.MOI.IN_CONFLICT + if print + println(con) + else + push!(conflict_constraint_list, con) + end + end + end + end + + if !print + io = open(filename, "w") do io + for con in conflict_constraint_list + println(io, con) + end + end + end + + return nothing +end + +""" + function component(model::JuMP.Model, component_name::String) + +Get the component `component_name` from `model`. 
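+
+A minimal usage sketch (the component name used here is purely illustrative and depends on your model configuration):
+
+```julia
+gas_turbine = component(model, "gas_turbine")  # look up a core component by its name
+```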
+""" +function component(model::JuMP.Model, component_name::AbstractString) + if !haskey(_iesopt(model).model.components, component_name) + st = stacktrace() + trigger = length(st) > 0 ? st[1] : nothing + origin = length(st) > 1 ? st[2] : nothing + inside = length(st) > 2 ? st[3] : nothing + @critical "Trying to access unknown component" component_name trigger origin inside debug = _iesopt_debug(model) + end + + return _iesopt(model).model.components[component_name] +end + +function extract_result(model::JuMP.Model, component_name::String, field::String; mode::String) + return _result(component(model, component_name), mode, field)[2] +end + +""" + function to_table(model::JuMP.Model; path::String = "./out") + +Turn `model` into a set of CSV files containing all core components that represent the model. + +This can be useful by running +```julia +IESopt.parse!(model, filename) +IESopt.to_table(model) +``` +which will parse the model given by `filename`, without actually building it (which saves a lot of time), and will +output a complete "description" in core components (that are the resolved version of all non-core components). + +If `write_to_file` is `false` it will instead return a dictionary of all DataFrames. +""" +function to_table(model::JuMP.Model; path::String="./out", write_to_file::Bool=true) + tables = Dict( + Connection => Vector{OrderedDict{Symbol, Any}}(), + Decision => Vector{OrderedDict{Symbol, Any}}(), + Node => Vector{OrderedDict{Symbol, Any}}(), + Profile => Vector{OrderedDict{Symbol, Any}}(), + Unit => Vector{OrderedDict{Symbol, Any}}(), + ) + + for (id, component) in _iesopt(model).model.components + push!(tables[typeof(component)], _to_table(component)) + end + + if write_to_file + for (type, table) in tables + CSV.write(normpath(_iesopt_config(model).paths.main, path, "$type.csv"), DataFrames.DataFrame(table)) + end + return nothing + end + + return Dict{Type, DataFrames.DataFrame}(type => DataFrames.DataFrame(table) for (type, table) in tables) +end + +# This is directly taken from JuMP.jl and exports all internal symbols that do not start with an underscore (roughly). 
+const _EXCLUDE_SYMBOLS = [Symbol(@__MODULE__), :eval, :include] +for sym in names(@__MODULE__; all=true) + sym_string = string(sym) + if sym in _EXCLUDE_SYMBOLS || startswith(sym_string, "_") || startswith(sym_string, "@_") + continue + end + if !(Base.isidentifier(sym) || (startswith(sym_string, "@") && Base.isidentifier(sym_string[2:end]))) + continue + end + @eval export $sym +end + +include("precompile/precompile_tools.jl") end diff --git a/src/config/config.jl b/src/config/config.jl new file mode 100644 index 0000000..2c55608 --- /dev/null +++ b/src/config/config.jl @@ -0,0 +1,62 @@ +include("names.jl") +include("paths.jl") +include("files.jl") +include("results.jl") +include("optimization.jl") + +struct _Config + names::_ConfigNames + + optimization::_ConfigOptimization + files::_ConfigFiles + results::_ConfigResults + paths::_ConfigPaths + + progress::Bool + verbosity::Union{String, Bool} + verbosity_solve::Bool + + parametric::Bool # todo: remove / refactor after "expression rework" +end + +function _Config(model::JuMP.Model) + config = _iesopt(model).input._tl_yaml["config"] + + model_path = model.ext[:_iesopt_wd] + verbosity = model.ext[:_iesopt_verbosity] + + names_str = ( + if !haskey(config, "name") + ("my_model", "scenario_\$TIME\$") + else + if haskey(config["name"], "run") + @warn "Using `run` in the `name` section of the configuration is deprecated, use `scenario` instead" + end + + name_model = get(config["name"], "model", "my_model") + name_scenario = get(config["name"], "scenario", get(config["name"], "run", "scenario_\$TIME\$")) + (name_model, name_scenario) + end + ) + names = _ConfigNames(replace.(names_str, "\$TIME\$" => Dates.format(Dates.now(), "yyyy_mm_dd_HHMMSSs"))...) + paths = _ConfigPaths(get(config, "paths", Dict{String, Any}()), model_path, names) + + verbosity = isnothing(verbosity) ? 
get(config, "verbosity", true) : verbosity + return _Config( + names, + _ConfigOptimization(get(config, "optimization", Dict{String, Any}())), + _ConfigFiles(get(config, "files", Dict{String, Any}()), paths), + _ConfigResults(get(config, "results", Dict{String, Any}())), + paths, + get(config, "progress", verbosity === true), + verbosity, + get(config, "verbosity_solve", verbosity === true), + false, + ) +end + +_has_representative_snapshots(model::JuMP.Model) = + !isnothing(_iesopt_config(model).optimization.snapshots.representatives) +_is_multiobjective(model::JuMP.Model) = (:mo in _iesopt_config(model).optimization.problem_type) +_is_lp(model::JuMP.Model) = (:lp in _iesopt_config(model).optimization.problem_type) +_is_milp(model::JuMP.Model) = (:milp in _iesopt_config(model).optimization.problem_type) diff --git a/src/config/files.jl b/src/config/files.jl new file mode 100644 index 0000000..d8ffec4 --- /dev/null +++ b/src/config/files.jl @@ -0,0 +1,7 @@ +struct _ConfigFiles + entries::Dict{String, String} +end + +function _ConfigFiles(config::Dict{String, Any}, paths::_ConfigPaths) + return _ConfigFiles(Dict(k => normpath(v) for (k, v) in config)) +end diff --git a/src/config/names.jl b/src/config/names.jl new file mode 100644 index 0000000..49cfa55 --- /dev/null +++ b/src/config/names.jl @@ -0,0 +1,4 @@ +struct _ConfigNames + model::String + scenario::String +end diff --git a/src/config/optimization.jl b/src/config/optimization.jl new file mode 100644 index 0000000..3cac2a8 --- /dev/null +++ b/src/config/optimization.jl @@ -0,0 +1,104 @@ +struct _ConfigSolver + name::String + mode::String + log::Bool + attributes::Dict{String, Any} +end + +struct _ConfigSnapshots + count::Int64 + offset::Int64 + + names::Union{String, Nothing} + weights::Union{String, Float64, Nothing} + + representatives::Union{String, Nothing} + aggregate::Union{Float64, Nothing} +end + +struct _ConfigObjective + current::Union{String, Nothing} + functions::Dict{String, Vector{String}} +end + +struct _ConfigMultiObjective + mode::String + terms::Vector{String} + settings::Dict{String, Any} +end + +struct _ConfigOptimization + problem_type::Set{Symbol} + snapshots::_ConfigSnapshots + solver::_ConfigSolver + + objective::_ConfigObjective + multiobjective::Union{_ConfigMultiObjective, Nothing} + + constraint_safety::Bool + constraint_safety_cost::Float64 + + high_performance::Bool +end + +function _ConfigOptimization(config::Dict{String, Any}) + problem_types = Set(Symbol.(split(lowercase(config["problem_type"]), "+"))) + + return _ConfigOptimization( + problem_types, + _ConfigSnapshots(get(config, "snapshots", Dict{String, Any}())), + _ConfigSolver(get(config, "solver", Dict{String, Any}())), + _ConfigObjective(config), + _ConfigMultiObjective(config), + get(config, "constraint_safety", false), + get(config, "constraint_safety_cost", 1e5), + get(config, "high_performance", false), + ) +end + +function _ConfigObjective(config::Dict{String, Any}) + objectives = get(config, "objectives", Dict{String, Vector{String}}()) + haskey(objectives, "total_cost") || (objectives["total_cost"] = Vector{String}()) + + return _ConfigObjective( + get(config, "objective", haskey(config, "multiobjective") ? 
nothing : "total_cost"), + objectives, + ) +end + +function _ConfigMultiObjective(config::Dict{String, Any}) + haskey(config, "multiobjective") || return nothing + + return _ConfigMultiObjective( + config["multiobjective"]["mode"], + config["multiobjective"]["terms"], + config["multiobjective"]["settings"], + ) +end + +function _ConfigSnapshots(config::Dict{String, Any}) + count = config["count"] + if _is_precompiling() + count = min(4, count) + @warn "Detected precompilation... limiting Snapshot count" original = config["count"] new = count + end + + return _ConfigSnapshots( + count, + get(config, "offset", 0), + get(config, "names", nothing), + get(config, "weights", nothing), + get(config, "representatives", nothing), + get(config, "aggregate", nothing), + ) +end + +function _ConfigSolver(config::Dict{String, Any}) + # todo: implement default attributes depending on solver + return _ConfigSolver( + lowercase(get(config, "name", "highs")), + lowercase(get(config, "mode", "normal")), + get(config, "log", true), + get(config, "attributes", Dict{String, Any}()), + ) +end diff --git a/src/config/paths.jl b/src/config/paths.jl new file mode 100644 index 0000000..433b23a --- /dev/null +++ b/src/config/paths.jl @@ -0,0 +1,22 @@ +struct _ConfigPaths + main::String + + files::String + results::String + templates::String + components::String + addons::String +end + +function _ConfigPaths(config::Dict{String, Any}, model_path::String, names::_ConfigNames) + model_path = normpath(replace(model_path, '\\' => '/')) + + return _ConfigPaths( + model_path, + normpath(model_path, replace(get(config, "files", "files"), '\\' => '/')), + normpath(model_path, replace(get(config, "results", "out"), '\\' => '/'), names.model), + normpath(model_path, replace(get(config, "templates", "templates"), '\\' => '/')), + normpath(model_path, replace(get(config, "components", "components"), '\\' => '/')), + normpath(model_path, replace(get(config, "addons", "addons"), '\\' => '/')), + ) +end diff --git a/src/config/results.jl b/src/config/results.jl new file mode 100644 index 0000000..ede1ef7 --- /dev/null +++ b/src/config/results.jl @@ -0,0 +1,36 @@ +struct _ConfigResults + enabled::Bool + memory_only::Bool + compress::Bool + include::Set{Symbol} +end + +function _ConfigResults(config::Dict{String, Any}) + if isempty(config) + return _ConfigResults(true, true, false, Set()) + end + + enabled = get(config, "enabled", !get(config, "disabled", false)) + if !enabled + @warn "Automatic result extraction disabled" + return _ConfigResults(false, false, false, Set()) + end + + for entry in ["document", "settings", "groups"] + haskey(config, entry) || continue + @error "The `$(entry)` entry in `results` is deprecated will not work as expected" + end + + memory_only = get(config, "memory_only", false) + compress = get(config, "compress", false) + included_modes = lowercase(get(config, "include", memory_only ? 
"none" : "all")) + included_modes = string.(replace(included_modes, "all" => "input+git+log")) + + if memory_only + if compress + @error "The `memory_only` and `compress` entries in `results` are incompatible; ignoring `compress`" + end + end + + return _ConfigResults(enabled, memory_only, compress, Set(Symbol.(split(included_modes, '+')))) +end diff --git a/src/core.jl b/src/core.jl new file mode 100644 index 0000000..eaed7d7 --- /dev/null +++ b/src/core.jl @@ -0,0 +1,401 @@ +# This allows falling back to the `_CoreComponent`'s name for hashing which should enable dictionaries to use the +# `_CoreComponent` as key with a similar performance to using the name as key. This entails that every +# component needs to implement that name - or redefine that function properly. +# See: https://discourse.julialang.org/t/haskey-dict-allocates-when-key-is-a-struct/32093/3 +# Further, it seems to be sufficient to overload `isequal` instead of also `Base.:(==)`, see: +# https://stackoverflow.com/a/34939856/5377696; https://github.com/JuliaLang/julia/issues/12198#issuecomment-122938304 +# indicates the opposite... +Base.hash(cc::_CoreComponent) = cc.name +Base.:(==)(cc1::_CoreComponent, cc2::_CoreComponent) = cc1.name == cc2.name +Base.isequal(cc1::_CoreComponent, cc2::_CoreComponent) = Base.isequal(cc1.name, cc2.name) + +# TODO: replace with https://github.com/KristofferC/TimerOutputs.jl +""" + @profile(arg1, arg2=nothing) + +This macro is used to profile the execution of a function. It captures the time, memory allocation, and number of calls +of the function. The profiling data is stored in the `_profiling` field of the `_IESoptData` structure. The identifier +passed to the macro is used to store the profiling data. If no identifier is provided, the function's name is used as +the identifier. + +Options to use this macro are: + - @profile model "identifier" foo() + - @profile model foo() + - @profile "identifier" foo(model) + - @profile foo(model) +""" +macro profile(arg1, arg2=nothing, arg3=nothing) + model = nothing + + # Determine if an identifier was provided. + if arg2 === nothing + # No identifier provided, parse the function call directly. + if isa(arg1, Expr) && arg1.head === :call + base_identifier = arg1.args[1] # Use function's name as identifier. + func_call = arg1 + if isa(func_call.args[2], Expr) + model = esc(func_call.args[3]) + else + model = esc(func_call.args[2]) + end + else + error("Invalid macro usage. Expected a function call.") + end + else + # Identifier (and/or model) provided. + if isa(arg1, String) + base_identifier = arg1 + func_call = arg2 + if isa(func_call.args[2], Expr) + model = esc(func_call.args[3]) + else + model = esc(func_call.args[2]) + end + else #if isa(arg1, JuMP.Model) + model = esc(arg1) + if isa(arg2, String) + base_identifier = arg2 + func_call = arg3 + else + base_identifier = arg2.args[1] + func_call = arg2 + end + end + end + + # Extract the function and its arguments from the func_call expression. + if !(isa(func_call, Expr) && func_call.head === :call) + error("The macro expects a function call.") + end + + func = func_call.args[1] + args = esc.(func_call.args[2:end]) + + # Generate code that runs the function within the @timed macro, capturing the profiling and saving it. 
+ quote + local profile, profiling, identifier, method + + method = methods($func)[1] + identifier = (Symbol(method.module), string(method.file), Symbol($base_identifier)) + + profiling = _iesopt($model).aux._profiling + profile = @timed $func($(args...)) + + if haskey(profiling, identifier) + profiling[identifier].time += profile.time + profiling[identifier].bytes += profile.bytes + profiling[identifier].calls += 1 + else + profiling[identifier] = _Profiling(profile.time, profile.bytes, 1) + end + + profile.value # Return the function's return value. + end +end + +include("core/carrier.jl") +include("core/expression.jl") # this needs to come before the core components using it +include("core/connection.jl") +include("core/decision.jl") +include("core/node.jl") +include("core/profile.jl") +include("core/snapshot.jl") +include("core/unit.jl") + +# Finalize the docstrings of the core components. +_finalize_docstring(Connection) +_finalize_docstring(Decision) +_finalize_docstring(Node) +_finalize_docstring(Profile) +_finalize_docstring(Unit) + +# Here, empty implementations are done to ensure every core component type implements all necessary functionality, even +# if it does not care about that. Make sure to implement them, in order to actually use them. +_check(cc::_CoreComponent) = !cc.conditional +function _prepare!(::_CoreComponent) end +function _isvalid(cc::_CoreComponent) + @warn "_isvalid(...) not implemented" cc_type = typeof(cc) + return true +end +function _setup!(::_CoreComponent) end +function _result(::_CoreComponent, ::String, ::String; result::Int=1) end +function _to_table(component::_CoreComponent) + excluded_fields = ( + :model, + :init_state, + :config, + :ext, + :addon, + :results, + :terms, + :conversion_dict, + :conversion_at_min_dict, + :capacity_carrier, + :marginal_cost_carrier, + :total, + ) + + _hp = function (x) + if (x isa Number) || (x isa AbstractString) || (x isa Symbol) + return x + elseif x isa Carrier + return x.name + elseif x isa AbstractVector + return "[...]" + else + return "__presolved__" + end + end + + return OrderedDict{Symbol, Any}( + field => (isnothing(getfield(component, field)) ? missing : _hp(getfield(component, field))) for + field in fieldnames(typeof(component)) if + !((field in excluded_fields) || contains(String(field), r"var_|constr_|expr_|obj_")) + ) +end +function _construct_expressions!(::_CoreComponent) end +function _after_construct_expressions!(::_CoreComponent) end +function _construct_variables!(::_CoreComponent) end +function _after_construct_variables!(::_CoreComponent) end +function _construct_constraints!(::_CoreComponent) end +function _after_construct_constraints!(::_CoreComponent) end +function _construct_objective!(::_CoreComponent) end + +function filter_component(model::JuMP.Model, tags::Vector{String}) + @error "Filtering based on tags has been deprecated" + return nothing +end + +function _result_fields(component::_CoreComponent) + @error "_result_fields(...) 
not implemented" component = component.name + return nothing +end + +_component_type(::_CoreComponent) = nothing +_component_type(::Connection) = :Connection +_component_type(::Decision) = :Decision +_component_type(::Node) = :Node +_component_type(::Profile) = :Profile +_component_type(::Unit) = :Unit + +function Base.getproperty(cc::_CoreComponent, field::Symbol) + try + (field == :var) && (return getfield(cc, :_ccoc).variables) + (field == :con) && (return getfield(cc, :_ccoc).constraints) + (field == :exp) && (return getfield(cc, :_ccoc).expressions) + (field == :obj) && (return getfield(cc, :_ccoc).objectives) + return getfield(cc, field) + catch e + @critical "Field not found in _CoreComponent" e + end +end + +function Base.propertynames(cc::_CoreComponent) + return (propertynames(cc)..., :exp, :var, :con, :obj) +end + +function Base.getproperty(ccocd::_CoreComponentOptContainerDict, field::Symbol) + try + return getfield(ccocd, :dict)[field] + catch e + @critical "Field not found in _CoreComponentOptContainerDict" e + end +end + +function Base.setproperty!(ccocd::_CoreComponentOptContainerDict, field::Symbol, value) + return getfield(ccocd, :dict)[field] = value +end + +function Base.setindex!(ccocd::_CoreComponentOptContainerDict, value, field::Symbol) + return getfield(ccocd, :dict)[field] = value +end + +function Base.getindex(ccocd::_CoreComponentOptContainerDict, field::Symbol) + return getfield(ccocd, :dict)[field] +end + +function Base.keys(ccocd::_CoreComponentOptContainerDict) + return keys(getfield(ccocd, :dict)) +end + +# function Base.getproperty(ccoc::_CoreComponentOptContainer, field::Symbol) +# (field == :var) && (return getfield(ccoc, :variables)) +# (field == :con) && (return getfield(ccoc, :constraints)) +# (field == :exp) && (return getfield(ccoc, :expressions)) +# (field == :obj) && (return getfield(ccoc, :objectives)) + +# throw(ArgumentError("Field $field not found in _CoreComponentOptContainer")) +# end + +# function Base.setproperty!(ccoc::_CoreComponentOptContainer, field::Symbol, value) +# getfield(ccoc, :content)[field] = value +# end + +# function Base.propertynames(ccoc::_CoreComponentOptContainer) +# return propertynames(getfield(ccoc, :content)) +# end + +@kwdef struct _CoreComponentOptResultContainer + expressions = _CoreComponentOptContainerDict{Union{Float64, Vector{Float64}}}() + variables = _CoreComponentOptContainerDict{Union{Float64, Vector{Float64}}}() + constraints = _CoreComponentOptContainerDict{Union{Float64, Vector{Float64}}}() + objectives = _CoreComponentOptContainerDict{Float64}() + + results = _CoreComponentOptContainerDict{Union{Float64, Vector{Float64}}}() +end + +struct _CoreComponentResult <: _CoreComponent + _info::Dict{Symbol, Any} + _ccorc::_CoreComponentOptResultContainer +end + +function Base.getproperty(ccr::_CoreComponentResult, field::Symbol) + try + (field == :var) && (return getfield(ccr, :_ccorc).variables) + (field == :con) && (return getfield(ccr, :_ccorc).constraints) + (field == :exp) && (return getfield(ccr, :_ccorc).expressions) + (field == :obj) && (return getfield(ccr, :_ccorc).objectives) + (field == :res) && (return getfield(ccr, :_ccorc).results) + return getfield(ccr, :_info)[field] + catch e + @critical "Field not found in _CoreComponentResult" e + end +end + +function Base.propertynames(ccr::_CoreComponentResult) + return (propertynames(ccr)..., :exp, :var, :con, :obj, keys(getfield(ccr, :_info))...) 
+end + +_hasexp(cc::_CoreComponent, name::Symbol) = haskey(getfield(getfield(cc, :_ccoc).expressions, :dict), name) +_hasvar(cc::_CoreComponent, name::Symbol) = haskey(getfield(getfield(cc, :_ccoc).variables, :dict), name) +_hascon(cc::_CoreComponent, name::Symbol) = haskey(getfield(getfield(cc, :_ccoc).constraints, :dict), name) +_hasobj(cc::_CoreComponent, name::Symbol) = haskey(getfield(getfield(cc, :_ccoc).objectives, :dict), name) +_hasres(cc::_CoreComponent, name::Symbol) = haskey(getfield(getfield(cc, :_ccoc).results, :dict), name) + +mutable struct _Profiling + time::Float64 + bytes::Int + calls::Int +end + +mutable struct _IESoptInputData + config::Union{_Config, Nothing} + files::Dict{String, Union{Module, DataFrames.DataFrame}} + addons::Dict{Any, Any} # todo + + noncore::Dict{Symbol, Any} + stochastic::Dict{Symbol, Dict} + parameters::Dict{String, Any} + + _tl_yaml::Dict{String, Any} +end + +mutable struct _IESoptModelData + T::Vector{_ID} + components::Dict{String, _CoreComponent} + objectives::Dict{String, NamedTuple} + + snapshots::Dict{_ID, Snapshot} + carriers::Dict{String, Carrier} +end + +mutable struct _IESoptAuxiliaryData + constraint_safety_penalties::Dict{JuMP.ConstraintRef, NamedTuple} + constraint_safety_expressions::Any # todo + etdf::NamedTuple + _flattened_description::Dict{String, Any} + _obj_terms::Dict{String, Vector{Union{String, Float64}}} + _profiling::Dict{Tuple{Symbol, String, Symbol}, _Profiling} + cache::Dict{Symbol, Any} +end + +mutable struct _IESoptResultData + components::Dict{String, _CoreComponentResult} + objectives::Dict{String, Float64} + customs::Dict{Symbol, Union{Float64, Vector{Float64}}} + _templates::Dict{String, Any} +end + +mutable struct _IESoptData + input::_IESoptInputData + model::_IESoptModelData + + results::_IESoptResultData + + logger::Union{AbstractLogger, Nothing} + debug::Any # todo: @stacktrace(model, "optional msg") inserts a stacktrace into the debug field incl. 
the current function + + aux::_IESoptAuxiliaryData + + # auxiliary_objective_sc # todo: remove from doc +end + +function _IESoptInputData(toplevel_yaml::Dict) + return _IESoptInputData( + nothing, + Dict{String, Union{Module, DataFrames.DataFrame}}(), + Dict{Any, Any}(), + Dict{Symbol, Any}(), + Dict{Symbol, Any}(), + Dict{String, Any}(), + toplevel_yaml, + ) +end + +function _IESoptModelData() + return _IESoptModelData( + Vector{_ID}(), + Dict{String, _CoreComponent}(), + Dict{String, NamedTuple{(:terms, :expr), Tuple{Set{JuMP.AffExpr}, JuMP.AffExpr}}}(), + Dict{_ID, Snapshot}(), + Dict{String, Carrier}(), + ) +end + +function _IESoptAuxiliaryData() + return _IESoptAuxiliaryData( + Dict{JuMP.ConstraintRef, NamedTuple}(), + nothing, + (groups=Dict{String, Vector{_ID}}(), constr=Dict{String, Vector{JuMP.ConstraintRef}}()), + Dict{String, Any}(), + Dict{String, Vector{Union{String, Float64}}}(), + Dict{Tuple{Symbol, String, Symbol}, _Profiling}(), + Dict{Symbol, Any}(), + ) +end + +function _IESoptResultData() + return _IESoptResultData( + Dict{String, _CoreComponentResult}(), + Dict{String, Float64}(), + Dict{Symbol, Union{Float64, Vector{Float64}}}(), + Dict{String, Any}(), + ) +end + +function _IESoptData(toplevel_yaml::Dict) + return _IESoptData( + _IESoptInputData(toplevel_yaml), + _IESoptModelData(), + _IESoptResultData(), + nothing, + nothing, + _IESoptAuxiliaryData(), + ) +end + +_iesopt(model::JuMP.Model) = model.ext[:iesopt] +_iesopt_config(model::JuMP.Model) = _iesopt(model).input.config +_iesopt_debug(model::JuMP.Model) = _iesopt(model).debug # TODO: as soon as debug is a "stack", only report the last entry in this function +_iesopt_cache(model::JuMP.Model) = _iesopt(model).aux.cache +_iesopt_model(model::JuMP.Model) = _iesopt(model).model + +_has_addons(model::JuMP.Model) = !isempty(_iesopt(model).input.addons) + +_has_cache(model::JuMP.Model, cache::Symbol) = haskey(_iesopt_cache(model), cache) +_get_cache(model::JuMP.Model, cache::Symbol) = _iesopt_cache(model)[cache] + +function _is_cached(model::JuMP.Model, cache::Symbol, entry::Any) + return _has_cache(model, cache) && haskey(_iesopt_cache(model)[cache], entry) +end +_get_cached(model::JuMP.Model, cache::Symbol, entry::Any) = _iesopt_cache(model)[cache][entry] diff --git a/src/core/carrier.jl b/src/core/carrier.jl new file mode 100644 index 0000000..e706540 --- /dev/null +++ b/src/core/carrier.jl @@ -0,0 +1,45 @@ +""" + struct Carrier + name::String + unit::Union{String, Nothing} + end + +Represents a single (energy) carrier with a given `name`. + +This is mostly used to represent various commodities that (easily) represent some form of energy (e.g. gas, water, ...), +but also enables modelling commodities that are not (treated as) representing some type of energy (e.g. CO2). Specify +`unit` to bind that carrier to an (arbitrary) unit that allows easier plotting and result analysis. +""" +Base.@kwdef struct Carrier + # mandatory + name::String + + # optional + unit::Union{String, Nothing} = nothing + color::Union{String, Nothing} = nothing +end + +Base.hash(carrier::Carrier) = hash(carrier.name) + +""" + _parse_carriers(carriers::Dict{String, Any}) + +Correctly parses a dictionary of carriers (obtained from reading model.yaml) into a dictionary that maps the carrier +name onto the `Carrier`. +""" +function _parse_carriers!(model::JuMP.Model, carriers::Dict{String, Any}) + _iesopt(model).model.carriers = Dict{String, Carrier}( + k => Carrier(; name=k, Dict(Symbol(prop) => val for (prop, val) in props)...) 
for (k, props) in carriers + ) + + return nothing +end + +# function _parse_carriers!(model::JuMP.Model, ::Nothing, ::_CSVModel) +# df = _iesopt(model).input.files["carriers"] +# _iesopt(model).model.carriers = Dict{String, Carrier}( +# row["name"] => Carrier(; Dict(Symbol(k) => v for (k, v) in zip(names(row), row) if !ismissing(v))...) for row in DataFrames.eachrow(df) +# ) +# end + +Base.string(carrier::Carrier) = carrier.name diff --git a/src/core/connection.jl b/src/core/connection.jl new file mode 100644 index 0000000..5a174aa --- /dev/null +++ b/src/core/connection.jl @@ -0,0 +1,274 @@ +""" +A `Connection` is used to model arbitrary flows of energy between `Node`s. It allows for limits, costs, delays, ... +""" +@kwdef struct Connection <: _CoreComponent + # [Core] =========================================================================================================== + model::JuMP.Model + init_state::Ref{Symbol} = Ref(:empty) + constraint_safety::Bool + constraint_safety_cost::_ScalarInput + + # [Mandatory] ====================================================================================================== + name::_String + + raw"""```{"mandatory": "yes", "values": "string", "default": "-"}``` + This `Connection` models a flow from `node_from` to `node_to` (both are `Node`s). + """ + node_from::Union{_String, Nothing} = nothing + + raw"""```{"mandatory": "yes", "values": "string", "default": "-"}``` + This `Connection` models a flow from `node_from` to `node_to` (both are `Node`s). + """ + node_to::Union{_String, Nothing} = nothing + + carrier::Carrier + + # [Optional] ======================================================================================================= + config::Dict{String, Any} = Dict() + ext::Dict{String, Any} = Dict() + addon::Union{String, Nothing} = nothing + conditional::Bool = false + + capacity::_OptionalExpression = nothing + lb::_OptionalExpression = nothing + ub::_OptionalExpression = nothing + + cost::_OptionalExpression = nothing + loss::_OptionalExpression = nothing + + # Energy Transfer Distribution Factors + etdf::Union{Dict{<:Union{_ID, _String}, <:Any}, _String, Nothing} = nothing + + # Powerflow + is_pf_controlled::Ref{Bool} = Ref(false) + pf_I::_OptionalScalarInput = nothing + pf_V::_OptionalScalarInput = nothing + pf_X::_OptionalScalarInput = nothing + pf_R::_OptionalScalarInput = nothing + pf_B::_OptionalScalarInput = nothing + + # [Internal] ======================================================================================================= + # - + + # [External] ======================================================================================================= + # - + + # [Optimization Container] ========================================================================================= + _ccoc = _CoreComponentOptContainer() +end + +_result_fields(::Connection) = (:name, :carrier, :node_from, :node_to) + +function _check(connection::Connection) + !connection.conditional && return true + + # Check if the connected nodes exist. + !haskey(_iesopt(connection.model).model.components, connection.node_from) && return false + !haskey(_iesopt(connection.model).model.components, connection.node_to) && return false + + return true +end + +function _prepare!(connection::Connection) + model = connection.model + + if !isnothing(connection.etdf) + @error "ETDFs are disabled until a rework to PowerModels.jl is done" connection = connection.name + # if isa(connection.etdf, _String) + # # Load ETDF from supplied file. 
+ # data = _iesopt(model).input.files[connection.etdf] + + # if hasproperty(data, "connection.name") + # # This is a static ETDF matrix (n x l). + # df = @view data[data[!, "connection.name"] .== connection.name, 2:end] + # connection.etdf = Dict(ids[k] => v for (k, v) in Pair.(names(df), collect(df[1, :]))) + # else + # # This is a dynamic ETDF matrix (t x nl). + # connection.etdf = Dict{_ID, Vector{_ScalarInput}}() + # for col in names(data) + # node, conn = split(col, ":") + # conn != connection.name && continue + # connection.etdf[ids[node]] = data[!, col] + # end + # end + # else + # # Convert string name of ETDFs to proper Node ids. + # connection.etdf = Dict( + # ids[k] => + # connection.etdf[k] isa Number ? connection.etdf[k] : _conv_S2NI(model, connection.etdf[k]) for + # k in keys(connection.etdf) + # ) + # end + end + + # Cutoff ETDFs based on model threshold. + if !isnothing(connection.etdf) + # for (node, val) in connection.etdf + # if isa(connection.etdf[node], Number) + # connection.etdf[node] = + # abs(connection.etdf[node]) < _iesopt_config(model).etdf_threshold ? 0 : connection.etdf[node] + # else + # for t in 1:length(connection.etdf[node]) + # connection.etdf[node][t] = + # abs(connection.etdf[node][t]) < _iesopt_config(model).etdf_threshold ? 0 : + # connection.etdf[node][t] + # end + # end + # end + end + + # Check whether this Connection is controlled by a global PF addon + connection.is_pf_controlled[] = any([ + !isnothing(connection.pf_B), + !isnothing(connection.pf_I), + !isnothing(connection.pf_R), + !isnothing(connection.pf_V), + !isnothing(connection.pf_X), + ]) + + if connection.is_pf_controlled[] + # Do proper per-unit conversion for a three-phase system: + # see: https://electricalacademia.com/electric-power/per-unit-calculation-per-unit-system-examples/ + V_base = connection.pf_V * 1e3 # voltage base, based on line voltage + S_base = 1e6 # apparent power base = 1 MVA + I_base = S_base / (V_base * sqrt(3)) + Z_base = V_base / (I_base * sqrt(3)) + + connection.pf_V = (connection.pf_V * 1e3) / V_base + connection.pf_I = (connection.pf_I * 1e3) / I_base + if !isnothing(connection.pf_R) + connection.pf_R = connection.pf_R / Z_base + end + if !isnothing(connection.pf_X) + connection.pf_X = connection.pf_X / Z_base + end + + if isnothing(connection.capacity) + # Only calculate capacity if it is not given by the user + connection.capacity = _convert_to_expression(model, connection.pf_V * connection.pf_I) + end + # todo: convert B1 + end + + return true +end + +function _isvalid(connection::Connection) + if !isnothing(connection.capacity) && (!isnothing(connection.lb)) + @critical "Setting as well as for Connection can result in unexpected behaviour" connection = + connection.name + end + + if !isnothing(connection.capacity) && (!isnothing(connection.ub)) + @critical "Setting as well as for Connection can result in unexpected behaviour" connection = + connection.name + end + + if !isnothing(connection.cost) && connection.cost.is_expression && length(connection.cost.decisions) > 0 + @critical "Endogenuous Connection leads to quadratic expressions and is currently not supported" connection = + connection.name + end + + if !isnothing(connection.loss) && + (isnothing(connection.lb) || connection.lb.is_expression || any(<(0), connection.lb.value)) + @critical "Setting for Connection requires nonnegative " connection = connection.name + end + + return true +end + +function _setup!(connection::Connection) + return nothing +end + +function _result(connection::Connection, 
mode::String, field::String; result::Int=1) + if isnothing(findfirst("flow", field)) + @error "Connection cannot extract field" connection = connection.name field = field + return nothing + end + + if connection.is_pf_controlled[] + if mode == "value" && field == "flow" + return "$(connection.name).flow", JuMP.value.(connection.exp.pf_flow; result=result) + elseif mode == "sum" && field == "flow" + return "$(connection.name).sum.flow", sum(JuMP.value.(connection.exp.pf_flow; result=result)) + end + elseif !isnothing(connection.etdf) + @error "ETDF results are disabled until a rework to PowerModels.jl is done" connection = connection.name + # flow = sum( + # _iesopt(connection.model).model.components[id].exp.injection .* connection.etdf[id] for + # id in keys(connection.etdf) + # ) + # if mode == "value" && field == "flow" + # return "$(connection.name).flow", JuMP.value.(flow; result=result) + # elseif mode == "sum" && field == "flow" + # return "$(connection.name).sum.flow", sum(JuMP.value.(flow; result=result)) + # end + else + if mode == "value" && field == "flow" + return "$(connection.name).flow", JuMP.value.(connection.var.flow; result=result) + elseif mode == "sum" && field == "flow" + return "$(connection.name).sum.flow", sum(JuMP.value.(connection.var.flow; result=result)) + end + end + + if mode == "dual" + # todo: JuMP dual result fix + if result != 1 + @error "Duals are currently only available for the first result (this is a limitation of the JuMP interface)" + end + bound, tmp = split(field, ":") + if tmp != "flow" + @error "Connection got unknown field for result extraction" connection = connection.name mode = mode field = + field + return nothing + end + if bound == "ub" + return "<$(connection.name)>.shadow_price.ub.flow", JuMP.shadow_price.(connection.con.flow_ub) + elseif bound == "lb" + return "<$(connection.name)>.shadow_price.lb.flow", JuMP.shadow_price.(connection.con.flow_lb) + else + @error "Connection got unknown field for result extraction" connection = connection.name mode = mode field = + field + return nothing + end + end + + @error "Unknown result extraction" connection = connection.name mode = mode field = field + return nothing +end + +include("connection/exp_pf_flow.jl") +include("connection/var_flow.jl") +include("connection/con_flow_bounds.jl") +include("connection/obj_cost.jl") + +function _construct_expressions!(connection::Connection) + @profile connection.model _connection_exp_pf_flow!(connection) + return nothing +end + +function _construct_variables!(connection::Connection) + @profile connection.model _connection_var_flow!(connection) + return nothing +end + +function _after_construct_variables!(connection::Connection) + # We can now properly finalize the `lb`, `ub`, `capacity`, and `cost`. 
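+    # `_finalize` substitutes the variables of any referenced `Decision`s into these expressions, so it must only
+    # run after `_construct_variables!` has created those variables (see `_finalize` in `src/core/expression.jl`).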
+ !isnothing(connection.lb) && _finalize(connection.lb) + !isnothing(connection.ub) && _finalize(connection.ub) + !isnothing(connection.capacity) && _finalize(connection.capacity) + !isnothing(connection.cost) && _finalize(connection.cost) + + return nothing +end + +function _construct_constraints!(connection::Connection) + @profile connection.model _connection_con_flow_bounds!(connection) + return nothing +end + +function _construct_objective!(connection::Connection) + @profile connection.model _connection_obj_cost!(connection) + return nothing +end diff --git a/src/core/connection/con_flow_bounds.jl b/src/core/connection/con_flow_bounds.jl new file mode 100644 index 0000000..05df6e6 --- /dev/null +++ b/src/core/connection/con_flow_bounds.jl @@ -0,0 +1,111 @@ +@doc raw""" + _connection_con_flow_bounds!(model::JuMP.Model, connection::Connection) + +Add the constraint defining the bounds of the flow (related to `connection`) to the `model`. + +Specifiying `capacity` will lead to symmetric bounds (``\text{lb} := -capacity`` and ``\text{ub} := capacity``), while +asymmetric bounds can be set by explicitly specifiying `lb` and `ub`. + +!!! note + Usage of `etdf` is currently not fully tested, and not documented. + +Upper and lower bounds can be "infinite" (by not setting them) resulting in the repective constraints not being added, +and the flow variable therefore being (partially) unconstrained. Depending on the configuration the `flow` is calculated +differently: +- if `connection.etdf` is set, it is based on an ETDF sum flow, +- if `connection.exp.pf_flow` is available, it equals this +- else it equal `connection.var.flow` + +This flow is then constrained: + +> ```math +> \begin{aligned} +> & \text{flow}_t \geq \text{lb}, \qquad \forall t \in T \\ +> & \text{flow}_t \leq \text{ub}, \qquad \forall t \in T +> \end{aligned} +> ``` + +!!! note "Constraint safety" + The lower and upper bound constraint are subject to penalized slacks. +""" +function _connection_con_flow_bounds!(connection::Connection) + model = connection.model + components = _iesopt(model).model.components + + # todo: rework only getting/checking lb/ub once + if !isnothing(connection.capacity) || !isnothing(_get(connection.lb, 1)) + connection.con.flow_lb = Vector{JuMP.ConstraintRef}(undef, _iesopt(model).model.T[end]) + end + if !isnothing(connection.capacity) || !isnothing(_get(connection.ub, 1)) + connection.con.flow_ub = Vector{JuMP.ConstraintRef}(undef, _iesopt(model).model.T[end]) + end + + if !isnothing(connection.etdf) + etdf_flow = sum(components[id].exp.injection .* connection.etdf[id] for id in keys(connection.etdf)) + end + + for t in _iesopt(model).model.T + # If a Snapshot is representative, it's either representative or there are no activated representative Snapshots. + !_iesopt(model).model.snapshots[t].is_representative && continue + + # Calculate proper lower and upper bounds of the flow. + lb = isnothing(connection.capacity) ? _get(connection.lb, t) : -_get(connection.capacity, t) + ub = isnothing(connection.capacity) ? 
_get(connection.ub, t) : _get(connection.capacity, t) + + constrained_flow = if !isnothing(connection.etdf) + etdf_flow + elseif _hasexp(connection, :pf_flow) + connection.exp.pf_flow + else + connection.var.flow + end + + if !isnothing(lb) + connection.con.flow_lb[t] = + @constraint(model, constrained_flow[t] >= lb, base_name = _base_name(connection, "flow_lb[$t]")) + end + if !isnothing(ub) + connection.con.flow_ub[t] = + @constraint(model, constrained_flow[t] <= ub, base_name = _base_name(connection, "flow_ub[$t]")) + end + end + + if _has_representative_snapshots(model) + # Use the constructed representatives. + for t in _iesopt(model).model.T + _iesopt(model).model.snapshots[t].is_representative && continue + + if !isnothing(connection.capacity) || !isnothing(connection.lb) + connection.con.flow_lb[t] = connection.con.flow_lb[_iesopt(model).model.snapshots[t].representative] + end + if !isnothing(connection.capacity) || !isnothing(connection.ub) + connection.con.flow_ub[t] = connection.con.flow_ub[_iesopt(model).model.snapshots[t].representative] + end + end + end + + # Handle constraint safety (if enabled). + if connection.constraint_safety + for t in _iesopt(model).model.T + # Skip constraint safety for non-representative Snapshots. + !_iesopt(model).model.snapshots[t].is_representative && continue + + if !isnothing(connection.capacity) || !isnothing(connection.lb) + _iesopt(model).aux.constraint_safety_penalties[connection.con.flow_lb[t]] = ( + component_name=connection.name, + t=t, + description="flow_lb", + penalty=connection.constraint_safety_cost, + ) + end + if !isnothing(connection.capacity) || !isnothing(connection.ub) + _iesopt(model).aux.constraint_safety_penalties[connection.con.flow_ub[t]] = ( + component_name=connection.name, + t=t, + description="flow_ub", + penalty=connection.constraint_safety_cost, + ) + end + end + end +end diff --git a/src/core/connection/exp_pf_flow.jl b/src/core/connection/exp_pf_flow.jl new file mode 100644 index 0000000..31e2055 --- /dev/null +++ b/src/core/connection/exp_pf_flow.jl @@ -0,0 +1,20 @@ +@doc raw""" + _connection_exp_pf_flow!(connection::Connection) + +Construct the `JuMP.AffExpr` holding the PTDF based flow of this `Connection`. + +This needs the global addon `Powerflow` with proper settings for `mode`, as well as properly configured power flow +parameters for this `Connection` (`pf_V`, `pf_I`, `pf_X`, ...). +""" +function _connection_exp_pf_flow!(connection::Connection) + model = connection.model + + !haskey(_iesopt(model).input.addons, "Powerflow") && return + !connection.is_pf_controlled[] && return + + if _iesopt(model).input.addons["Powerflow"].config["__settings__"].mode === :linear_angle + connection.exp.pf_flow = [JuMP.AffExpr(0) for _ in _iesopt(model).model.T] + end + + return nothing +end diff --git a/src/core/connection/obj_cost.jl b/src/core/connection/obj_cost.jl new file mode 100644 index 0000000..c05c922 --- /dev/null +++ b/src/core/connection/obj_cost.jl @@ -0,0 +1,38 @@ +@doc raw""" + _connection_obj_cost!(model::JuMP.Model, connection::Connection) + +Add the (potential) cost of this `connection` to the global objective function. + +The `connection.cost` setting introduces a fixed cost of "transportation" to the flow of this `Connection`. It is based +on the directed flow. This means that flows in the "opposite" direction will lead to negative costs: + +```math +\sum_{t \in T} \text{flow}_t \cdot \text{cost}_t \cdot \omega_t +``` + +Here $\omega_t$ is the weight of `Snapshot` `t`. + +!!! 
note "Costs for flows in both directions" + If you need to apply a cost term to the absolute value of the flow, consider splitting the `Connection` into two + different ones, in opposing directions, and including `lb = 0`. +""" +function _connection_obj_cost!(connection::Connection) + if isnothing(connection.cost) + return nothing + end + + model = connection.model + + connection.obj.cost = JuMP.AffExpr(0.0) + for t in _iesopt(connection.model).model.T + JuMP.add_to_expression!( + connection.obj.cost, + connection.var.flow[t], + _weight(model, t) * _get(connection.cost, t), + ) + end + + push!(_iesopt(model).model.objectives["total_cost"].terms, connection.obj.cost) + + return nothing +end diff --git a/src/core/connection/var_flow.jl b/src/core/connection/var_flow.jl new file mode 100644 index 0000000..aabfce4 --- /dev/null +++ b/src/core/connection/var_flow.jl @@ -0,0 +1,83 @@ +@doc raw""" + _connection_var_flow!(connection::Connection) + +Add the variable representing the flow of this `connection` to the `model`. This can be accessed via +`connection.var.flow[t]`. + +Additionally, the flow gets "injected" at the `Node`s that the `connection` is connecting, resulting in +```math +\begin{aligned} + & \text{connection.node}_{from}\text{.injection}_t = \text{connection.node}_{from}\text{.injection}_t - \text{flow}_t, \qquad \forall t \in T \\ + & \text{connection.node}_{to}\text{.injection}_t = \text{connection.node}_{to}\text{.injection}_t + \text{flow}_t, \qquad \forall t \in T +\end{aligned} +``` + +> For "PF controlled" `Connection`s (ones that define the necessary power flow parameters), the flow variable may not be +> constructed (depending on specific power flow being used). The automatic result extraction will detect this and return +> the correct values either way. Accessing it manually can be done using `connection.exp.pf_flow[t]`. +""" +function _connection_var_flow!(connection::Connection) + model = connection.model + components = _iesopt(model).model.components + + if !isnothing(connection.etdf) + return nothing + end + + node_from = components[connection.node_from] + node_to = components[connection.node_to] + + if connection.is_pf_controlled[] + if _has_representative_snapshots(model) + @critical "Representative Snapshots are currently not supported for models using Powerflow" + end + + # This is a passive Conection. + @simd for t in _iesopt(model).model.T + # Construct the flow expression. + JuMP.add_to_expression!(connection.exp.pf_flow[t], node_from.var.pf_theta[t], 1.0 / connection.pf_X) + JuMP.add_to_expression!(connection.exp.pf_flow[t], node_to.var.pf_theta[t], -1.0 / connection.pf_X) + + # Connect to correct nodes. + JuMP.add_to_expression!(node_from.exp.injection[t], connection.exp.pf_flow[t], -1) + JuMP.add_to_expression!(node_to.exp.injection[t], connection.exp.pf_flow[t], 1) + end + else + # This is a controllable Connection. + + # Construct the flow variable. + if !_has_representative_snapshots(model) + connection.var.flow = @variable( + model, + [t = _iesopt(model).model.T], + base_name = _base_name(connection, "flow"), + container = Array + ) + else + # Create all representatives. + _repr = Dict( + t => @variable(model, base_name = _base_name(connection, "flow[$(t)]")) for + t in _iesopt(model).model.T if _iesopt(model).model.snapshots[t].is_representative + ) + + # Create all variables, either as themselves or their representative. + connection.var.flow = collect( + _iesopt(model).model.snapshots[t].is_representative ? 
_repr[t] : + _repr[_iesopt(model).model.snapshots[t].representative] for t in _iesopt(model).model.T + ) + end + + # Connect to correct nodes. + loss = something(connection.loss, 0) + @simd for t in _iesopt(model).model.T + JuMP.add_to_expression!(components[connection.node_from].exp.injection[t], -connection.var.flow[t]) + JuMP.add_to_expression!( + components[connection.node_to].exp.injection[t], + connection.var.flow[t], + 1 - _get(loss, t), + ) + end + end + + return nothing +end diff --git a/src/core/decision.jl b/src/core/decision.jl new file mode 100644 index 0000000..89fdbe3 --- /dev/null +++ b/src/core/decision.jl @@ -0,0 +1,159 @@ +""" +A `Decision` represents a basic decision variable in the model that can be used as input for various other core +component's settings, as well as have associated costs. +""" +@kwdef struct Decision <: _CoreComponent + # [Core] =========================================================================================================== + model::JuMP.Model + init_state::Ref{Symbol} = Ref(:empty) + constraint_safety::Bool + constraint_safety_cost::_ScalarInput + + # [Mandatory] ====================================================================================================== + name::_String + + # [Optional] ======================================================================================================= + config::Dict{String, Any} = Dict() + ext::Dict{String, Any} = Dict() + addon::Union{String, Nothing} = nothing + conditional::Bool = false + + raw"""```{"mandatory": "no", "values": "numeric", "default": "`0`"}``` + Minimum size of the decision (considered for each "unit" if count allows multiple "units"). + """ + lb::_OptionalScalarInput = 0 + ub::_OptionalScalarInput = nothing + fixed_value::_OptionalScalarInput = nothing + cost::_OptionalScalarInput = nothing + fixed_cost::_OptionalScalarInput = nothing + + mode::Symbol = :linear + sos::Vector{Dict{String, Float64}} = Vector() + + # [Internal] ======================================================================================================= + # - + + # [External] ======================================================================================================= + # - + + # [Optimization Container] ========================================================================================= + _ccoc = _CoreComponentOptContainer() +end + +_result_fields(::Decision) = (:name, :mode) + +function _prepare!(decision::Decision) + return true +end + +function _isvalid(decision::Decision) + model = decision.model + + if (decision.mode in [:binary, :integer, :sos1, :sos2]) && !_is_milp(model) + @critical "Model config only allows LP but MILP is required" decision = decision.name mode = decision.mode + end + + if !isnothing(decision.fixed_cost) && !_is_milp(model) + @critical "Model config only allows LP but MILP is required for modelling fixed costs" decision = decision.name mode = + decision.mode + end + + if (decision.mode === :binary) && !isnothing(decision.ub) && decision.ub != 1.0 + @critical "Binary variables with `ub != 1` are not possible" decision = decision.name ub = decision.ub + end + + if (decision.mode in [:sos1, :sos2]) && !isnothing(decision.cost) + @critical "SOS1/SOS2 Decisions should not have a `cost` parameter" decision = decision.name mode = decision.mode + end + + if (decision.mode != :fixed) && !isnothing(decision.fixed_value) + @critical "Decisions that are not fixed can not have a pre-set value" decision = decision.name + end + + if !isnothing(decision.fixed_cost) && 
isnothing(decision.ub) && !(decision.mode in [:sos1, :sos2]) + @critical "Decisions with fixed costs require a defined upper bound" decision = decision.name + end + + return true +end + +function _result(decision::Decision, mode::String, field::String; result::Int=1) + if !(field in ["value", "size", "count"]) + @error "Decision cannot extract field" decision = decision.name field = field + return nothing + end + + if mode == "dual" + if decision.mode != :fixed + @error "Extracting of non-fixed Decisions is currently not supported" decision = decision.name + return nothing + else + # todo: JuMP dual result fix + if result != 1 + @error "Duals are currently only available for the first result (this is a limitation of the JuMP interface)" + end + return "Decision.fixed_value.dual", JuMP.reduced_cost(decision.var.value) + end + end + + if mode != "value" + @error "Decision cannot apply mode to extraction of field" decision = decision.name mode = mode + return nothing + end + + if field in ["size", "count"] + @error "`decision:size` and `decision:count` are deprecated and most likely do not work as exepected; please change to extracting `decision:value`" decision = + decision.name mode = mode + end + + if field == "value" + return "Decision.value", JuMP.value.(_value(decision); result=result) + elseif field == "size" + return "Decision.size", JuMP.value.(_size(decision); result=result) + elseif field == "count" + return "Decision.count", JuMP.value.(_count(decision); result=result) + end + + @error "Unknown result extraction" decision = decision.name mode = mode field = field + return nothing +end + +include("decision/con_fixed.jl") +include("decision/con_sos_value.jl") +include("decision/con_sos1.jl") +include("decision/con_sos2.jl") +include("decision/obj_fixed.jl") +include("decision/obj_sos.jl") +include("decision/obj_value.jl") +include("decision/var_fixed.jl") +include("decision/var_sos.jl") +include("decision/var_value.jl") + +function _construct_variables!(decision::Decision) + @profile decision.model _decision_var_fixed!(decision) + @profile decision.model _decision_var_sos!(decision) + @profile decision.model _decision_var_value!(decision) + return nothing +end + +function _construct_constraints!(decision::Decision) + @profile decision.model _decision_con_fixed!(decision) + @profile decision.model _decision_con_sos_value!(decision) + @profile decision.model _decision_con_sos1!(decision) + return _decision_con_sos2!(decision) +end + +function _construct_objective!(decision::Decision) + @profile decision.model _decision_obj_fixed!(decision) + @profile decision.model _decision_obj_sos!(decision) + @profile decision.model _decision_obj_value!(decision) + return nothing +end + +_value(decision::Decision) = decision.var.value +_count(decision::Decision) = decision.var.value +_size(decision::Decision) = decision.var.value + +_value(decision::Decision, t::_ID) = _value(decision) +_count(decision::Decision, t::_ID) = _count(decision) +_size(decision::Decision, t::_ID) = _size(decision) diff --git a/src/core/decision/con_fixed.jl b/src/core/decision/con_fixed.jl new file mode 100644 index 0000000..ce7e6d8 --- /dev/null +++ b/src/core/decision/con_fixed.jl @@ -0,0 +1,28 @@ +@doc raw""" + _decision_con_fixed!(decision::Decision) + +to be added +""" +function _decision_con_fixed!(decision::Decision) + if isnothing(decision.fixed_cost) || (decision.mode === :sos1) + return + end + + model = decision.model + + if decision.mode === :sos2 + decision.con.fixed = @constraint( + model, + 
decision.var.value <= decision.var.fixed * maximum(it["value"] for it in decision.sos), + base_name = _base_name(decision, "fixed") + ) + else + decision.con.fixed = @constraint( + model, + decision.var.value <= decision.var.fixed * decision.ub, + base_name = _base_name(decision, "fixed") + ) + end + + return nothing +end diff --git a/src/core/decision/con_sos1.jl b/src/core/decision/con_sos1.jl new file mode 100644 index 0000000..a6a5f3c --- /dev/null +++ b/src/core/decision/con_sos1.jl @@ -0,0 +1,43 @@ +@doc raw""" + _decision_con_sos1!(decision::Decision) + +to be added +""" +function _decision_con_sos1!(decision::Decision) + if decision.mode != :sos1 + return nothing + end + + model = decision.model + + decision.con.sos_set = @constraint( + model, + decision.var.sos in JuMP.SOS1([item["cost"] for item in decision.sos]), # todo: considered fixed_cost here for the weight! + base_name = _base_name(decision, "sos_set") + ) + + decision.con.sos1_lb = Vector{JuMP.ConstraintRef}() + decision.con.sos1_ub = Vector{JuMP.ConstraintRef}() + sizehint!(decision.con.sos1_lb, length(decision.sos) - 1) + sizehint!(decision.con.sos1_ub, length(decision.sos) - 1) + for i in eachindex(decision.sos) + push!( + decision.con.sos1_lb, + @constraint( + model, + decision.sos[i]["lb"] * decision.var.sos[i] <= decision.var.sos1_value[i], + base_name = _base_name(decision, "sos1_lb[$(i)]") + ) + ) + push!( + decision.con.sos1_ub, + @constraint( + model, + decision.var.sos1_value[i] <= decision.sos[i]["ub"] * decision.var.sos[i], + base_name = _base_name(decision, "sos1_ub[$(i)]") + ) + ) + end + + return nothing +end diff --git a/src/core/decision/con_sos2.jl b/src/core/decision/con_sos2.jl new file mode 100644 index 0000000..928db47 --- /dev/null +++ b/src/core/decision/con_sos2.jl @@ -0,0 +1,23 @@ +@doc raw""" + _decision_con_sos2!(decision::Decision) + +to be added +""" +function _decision_con_sos2!(decision::Decision) + if decision.mode != :sos2 + return nothing + end + + model = decision.model + + decision.con.sos_set = @constraint( + model, + decision.var.sos in JuMP.SOS2(), # todo: calculate proper weights for induced order + base_name = _base_name(decision, "sos_set") + ) + + decision.con.sos2 = + @constraint(model, sum(v for v in decision.var.sos) == 1.0, base_name = _base_name(decision, "sos2")) # todo: modify this based on fixed + + return nothing +end diff --git a/src/core/decision/con_sos_value.jl b/src/core/decision/con_sos_value.jl new file mode 100644 index 0000000..028ad74 --- /dev/null +++ b/src/core/decision/con_sos_value.jl @@ -0,0 +1,29 @@ +@doc raw""" + _decision_con_sos_value!(decision::Decision) + +to be added +""" +function _decision_con_sos_value!(decision::Decision) + if (decision.mode != :sos1) && (decision.mode != :sos2) + return nothing + end + + model = decision.model + + if decision.mode === :sos1 + decision.con.sos_value = @constraint( + model, + decision.var.value == sum(v for v in decision.var.sos1_value), + base_name = _base_name(decision, "sos_value") + ) + elseif decision.mode === :sos2 + decision.con.sos_value = @constraint( + model, + decision.var.value == + _affine_expression(decision.var.sos[i] * decision.sos[i]["value"] for i in eachindex(decision.var.sos)), + base_name = _base_name(decision, "sos_value") + ) + end + + return nothing +end diff --git a/src/core/decision/obj_fixed.jl b/src/core/decision/obj_fixed.jl new file mode 100644 index 0000000..9396cf2 --- /dev/null +++ b/src/core/decision/obj_fixed.jl @@ -0,0 +1,35 @@ +@doc raw""" + 
_decision_obj_fixed!(decision::Decision) + +to be added +``` +""" +function _decision_obj_fixed!(decision::Decision) + if isnothing(decision.fixed_cost) && decision.mode != :sos1 + return + end + + model = decision.model + + decision.obj.fixed = JuMP.AffExpr(0.0) + if decision.mode === :sos1 + for i in eachindex(decision.sos) + if haskey(decision.sos[i], "fixed_cost") + if !isnothing(decision.fixed_cost) + @warn "Decision is overwriting global fixed_cost based on SOS1 local" decision = decision.name maxlog = + 1 + end + + JuMP.add_to_expression!(decision.obj.fixed, decision.var.sos[i], decision.sos[i]["fixed_cost"]) + elseif !isnothing(decision.fixed_cost) + JuMP.add_to_expression!(decision.obj.fixed, decision.var.sos[i], decision.fixed_cost) + end + end + else + JuMP.add_to_expression!(decision.obj.fixed, decision.var.fixed, decision.fixed_cost) + end + + push!(_iesopt(model).model.objectives["total_cost"].terms, decision.obj.fixed) + + return nothing +end diff --git a/src/core/decision/obj_sos.jl b/src/core/decision/obj_sos.jl new file mode 100644 index 0000000..7b5b7ee --- /dev/null +++ b/src/core/decision/obj_sos.jl @@ -0,0 +1,27 @@ +@doc raw""" + _decision_obj_sos!(decision::Decision) + +Add the cost defined by the SOS-based value of this `Decision` to the `model`. +""" +function _decision_obj_sos!(decision::Decision) + if (decision.mode != :sos1) && (decision.mode != :sos2) + return nothing + end + + model = decision.model + + decision.obj.sos = JuMP.AffExpr(0.0) + if decision.mode === :sos1 + for i in eachindex(decision.var.sos1_value) + JuMP.add_to_expression!(decision.obj.sos, decision.var.sos1_value[i], decision.sos[i]["cost"]) + end + elseif decision.mode === :sos2 + for i in eachindex(decision.var.sos) + JuMP.add_to_expression!(decision.obj.sos, decision.var.sos[i], decision.sos[i]["cost"]) + end + end + + push!(_iesopt(model).model.objectives["total_cost"].terms, decision.obj.sos) + + return nothing +end diff --git a/src/core/decision/obj_value.jl b/src/core/decision/obj_value.jl new file mode 100644 index 0000000..b74b972 --- /dev/null +++ b/src/core/decision/obj_value.jl @@ -0,0 +1,20 @@ +@doc raw""" + _decision_obj_value!(decision::Decision) + +Add the cost defined by the value of this `Decision` to the `model`: + +```math +\text{value} \cdot \text{cost} +``` +""" +function _decision_obj_value!(decision::Decision) + if isnothing(decision.cost) || (decision.mode === :sos1 || decision.mode === :sos2) + return nothing + end + + model = decision.model + decision.obj.value = decision.var.value * decision.cost + push!(_iesopt(model).model.objectives["total_cost"].terms, decision.obj.value) + + return nothing +end diff --git a/src/core/decision/var_fixed.jl b/src/core/decision/var_fixed.jl new file mode 100644 index 0000000..52c741b --- /dev/null +++ b/src/core/decision/var_fixed.jl @@ -0,0 +1,15 @@ +@doc raw""" + _decision_var_fixed!(decision::Decision) + +to be added +""" +function _decision_var_fixed!(decision::Decision) + if isnothing(decision.fixed_cost) || (decision.mode === :sos1) + return + end + + model = decision.model + decision.var.fixed = @variable(model, binary = true, base_name = _base_name(decision, "fixed")) + + return nothing +end diff --git a/src/core/decision/var_sos.jl b/src/core/decision/var_sos.jl new file mode 100644 index 0000000..a9444f6 --- /dev/null +++ b/src/core/decision/var_sos.jl @@ -0,0 +1,32 @@ +@doc raw""" + _decision_var_sos!(decision::Decision) + +to be added +""" +function _decision_var_sos!(decision::Decision) + if (decision.mode != :sos1) && 
(decision.mode != :sos2) + return nothing + end + + model = decision.model + + decision.var.sos = @variable( + model, + [1:length(decision.sos)], + binary = (decision.mode === :sos1), + lower_bound = 0, + base_name = _base_name(decision, String(decision.mode)), + container = Array + ) + + if decision.mode === :sos1 + decision.var.sos1_value = @variable( + model, + [1:length(decision.sos)], + base_name = _base_name(decision, "sos1_value"), + container = Array + ) + end + + return nothing +end diff --git a/src/core/decision/var_value.jl b/src/core/decision/var_value.jl new file mode 100644 index 0000000..3ba5b79 --- /dev/null +++ b/src/core/decision/var_value.jl @@ -0,0 +1,34 @@ +@doc raw""" + _decision_var_value!(decision::Decision) + +Add the variable describing the `value` of this `decision` to the `model`. If lower and upper +bounds (`decision.lb` and `decision.ub`) are the same, the variable will immediately be fixed to that +value. This can be accessed via `decision.var.value`. +""" +function _decision_var_value!(decision::Decision) + model = decision.model + + decision.var.value = @variable( + model, + binary = (decision.mode === :binary), + integer = (decision.mode === :integer), + base_name = _base_name(decision, "value") + ) + + if decision.mode === :fixed + JuMP.fix(decision.var.value, decision.fixed_value) + else + if !isnothing(decision.ub) && (decision.lb == decision.ub) + JuMP.fix(decision.var.value, decision.lb) + else + if !isnothing(decision.lb) + JuMP.set_lower_bound(decision.var.value, decision.lb) + end + if !isnothing(decision.ub) + JuMP.set_upper_bound(decision.var.value, decision.ub) + end + end + end + + return nothing +end diff --git a/src/core/expression.jl b/src/core/expression.jl new file mode 100644 index 0000000..ff26f8c --- /dev/null +++ b/src/core/expression.jl @@ -0,0 +1,155 @@ +struct _Expression + model::JuMP.Model + + is_temporal::Bool + is_expression::Bool + decisions::Union{Nothing, Vector{Tuple{Float64, AbstractString, AbstractString}}} + value::Union{JuMP.AffExpr, Vector{JuMP.AffExpr}, Float64, Vector{Float64}} +end +const _OptionalExpression = Union{Nothing, _Expression} + +function _get(e::_Expression, t::_ID) + if !e.is_temporal + if e.is_expression && length(e.value.terms) == 0 + return e.value.constant + end + + return e.value + else + t = + _iesopt(e.model).model.snapshots[t].is_representative ? t : + _iesopt(e.model).model.snapshots[t].representative + + if e.is_expression && length(e.value[t].terms) == 0 + return e.value[t].constant + end + + return e.value[t] + end +end + +function _get(e::_Expression) # todo use this everywhere instead of ***.value + if !e.is_expression + return e.value + end + + if e.is_temporal + if length(e.value[1].terms) == 0 + if !_has_representative_snapshots(e.model) + return JuMP.value.(e.value) + else + return [ + JuMP.value( + e.value[(_iesopt(e.model).model.snapshots[t].is_representative ? 
t : + _iesopt(e.model).model.snapshots[t].representative)], + ) for t in _iesopt(e.model).model.T + ] + end + end + return e.value + else + if length(e.value.terms) == 0 + return e.value.constant + end + return e.value + end +end + +# This allows chaining without checking for Type in `parser.jl` +_convert_to_expression(model::JuMP.Model, ::Nothing) = nothing +_convert_to_expression(model::JuMP.Model, data::Int64) = + _Expression(model, false, false, nothing, convert(Float64, data)) +_convert_to_expression(model::JuMP.Model, data::Float64) = _Expression(model, false, false, nothing, data) +_convert_to_expression(model::JuMP.Model, data::Vector{Int64}) = + _Expression(model, true, false, nothing, _snapshot_aggregation!(model, convert(Vector{Float64}, data))) +_convert_to_expression(model::JuMP.Model, data::Vector{Real}) = + _Expression(model, true, false, nothing, _snapshot_aggregation!(model, convert(Vector{Float64}, data))) # mixed Int64, Float64 vector +_convert_to_expression(model::JuMP.Model, data::Vector{Float64}) = + _Expression(model, true, false, nothing, _snapshot_aggregation!(model, data)) + +# todos:: +# - needs to have an option to do it "parametric" +# - does not handle filecol * decision (for availability with investment) +function _convert_to_expression(model::JuMP.Model, str::AbstractString) + base_terms = strip.(split(str, "+")) + + decisions = Vector{Tuple{Float64, AbstractString, AbstractString}}() + filecols = Vector{Tuple{Float64, AbstractString, AbstractString}}() + constants = Vector{String}() + + for term in base_terms + if occursin(":", term) + if occursin("*", term) + coeff, factor = strip.(rsplit(term, "*"; limit=2)) + push!(decisions, (_safe_parse(Float64, coeff), eachsplit(factor, ":"; limit=2, keepempty=false)...)) + else + push!(decisions, (1.0, eachsplit(term, ":"; limit=2, keepempty=false)...)) + end + elseif occursin("@", term) + if occursin("*", term) + coeff, file = strip.(rsplit(term, "*"; limit=2)) + push!(filecols, (_safe_parse(Float64, coeff), eachsplit(file, "@"; limit=2, keepempty=false)...)) + else + push!(filecols, (1.0, eachsplit(term, "@"; limit=2, keepempty=false)...)) + end + else + push!(constants, term) + end + end + + has_decision = length(decisions) > 0 + has_file = length(filecols) > 0 + + if has_file + value = _snapshot_aggregation!( + model, + sum(fc[1] .* collect(skipmissing(_iesopt(model).input.files[fc[3]][!, fc[2]])) for fc in filecols) .+ + sum(_safe_parse(Float64, c) for c in constants; init=0.0), + ) + + if has_decision + return _Expression(model, true, true, decisions, @expression(model, [t = _iesopt(model).model.T], value[t])) + else + return _Expression(model, true, false, nothing, value) + end + elseif has_decision + return _Expression( + model, + false, + true, + decisions, + JuMP.AffExpr(sum(_safe_parse(Float64, c) for c in constants; init=0.0)), + ) + else + # return _Expression(model, false, true, nothing, JuMP.AffExpr(sum(parse(Float64, c) for c in constants; init=0.0))) + # todo: this does not work for `cost: [1, 2, 3]` or similar! + return _Expression(model, false, false, nothing, sum(_safe_parse(Float64, c) for c in constants; init=0.0)) + end +end + +function _finalize(e::_Expression) + # Can not finalize a scalar/vector. + !e.is_expression && return nothing + + # No need to finalize, if there are no `Decision`s involved. + isnothing(e.decisions) && return nothing + + model = e.model + + # Add all `Decision`s to the inner expression. 
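+    # Illustrative example (the component name is hypothetical): an input string like "100 + 2 * battery:value" is
+    # parsed by `_convert_to_expression` into the constant 100 and the decision tuple (2.0, "battery", "value");
+    # the loop below then adds the `value` variable of `battery` with coefficient 2.0 to the expression.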
+ for (coeff, cname, field) in e.decisions + if field == "value" + var = _value(component(model, cname)) + elseif field == "size" + var = _size(component(model, cname)) + elseif field == "count" + var = _count(component(model, cname)) + else + @critical "Wrong Decision accessor in unnamed expression" coeff decision = cname accessor = field + end + + JuMP.add_to_expression!.(e.value, var, coeff) + end + + return nothing +end diff --git a/src/core/node.jl b/src/core/node.jl new file mode 100644 index 0000000..4ec4d23 --- /dev/null +++ b/src/core/node.jl @@ -0,0 +1,236 @@ +""" +A `Node` represents a basic intersection/hub for energy flows. This can for example be some sort of bus (for electrical +systems). It enforces a nodal balance equation (= "energy that flows into it must flow out") for every +[`Snapshot`](@ref). Enabling the internal state of the `Node` allows it to act as energy storage, modifying the nodal +balance equation. This allows using `Node`s for various storage tasks (like batteries, hydro reservoirs, heat storages, +...). + +!!! details "Basic Examples" + A `Node` that represents an electrical bus: + ```yaml + bus: + type: Node + carrier: electricity + ``` + A `Node` that represents a simplified hydrogen storage: + ```yaml + store: + type: Node + carrier: hydrogen + has_state: true + state_lb: 0 + state_ub: 50 + ``` +""" +@kwdef struct Node <: _CoreComponent + # [Core] =========================================================================================================== + model::JuMP.Model + init_state::Ref{Symbol} = Ref(:empty) + constraint_safety::Bool + constraint_safety_cost::_ScalarInput + + # [Mandatory] ====================================================================================================== + name::_String + raw"""```{"mandatory": "yes", "values": "string", "default": "-"}``` + `Carrier` of this `Node`. All connecting components need to respect that. + """ + carrier::Carrier + + # [Optional] ======================================================================================================= + config::Dict{String, Any} = Dict() + ext::Dict{String, Any} = Dict() + addon::Union{String, Nothing} = nothing + conditional::Bool = false + + raw"""```{"mandatory": "no", "values": "`true` or `false`", "default": "`false`"}``` + If `true`, the `Node` is considered to have an internal state ("stateful `Node`"). This allows it to act as energy + storage. Connect `Connection`s or `Unit`s to it, acting as charger/discharger. + """ + has_state::Bool = false + + raw"""```{"mandatory": "no", "values": "numeric", "default": "``-\\infty``"}``` + Lower bound of the internal state, requires `has_state = true`. + """ + state_lb::_OptionalExpression = nothing + + raw"""```{"mandatory": "no", "values": "numeric", "default": "``+\\infty``"}``` + Upper bound of the internal state, requires `has_state = true`. + """ + state_ub::_OptionalExpression = nothing + + raw"""```{"mandatory": "no", "values": "`eq`, `geq`, or `disabled`", "default": "`eq`"}``` + Controls how the state considers the boundary between last and first `Snapshot`. `disabled` disables cyclic + behaviour of the state (see also `state_initial`), `eq` leads to the state at the end of the year being the initial + state at the beginning of the year, while `geq` does the same while allowing the end-of-year state to be higher (= + "allowing to destroy energy at the end of the year"). 
+ """ + state_cyclic::Symbol = :eq + + raw"""```{"mandatory": "no", "values": "numeric", "default": "-"}``` + Sets the initial state. Must be used in combination with `state_cyclic = disabled`. + """ + state_initial::_OptionalScalarInput = nothing + + raw"""```{"mandatory": "no", "values": "numeric", "default": "-"}``` + Sets the final state. Must be used in combination with `state_cyclic = disabled`. + """ + state_final::_OptionalScalarInput = nothing + + raw"""```{"mandatory": "no", "values": "numeric", "default": "`0`"}``` + Per `Snapshot` percentage loss of state (loosing 1% should be set as `0.01`). + """ + state_percentage_loss::_OptionalScalarInput = nothing + + raw"""```{"mandatory": "no", "values": "`enforce`, `destroy`, or `create`", "default": "`enforce`"}``` + Can only be used for `has_state = false`. `enforce` forces total injections to always be zero (similar to + Kirchhoff's current law), `create` allows "supply < demand", `destroy` allows "supply > demand", at this `Node`. + """ + nodal_balance::Symbol = :enforce + + raw"""```{"mandatory": "no", "values": "integer", "default": "-"}``` + TODO. + """ + sum_window_size::_OptionalScalarInput = nothing + + raw"""```{"mandatory": "no", "values": "integer", "default": "`1`"}``` + TODO. + """ + sum_window_step::_ScalarInput = 1 + + etdf_group::Union{_String, Nothing} = nothing # todo: retire this in favor of pf_zone + + # Powerflow + pf_slack::Bool = false + + # [Internal] ======================================================================================================= + # - + + # [External] ======================================================================================================= + # results::Union{Dict, Nothing} = nothing + + # [Optimization Container] ========================================================================================= + _ccoc = _CoreComponentOptContainer() +end + +_result_fields(::Node) = (:name, :carrier, :has_state, :nodal_balance) + +function _prepare!(node::Node) + return true +end + +function _isvalid(node::Node) + (node.state_cyclic in [:eq, :geq, :disabled]) || (@critical " invalid" node = node.name) + + if !isnothing(node.state_final) && node.state_cyclic != :disabled + @critical "Nodes with a fixed final state need to set `state_cyclic` to `disabled`" node = node.name node.state_cyclic + end + + if !isnothing(node.etdf_group) && node.has_state + @critical "Activating ETDF is not supported for stateful nodes" node = node.name + end + + if (node.nodal_balance === :sum) && _has_representative_snapshots(node.model) + @critical "Sum Nodes are not possible with representative Snapshots" node = node.name + end + + if node.nodal_balance === :sum + if node.sum_window_size == length(_iesopt(node.model).model.T) + if node.sum_window_step != 1 + @error "`sum_window_step` should probably be 1" node = node.name + end + end + if isnothing(node.sum_window_step) + @critical "`sum_window_step` undefined" node = node.name + end + end + + return true +end + +function _setup!(node::Node) + model = node.model + + node.con.nodalbalance = Vector{JuMP.ConstraintRef}(undef, _iesopt(model).model.T[end]) + + if !isnothing(node.etdf_group) + # Check if we need to create the current ETDF group. 
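+        # `aux.etdf.groups` maps each ETDF group name to the ids of its member `Node`s; this `Node`'s id is
+        # appended to that vector later on in `_node_exp_injection!`.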
+ if !haskey(_iesopt(model).aux.etdf.groups, node.etdf_group) + _iesopt(model).aux.etdf.groups[node.etdf_group] = [] + end + end + + return nothing +end + +function _result(node::Node, mode::String, field::String; result::Int=1) + if !(field in ["state", "nodal_balance", "injection", "extraction"]) + @error "Node cannot extract field" node = node.name field = field + return nothing + end + + if mode == "dual" && field == "nodal_balance" + # todo: JuMP dual result fix + if result != 1 + @error "Duals are currently only available for the first result (this is a limitation of the JuMP interface)" + end + return "$(node.name).nodal_balance.shadow_price", JuMP.shadow_price.(node.con.nodalbalance) + end + + if mode == "value" && field == "state" + return "$(node.name).state", JuMP.value.(node.var.state; result=result) + end + + if field == "injection" + if mode == "value" + return "$(node.name).injection", JuMP.value.(node.exp.injection; result=result) + elseif mode == "sum" + return "$(node.name).injection", sum(JuMP.value.(node.exp.injection; result=result)) + end + elseif field == "extraction" + if mode == "value" + return "$(node.name).extraction", -JuMP.value.(node.exp.injection; result=result) + elseif mode == "sum" + return "$(node.name).extraction", -sum(JuMP.value.(node.exp.injection; result=result)) + end + end + + @error "Unknown result extraction" node = node.name mode = mode field = field + return nothing +end + +include("node/exp_injection.jl") +include("node/var_state.jl") +include("node/var_pf_theta.jl") +include("node/con_state_bounds.jl") +include("node/con_nodalbalance.jl") +include("node/con_last_state.jl") + +function _construct_expressions!(node::Node) + @profile node.model _node_exp_injection!(node) + return nothing +end + +function _construct_variables!(node::Node) + @profile node.model _node_var_state!(node) + @profile node.model _node_var_pf_theta!(node) + return nothing +end + +function _after_construct_variables!(node::Node) + # We can now properly finalize the `state_lb`, and `state_ub`. + !isnothing(node.state_lb) && _finalize(node.state_lb) + !isnothing(node.state_ub) && _finalize(node.state_ub) + + return nothing +end + +function _construct_constraints!(node::Node) + @profile node.model _node_con_state_bounds!(node) + @profile node.model _node_con_nodalbalance!(node) + @profile node.model _node_con_last_state!(node) + return nothing +end + +function _construct_objective!(node::Node) + return nothing +end diff --git a/src/core/node/con_last_state.jl b/src/core/node/con_last_state.jl new file mode 100644 index 0000000..4b70adf --- /dev/null +++ b/src/core/node/con_last_state.jl @@ -0,0 +1,87 @@ +@doc raw""" + _node_con_last_state!(model::JuMP.Model, node::Node) + +Add the constraint defining the bounds of the `node`'s state during the last Snapshot to the `model`, if +`node.has_state == true`. + +This is necessary since it could otherwise happen, that the state following the last Snapshot +is actually not feasible (e.g. we could charge a storage by more than it's state allows for). The equations are based on +the construction of the overall state variable. 
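+For example, a storage `Node` whose state already sits at `state_ub` in the last `Snapshot` must not receive a
+positive net injection there, since the implied post-horizon state would exceed that bound.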
+ +```math +\begin{aligned} + & \text{state}_{end} \cdot \text{factor}^\omega_t + \text{injection}_{end} \cdot \omega_t \geq \text{state}_{lb} \\ + & \text{state}_{end} \cdot \text{factor}^\omega_t + \text{injection}_{end} \cdot \omega_t \leq \text{state}_{ub} +\end{aligned} +``` + +Here ``\omega_t`` is the `weight` of `Snapshot` `t`, and ``\text{factor}`` is either `1.0` (if there are now percentage +losses configured), or `(1.0 - node.state_percentage_loss)` otherwise. + +!!! note "Constraint safety" + The lower and upper bound constraint are subject to penalized slacks. +""" +function _node_con_last_state!(node::Node) + if !isnothing(node.etdf_group) + return nothing + end + + if !node.has_state + return nothing + end + + model = node.model + + factor = isnothing(node.state_percentage_loss) ? 1.0 : (1.0 - node.state_percentage_loss) + t = _iesopt(model).model.T[end] + + injection_t = t + if _has_representative_snapshots(model) + if !_iesopt(model).model.snapshots[t].is_representative + injection_t = _iesopt(model).model.snapshots[t].representative + end + end + + lb = _get(node.state_lb, t) + ub = _get(node.state_ub, t) + + if !isnothing(node.state_final) + if (!isnothing(lb) && (node.state_final < lb)) || (!isnothing(ub) && (node.state_final > ub)) + @warn "`state_final` is out of bounds and will be overwritten" node = node.name state_final = + node.state_final lb ub + end + + lb = node.state_final + ub = node.state_final + end + + if !isnothing(lb) && node.nodal_balance != :create + node.con.last_state_lb = @constraint( + model, + lb <= node.var.state[t] * (factor^_weight(model, t)) + node.exp.injection[injection_t] * _weight(model, t), + base_name = _base_name(node, "last_state_lb"), + container = Array + ) + end + + if !isnothing(ub) && node.nodal_balance != :destroy + node.con.last_state_ub = @constraint( + model, + ub >= node.var.state[t] * (factor^_weight(model, t)) + node.exp.injection[injection_t] * _weight(model, t), + base_name = _base_name(node, "last_state_ub") + ) + end + + if node.constraint_safety + if !isnothing(node.state_lb) + _iesopt(model).aux.constraint_safety_penalties[node.con.last_state_lb] = + (component_name=node.name, t=t, description="last_state_lb", penalty=node.constraint_safety_cost) + end + if !isnothing(node.state_ub) + _iesopt(model).aux.constraint_safety_penalties[node.con.last_state_ub] = + (component_name=node.name, t=t, description="last_state_ub", penalty=node.constraint_safety_cost) + end + end + + return nothing +end diff --git a/src/core/node/con_nodalbalance.jl b/src/core/node/con_nodalbalance.jl new file mode 100644 index 0000000..c365db9 --- /dev/null +++ b/src/core/node/con_nodalbalance.jl @@ -0,0 +1,188 @@ +@doc raw""" + _node_con_nodalbalance!(model::JuMP.Model, node::Node) + +Add the constraint describing the nodal balance to the `model`. + +Depending on whether the `node` is stateful or not, this constructs different representations: + +> `if node.has_state == true` +> ```math +> \begin{aligned} +> & \text{state}_t = \text{state}_{t-1} \cdot \text{factor}^\omega_{t-1} + \text{injection}_{t-1} \cdot \omega_{t-1}, \qquad \forall t \in T \setminus \{1\} \\ +> \\ +> & \text{state}_1 = \text{state}_{end} \cdot \text{factor}^\omega_{end} + \text{injection}_{end} \cdot \omega_{end} +> \end{aligned} +> ``` + +Here $\omega_t$ is the `weight` of `Snapshot` `t`, and $\text{factor}$ is either `1.0` (if there are now percentage +losses configured), or `(1.0 - node.state_percentage_loss)` otherwise. 
``\text{injection}_{t}`` describes the overall +injection (all feed-ins minus all withdrawals). ``end`` indicates the last snapshot in ``T``. Depending on the setting +of `state_cyclic` the second constraint is written as ``=`` (`"eq"`) or ``\leq`` (`"leq"`). The latter allows the +destruction of excess energy at the end of the total time period to help with feasibility. + +> `if node.has_state == false` +> ```math +> \begin{aligned} +> & \text{injection}_{t} = 0, \qquad \forall t \in T \\ +> \end{aligned} +> ``` + +This equation can further be configured using the `nodal_balance` parameter, which accepts `enforce` (resulting in +``=``), `create` (resulting in ``\leq``; allowing the creation of energy - or "negative injections"), and `destroy` ( +resulting in ``\geq``; allowing the destruction of energy - or "positive injections"). This can be used to model some +form of energy that can either be sold (using a `destroy` `Profile` connected to this `Node`), or "wasted into the air" +using the `destroy` setting of this `Node`. +""" +function _node_con_nodalbalance!(node::Node) + if !isnothing(node.etdf_group) + return nothing + end + + model = node.model + + if node.has_state + factor = isnothing(node.state_percentage_loss) ? 1.0 : (1.0 - node.state_percentage_loss) + + for t in _iesopt(model).model.T + if (t == 1) && (node.state_cyclic === :disabled) + # If state_cyclic is disabled, we skip the basic state calculation. But in order to make sure that + # any amount of energy exchanged in the last snapshot, we use the special constraint for t=1 + # to still cap that using the upper/lower bound. This is handled in `constr_last_state.jl`. + node.con.nodalbalance[1] = @constraint(model, 0 == 0) + # The above "dummy" constraint is only used to make sure that the first entry in the array is not empty, + # which would lead to an error in result extraction. + continue + end + + t_other = t == 1 ? 
_iesopt(model).model.T[end] : (t - 1) + injection_t_other = _iesopt(model).model.snapshots[t_other].representative + + if (t == 1) && (node.state_cyclic === :geq) + if node.nodal_balance === :create + @warn "Setting `nodal_balance = create` for Nodes with `state_cyclic = geq` may not be what you want" node = + node.name + end + + node.con.nodalbalance[t] = @constraint( + model, + node.var.state[t] <= + node.var.state[t_other] * (factor^_weight(model, t_other)) + + node.exp.injection[injection_t_other] * _weight(model, t_other), + base_name = _base_name(node, "nodalbalance[$t]") + ) + elseif (t == 1) && (node.state_cyclic === :leq) + if node.nodal_balance === :destroy + @warn "Setting `nodal_balance = destroy` for Nodes with `state_cyclic = leq` may not be what you want" node = + node.name + end + + node.con.nodalbalance[t] = @constraint( + model, + node.var.state[t] >= + node.var.state[t_other] * (factor^_weight(model, t_other)) + + node.exp.injection[injection_t_other] * _weight(model, t_other), + base_name = _base_name(node, "nodalbalance[$t]") + ) + else + if node.nodal_balance === :enforce + node.con.nodalbalance[t] = @constraint( + model, + node.var.state[t] == + node.var.state[t_other] * (factor^_weight(model, t_other)) + + node.exp.injection[injection_t_other] * _weight(model, t_other), + base_name = _base_name(node, "nodalbalance[$t]") + ) + elseif node.nodal_balance === :create + node.con.nodalbalance[t] = @constraint( + model, + node.var.state[t] >= + node.var.state[t_other] * (factor^_weight(model, t_other)) + + node.exp.injection[injection_t_other] * _weight(model, t_other), + base_name = _base_name(node, "nodalbalance[$t]") + ) + elseif node.nodal_balance === :destroy + node.con.nodalbalance[t] = @constraint( + model, + node.var.state[t] <= + node.var.state[t_other] * (factor^_weight(model, t_other)) + + node.exp.injection[injection_t_other] * _weight(model, t_other), + base_name = _base_name(node, "nodalbalance[$t]") + ) + end + end + end + elseif node.nodal_balance === :enforce + if !_has_representative_snapshots(model) + node.con.nodalbalance = @constraint( + model, + [t = _iesopt(model).model.T], + node.exp.injection[t] == 0, + base_name = _base_name(node, "nodalbalance"), + container = Array + ) + else + # Create all representatives. + _repr = Dict( + t => @constraint(model, node.exp.injection[t] == 0, base_name = _base_name(node, "nodalbalance[$(t)]")) for t in _iesopt(model).model.T if _iesopt(model).model.snapshots[t].is_representative + ) + + # Create all constraints, either as themselves or their representative. + node.con.nodalbalance = collect( + _iesopt(model).model.snapshots[t].is_representative ? _repr[t] : + _repr[_iesopt(model).model.snapshots[t].representative] for t in _iesopt(model).model.T + ) + end + elseif node.nodal_balance === :create + if !_has_representative_snapshots(model) + node.con.nodalbalance = @constraint( + model, + [t = _iesopt(model).model.T], + node.exp.injection[t] <= 0, + base_name = _base_name(node, "nodalbalance"), + container = Array + ) + else + # Create all representatives. + _repr = Dict( + t => @constraint(model, node.exp.injection[t] <= 0, base_name = _base_name(node, "nodalbalance[$(t)]")) for t in _iesopt(model).model.T if _iesopt(model).model.snapshots[t].is_representative + ) + + # Create all constraints, either as themselves or their representative. + node.con.nodalbalance = collect( + _iesopt(model).model.snapshots[t].is_representative ? 
_repr[t] : + _repr[_iesopt(model).model.snapshots[t].representative] for t in _iesopt(model).model.T + ) + end + elseif node.nodal_balance === :destroy + if !_has_representative_snapshots(model) + node.con.nodalbalance = @constraint( + model, + [t = _iesopt(model).model.T], + node.exp.injection[t] >= 0, + base_name = _base_name(node, "nodalbalance"), + container = Array + ) + else + # Create all representatives. + _repr = Dict( + t => @constraint(model, node.exp.injection[t] >= 0, base_name = _base_name(node, "nodalbalance[$(t)]")) for t in _iesopt(model).model.T if _iesopt(model).model.snapshots[t].is_representative + ) + + # Create all constraints, either as themselves or their representative. + node.con.nodalbalance = collect( + _iesopt(model).model.snapshots[t].is_representative ? _repr[t] : + _repr[_iesopt(model).model.snapshots[t].representative] for t in _iesopt(model).model.T + ) + end + elseif node.nodal_balance === :sum + T = _iesopt(model).model.T[end] + begin_steps = [t for t in 1:(node.sum_window_step):T if (t - 1 + node.sum_window_size) <= T] + node.con.nodalbalance = @constraint( + model, + [t0 = begin_steps], + sum(node.exp.injection[t] for t in t0:(t0 - 1 + node.sum_window_size)) == 0, + base_name = _base_name(node, "nodalbalance"), + container = Array + ) + end +end diff --git a/src/core/node/con_state_bounds.jl b/src/core/node/con_state_bounds.jl new file mode 100644 index 0000000..6f8f73d --- /dev/null +++ b/src/core/node/con_state_bounds.jl @@ -0,0 +1,57 @@ +@doc raw""" + _node_con_state_bounds!(model::JuMP.Model, node::Node) + +Add the constraint defining the bounds of the `node`'s state to the `model`, if `node.has_state == true`. + +```math +\begin{aligned} + & \text{state}_t \geq \text{state}_{lb}, \qquad \forall t \in T \\ + & \text{state}_t \leq \text{state}_{ub}, \qquad \forall t \in T +\end{aligned} +``` + +!!! note "Constraint safety" + The lower and upper bound constraint are subject to penalized slacks. +""" +function _node_con_state_bounds!(node::Node) + if !node.has_state + return nothing + end + + model = node.model + + if !isnothing(node.state_lb) + node.con.state_lb = @constraint( + model, + [t = _iesopt(model).model.T], + node.var.state[t] >= _get(node.state_lb, t), + base_name = _base_name(node, "state_lb"), + container = Array + ) + end + if !isnothing(node.state_ub) + node.con.state_ub = @constraint( + model, + [t = _iesopt(model).model.T], + node.var.state[t] <= _get(node.state_ub, t), + base_name = _base_name(node, "state_ub"), + container = Array + ) + end + + # Handle constraint safety (if enabled). + if node.constraint_safety + if !isnothing(node.state_lb) + @simd for t in _iesopt(model).model.T + _iesopt(model).aux.constraint_safety_penalties[node.con.state_lb[t]] = + (component_name=node.name, t=t, description="state_lb", penalty=node.constraint_safety_cost) + end + end + if !isnothing(node.state_ub) + @simd for t in _iesopt(model).model.T + _iesopt(model).aux.constraint_safety_penalties[node.con.state_ub[t]] = + (component_name=node.name, t=t, description="state_ub", penalty=node.constraint_safety_cost) + end + end + end +end diff --git a/src/core/node/exp_injection.jl b/src/core/node/exp_injection.jl new file mode 100644 index 0000000..8e3b68b --- /dev/null +++ b/src/core/node/exp_injection.jl @@ -0,0 +1,20 @@ +@doc raw""" + _node_exp_injection!(node::Node) + +Add an empty (`JuMP.AffExpr(0)`) expression to the `node` that keeps track of feed-in and withdrawal of energy. 
+ +This constructs the expression ``\text{injection}_t, \forall t \in T`` that is utilized in +`node.con.nodalbalance`. Core components (`Connection`s, `Profile`s, and `Unit`s) that feed energy into +this node add to it, all others subtract from it. A stateless node forces this nodal balance to always equal `0` which +essentially describes "generation = demand". +""" +function _node_exp_injection!(node::Node) + model = node.model + + node.exp.injection = collect(JuMP.AffExpr(0) for _ in _iesopt(model).model.T) + + if !isnothing(node.etdf_group) + # Add this node's "net positions" (= it's injections) to the overall ETDF group. + push!(_iesopt(model).aux.etdf.groups[node.etdf_group], node.id) + end +end diff --git a/src/core/node/var_pf_theta.jl b/src/core/node/var_pf_theta.jl new file mode 100644 index 0000000..ece4fbe --- /dev/null +++ b/src/core/node/var_pf_theta.jl @@ -0,0 +1,35 @@ +@doc raw""" + _node_var_pf_theta!(model::JuMP.Model, node::Node) + +Construct the auxiliary phase angle variable for the `linear_angle` power flow algorithm. + +This needs the global `Powerflow` addon, configured with `mode: linear_angle`, and constructs a variable `var_pf_theta` +for each `Snapshot`. If the `pf_slack` property of this `Node` is set to `true`, it does not add a variable but sets +`var_pf_theta[t] = 0` for each `Snapshot`. +``` +""" +function _node_var_pf_theta!(node::Node) + model = node.model + + !haskey(_iesopt(model).input.addons, "Powerflow") && return + + @error "Global addon based powerflow is deprecated until we finished the move to PowerModels.jl" + return nothing + + if _has_representative_snapshots(model) + @error "Representative Snapshots are currently not supported for models using Powerflow" + end + + if _iesopt(model).input.addons["Powerflow"].config["__settings__"].mode === :linear_angle + if node.pf_slack + node.var.pf_theta = zeros(length(_iesopt(model).model.T)) + else + node.var.pf_theta = @variable( + model, + [t = _iesopt(model).model.T], + base_name = _base_name(node, "pf_theta"), + container = Array + ) + end + end +end diff --git a/src/core/node/var_state.jl b/src/core/node/var_state.jl new file mode 100644 index 0000000..23ddab2 --- /dev/null +++ b/src/core/node/var_state.jl @@ -0,0 +1,25 @@ +@doc raw""" + _node_var_state!(model::JuMP.Model, node::Node) + +Add the variable representing the state of this `node` to the `model`, if `node.has_state == true`. This can be accessed +via `node.var.state[t]`. + +Additionally, if the state's initial value is specified via `state_initial` the following gets added: +```math +\text{state}_1 = \text{state}_{initial} +``` +""" +function _node_var_state!(node::Node) + if !node.has_state + return nothing + end + + model = node.model + + node.var.state = + @variable(model, [t = _iesopt(model).model.T], base_name = _base_name(node, "state"), container = Array) + + if !isnothing(node.state_initial) + JuMP.fix(node.var.state[1], node.state_initial; force=false) + end +end diff --git a/src/core/profile.jl b/src/core/profile.jl new file mode 100644 index 0000000..b014dfc --- /dev/null +++ b/src/core/profile.jl @@ -0,0 +1,192 @@ +""" +A `Profile` allows representing "model boundaries" - parts of initial problem that are not endogenously modelled - with +a support for time series data. Examples are hydro reservoir inflows, electricity demand, importing gas, and so on. +Besides modelling fixed profiles, they also allow different ways to modify the value endogenously. 
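The injection expression built in `_node_exp_injection!` above is a per-snapshot `JuMP.AffExpr` accumulator that components add to or subtract from, and that a stateless node later pins to zero. A minimal self-contained sketch of that pattern (plain JuMP, not the IESopt API; all names and values are hypothetical):

```julia
using JuMP

T = 1:4
model = Model()

@variable(model, 0 <= gen[T] <= 10)     # stand-in for a Unit feeding the node
demand = [3.0, 5.0, 2.0, 4.0]           # stand-in for a fixed demand Profile

# node.exp.injection analogue: one empty AffExpr per snapshot.
injection = [AffExpr(0.0) for _ in T]
for t in T
    add_to_expression!(injection[t], gen[t])       # feed-in adds
    add_to_expression!(injection[t], -demand[t])   # withdrawal subtracts
end

# Stateless node with nodal_balance = enforce: "generation = demand" in every snapshot.
@constraint(model, [t = T], injection[t] == 0)
```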
+""" +@kwdef struct Profile <: _CoreComponent + # [Core] =========================================================================================================== + model::JuMP.Model + init_state::Ref{Symbol} = Ref(:empty) + constraint_safety::Bool + constraint_safety_cost::_ScalarInput + + # [Mandatory] ====================================================================================================== + name::_String + raw"""```{"mandatory": "yes", "values": "string", "default": "-"}``` + `Carrier` of this `Profile`. + """ + carrier::Carrier + + # [Optional] ======================================================================================================= + config::Dict{String, Any} = Dict() + ext::Dict{String, Any} = Dict() + addon::Union{String, Nothing} = nothing + conditional::Bool = false + + value::_OptionalExpression = nothing + node_from::Union{_String, Nothing} = nothing + node_to::Union{_String, Nothing} = nothing + + mode::Symbol = :fixed + lb::_OptionalExpression = nothing + ub::_OptionalExpression = nothing + cost::_OptionalExpression = nothing + + allow_deviation::Symbol = :off + cost_deviation::_OptionalScalarInput = nothing + + # [Internal] ======================================================================================================= + # - + + # [External] ======================================================================================================= + # results::Union{Dict, Nothing} = nothing + + # [Optimization Container] ========================================================================================= + _ccoc = _CoreComponentOptContainer() +end + +_result_fields(::Profile) = (:name, :carrier, :node_from, :node_to, :mode) + +function _prepare!(profile::Profile) + model = profile.model + + # Extract the carrier from the connected nodes. 
+ if !isnothing(profile.node_from) && (profile.carrier != component(model, profile.node_from).carrier) + @critical "Profile mismatch" profile = profile.name node_from = profile.node_from + end + if !isnothing(profile.node_to) && (profile.carrier != component(model, profile.node_to).carrier) + @critical "Profile mismatch" profile = profile.name node_to = profile.node_to + end + + return true +end + +function _isvalid(profile::Profile) + if isnothing(profile.carrier) + @critical " could not be detected correctly" profile = profile.name + end + + if (profile.mode === :create) || (profile.mode === :destroy) + !isnothing(profile.lb) && (@warn "Setting is ignored" profile = profile.name mode = profile.mode) + !isnothing(profile.ub) && (@warn "Setting is ignored" profile = profile.name mode = profile.mode) + end + + if !(profile.mode in [:fixed, :create, :destroy, :ranged]) + @critical "Invalid " profile = profile.name + end + + if !isnothing(profile.value) && (profile.mode != :fixed) + @critical "Setting of Profile may result in unexpected behaviour, because is not `fixed`" profile = + profile.name mode = profile.mode + end + + if !isnothing(profile.cost_deviation) || (profile.allow_deviation != :off) + @error "Profile deviations are deprecated" profile = profile.name + end + + return true +end + +function _setup!(profile::Profile) + return nothing +end + +function _result(profile::Profile, mode::String, field::String; result::Int=1) + if field != "value" + @error "Profile cannot extract field" profile = profile.name field = field + return nothing + end + + if mode == "dual" + @error "Extracting of Profile is currently not supported" profile = profile.name + return nothing + end + + value = JuMP.value.(profile.exp.value; result=result) + + if mode == "value" + return "$(profile.name).value", value + elseif mode == "sum" + return "Profile.sum.value", sum(value) + end + + @error "Unknown result extraction" profile = profile.name mode = mode field = field + return nothing +end + +include("profile/exp_value.jl") +include("profile/var_aux_value.jl") +include("profile/con_value_bounds.jl") +include("profile/obj_cost.jl") + +function _construct_expressions!(profile::Profile) + @profile profile.model _profile_exp_value!(profile) + return nothing +end + +function _construct_variables!(profile::Profile) + @profile profile.model _profile_var_aux_value!(profile) + return nothing +end + +function _after_construct_variables!(profile::Profile) + model = profile.model + components = _iesopt(model).model.components + + if !isnothing(profile.value) + if (profile.mode === :fixed) && _iesopt_config(model).parametric + # Create all representatives. + _repr = Dict( + t => @variable(model, base_name = _base_name(profile, "aux_value[$(t)]")) for + t in _iesopt(model).model.T if _iesopt(model).model.snapshots[t].is_representative + ) + # Create all variables, either as themselves or their representative. + profile.var.aux_value = collect( + _iesopt(model).model.snapshots[t].is_representative ? _repr[t] : + _repr[_iesopt(model).model.snapshots[t].representative] for t in _iesopt(model).model.T + ) + end + + # After all variables are constructed the `value` can be finalized and used. + _finalize(profile.value) + for t in _iesopt(model).model.T + _repr_t = + _iesopt(model).model.snapshots[t].is_representative ? 
t : + _iesopt(model).model.snapshots[t].representative + val = _get(profile.value, _repr_t) + + if (profile.mode === :fixed) && _iesopt_config(model).parametric + JuMP.fix(profile.var.aux_value[t], val; force=true) + JuMP.add_to_expression!(profile.exp.value[t], profile.var.aux_value[t]) + else + JuMP.add_to_expression!(profile.exp.value[t], val) + end + end + end + + # Now we can be sure that the expression is properly setup, add it to the respective Nodes. + if profile.node_from !== nothing + JuMP.add_to_expression!.(components[profile.node_from].exp.injection, profile.exp.value, -1.0) + end + if profile.node_to !== nothing + JuMP.add_to_expression!.(components[profile.node_to].exp.injection, profile.exp.value) + end + + # We can now also properly finalize the `lb`, `ub`, and `cost`. + !isnothing(profile.lb) && _finalize(profile.lb) + !isnothing(profile.ub) && _finalize(profile.ub) + !isnothing(profile.cost) && _finalize(profile.cost) + + return nothing +end + +function _construct_constraints!(profile::Profile) + @profile profile.model _profile_con_value_bounds!(profile) + return nothing +end + +function _construct_objective!(profile::Profile) + @profile profile.model _profile_obj_cost!(profile) + + return nothing +end diff --git a/src/core/profile/con_value_bounds.jl b/src/core/profile/con_value_bounds.jl new file mode 100644 index 0000000..775433a --- /dev/null +++ b/src/core/profile/con_value_bounds.jl @@ -0,0 +1,95 @@ +@doc raw""" + _profile_con_value_bounds!(model::JuMP.Model, profile::Profile) + +Add the constraint defining the bounds of this `profile` to the `model`. + +This heavily depends on the `mode` setting, as +it does nothing if the `mode` is set to `fixed`, or the `value` is actually controlled by an `Expression`. +The variable can be accessed via `profile.var.aux_value[t]`, but using the normal result extraction is recommended, +since that properly handles the `profile.exp.value` instead. + +Otherwise: + +> `if profile.mode === :create or profile.mode === :destroy` +> ```math +> \begin{aligned} +> & \text{aux_value}_t \geq 0, \qquad \forall t \in T +> \end{aligned} +> ``` + +> `if profile.mode === :ranged` +> ```math +> \begin{aligned} +> & \text{value}_t \geq \text{lb}_t, \qquad \forall t \in T \\ +> & \text{value}_t \leq \text{ub}_t, \qquad \forall t \in T +> \end{aligned} +> ``` + +Here, `lb` and `ub` can be left empty, which drops the respective constraint. +""" +function _profile_con_value_bounds!(profile::Profile) + model = profile.model + + if profile.mode === :fixed + # Since the whole Profile "value" is already handled, we do not need to constraint it. + return nothing + end + + # Constrain the `value` based on the setting of `mode`. + if profile.mode === :ranged + if !isnothing(profile.lb) + if !_has_representative_snapshots(model) + profile.con.value_lb = @constraint( + model, + [t = _iesopt(model).model.T], + profile.var.aux_value[t] >= _get(profile.lb, t), + base_name = _base_name(profile, "value_lb"), + container = Array + ) + else + # Create all representatives. + _repr = Dict( + t => @constraint( + model, + profile.var.aux_value[t] >= _get(profile.lb, t), + base_name = _base_name(profile, "value_lb[$(t)]") + ) for t in _iesopt(model).model.T if _iesopt(model).model.snapshots[t].is_representative + ) + + # Create all constraints, either as themselves or their representative. + profile.con.value_lb = collect( + _iesopt(model).model.snapshots[t].is_representative ? 
_repr[t] : + _repr[_iesopt(model).model.snapshots[t].representative] for t in _iesopt(model).model.T + ) + end + end + if !isnothing(profile.ub) + if !_has_representative_snapshots(model) + profile.con.value_ub = @constraint( + model, + [t = _iesopt(model).model.T], + profile.var.aux_value[t] <= _get(profile.ub, t), + base_name = _base_name(profile, "value_ub"), + container = Array + ) + else + # Create all representatives. + _repr = Dict( + t => @constraint( + model, + profile.var.aux_value[t] <= _get(profile.ub, t), + base_name = _base_name(profile, "value_ub[$(t)]") + ) for t in _iesopt(model).model.T if _iesopt(model).model.snapshots[t].is_representative + ) + + # Create all constraints, either as themselves or their representative. + profile.con.value_ub = collect( + _iesopt(model).model.snapshots[t].is_representative ? _repr[t] : + _repr[_iesopt(model).model.snapshots[t].representative] for t in _iesopt(model).model.T + ) + end + end + end + + return nothing +end diff --git a/src/core/profile/exp_value.jl b/src/core/profile/exp_value.jl new file mode 100644 index 0000000..470dbf9 --- /dev/null +++ b/src/core/profile/exp_value.jl @@ -0,0 +1,19 @@ +@doc raw""" + _profile_exp_value!(model::JuMP.Model, profile::Profile) + +Cosntruct the `JuMP.AffExpr` that keeps the total value of this `Profile` for each `Snapshot`. + +This is skipped if the `value` of this `Profile` is handled by an `Expression`. Otherwise it is intialized +based on `profile.value`. +""" +function _profile_exp_value!(profile::Profile) + model = profile.model + + profile.exp.value = Vector{JuMP.AffExpr}() + sizehint!(profile.exp.value, length(_iesopt(model).model.T)) + for t in _iesopt(model).model.T + push!(profile.exp.value, JuMP.AffExpr(0.0)) + end + + return nothing +end diff --git a/src/core/profile/obj_cost.jl b/src/core/profile/obj_cost.jl new file mode 100644 index 0000000..9f165f4 --- /dev/null +++ b/src/core/profile/obj_cost.jl @@ -0,0 +1,35 @@ +@doc raw""" + _profile_obj_cost!(model::JuMP.Model, profile::Profile) + +Add the (potential) cost of this `Profile` to the global objective function. + +The `profile.cost` setting specifies a potential cost for the creation ("resource costs", i.e. importing gas into the +model) or destruction ("penalties", i.e. costs linked to the emission of CO2). It can have a unique value for every +`Snapshot`, i.e. allowing to model a time-varying gas price throughout the year. + +The contribution to the global objective function is as follows: +```math +\sum_{t\in T} \text{value}_t \cdot \text{profile.cost}_t \cdot \omega_t +``` + +Here $\omega_t$ is the `weight` of `Snapshot` `t`, and ``\text{value}_t`` actually refers to the value of +`profile.exp.value[t]` (and not only on the maybe non-existing variable). 
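The objective contribution described above is a plain weighted sum over the value expression. A standalone sketch of that term in JuMP (illustrative numbers; `weight` and `cost` stand in for the Snapshot weights and `profile.cost`, and the variable replaces `profile.exp.value`):

```julia
using JuMP

T = 1:3
model = Model()
@variable(model, val[T] >= 0)         # stands in for profile.exp.value
weight = [1.0, 2.0, 1.0]              # Snapshot weights ω_t
cost   = [30.0, 35.0, 28.0]           # per-snapshot cost, e.g. a time-varying gas price

obj_cost = AffExpr(0.0)
for t in T
    add_to_expression!(obj_cost, val[t], weight[t] * cost[t])
end
@objective(model, Min, obj_cost)
```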
+""" +function _profile_obj_cost!(profile::Profile) + if isnothing(profile.cost) + return nothing + end + + model = profile.model + + # todo: this is inefficient: we are building up an AffExpr to add it to the objective; instead: add each term + # todo: furthermore, this always calls VariableRef * Float, which is inefficient, and could be done in add_to_expression + profile.obj.cost = JuMP.AffExpr(0.0) + for t in _iesopt(model).model.T + JuMP.add_to_expression!(profile.obj.cost, profile.exp.value[t], _weight(model, t) * _get(profile.cost, t)) + end + + push!(_iesopt(model).model.objectives["total_cost"].terms, profile.obj.cost) + + return nothing +end diff --git a/src/core/profile/var_aux_value.jl b/src/core/profile/var_aux_value.jl new file mode 100644 index 0000000..93231dc --- /dev/null +++ b/src/core/profile/var_aux_value.jl @@ -0,0 +1,63 @@ +@doc raw""" + _profile_var_aux_value!(model::JuMP.Model, profile::Profile) + +Add the variable that is used in this `Profile`s value to the `model`. + +The variable `var_value[t]` is constructed and is linked to the correct `Node`s. There are different ways, IESopt +interprets this, based on the setting of `profile.mode`: + +1. **fixed**: The value is already handled by the constant term of `profile.exp.value` and NO variable is constructed. +2. **create**, **destroy**, or **ranged**: This models the creation or destruction of energy - used mainly to represent model + boundaries, and energy that comes into the model or leaves the model's scope. It is however important that `create` should mostly be used feeding into a `Node` (`profile.node_from = nothing`) and + `destroy` withdrawing from a `Node` (`profile.node_to = nothing`). If `lb` and `ub` are defined, `ranged` can be used + that allows a more detailled control over the `Profile`, specifying upper and lower bounds for every `Snapshot`. See + `_profile_con_value_bounds!(profile::Profile)` for details on the specific bounds for each case. + +This variable is added to the `profile.exp.value`. Additionally, the energy (that `profile.exp.value` represents) +gets "injected" at the `Node`s that the `profile` is connected to, resulting in +```math +\begin{aligned} + & \text{profile.node}_{from}\text{.injection}_t = \text{profile.node}_{from}\text{.injection}_t - \text{value}_t, \qquad \forall t \in T \\ + & \text{profile.node}_{to}\text{.injection}_t = \text{profile.node}_{to}\text{.injection}_t + \text{value}_t, \qquad \forall t \in T +\end{aligned} +``` +""" +function _profile_var_aux_value!(profile::Profile) + model = profile.model + + if profile.mode === :fixed + # This Profile's value is already added to the value expression. Nothing to do here. + else + # Create the variable. + if !_has_representative_snapshots(model) + profile.var.aux_value = @variable( + model, + [t = _iesopt(model).model.T], + base_name = _base_name(profile, "aux_value"), + container = Array + ) + else + # Create all representatives. + _repr = Dict( + t => @variable(model, base_name = _base_name(profile, "aux_value[$(t)]")) for + t in _iesopt(model).model.T if _iesopt(model).model.snapshots[t].is_representative + ) + + # Create all variables, either as themselves or their representative. + profile.var.aux_value = collect( + _iesopt(model).model.snapshots[t].is_representative ? _repr[t] : + _repr[_iesopt(model).model.snapshots[t].representative] for t in _iesopt(model).model.T + ) + end + + if (profile.mode === :create) || (profile.mode === :destroy) + # Properly set the lower bound. 
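A `mode: create` Profile, as described above, boils down to a non-negative slack that may only add energy to a node's balance, typically with a cost attached. A minimal sketch in plain JuMP (not the IESopt API; names and numbers are illustrative):

```julia
using JuMP

T = 1:3
model = Model()

@variable(model, gen[T] >= 0)          # endogenous generation
@variable(model, aux_value[T] >= 0)    # "created" energy, lower bound 0 as for mode = :create
demand = [5.0, 7.0, 6.0]

# Balance at the node: creation counts as additional feed-in.
@constraint(model, [t = T], gen[t] + aux_value[t] - demand[t] == 0)

# Creating energy is allowed but penalized.
@objective(model, Min, sum(100.0 * aux_value[t] for t in T))
```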
+ JuMP.set_lower_bound.(profile.var.aux_value, 0) + end + + # Add it to the expression. + JuMP.add_to_expression!.(profile.exp.value, profile.var.aux_value) + end + + return nothing +end diff --git a/src/core/snapshot.jl b/src/core/snapshot.jl new file mode 100644 index 0000000..179f994 --- /dev/null +++ b/src/core/snapshot.jl @@ -0,0 +1,132 @@ +""" + struct Snapshot + name + id + weight + end + +Represent a specific timestamp, that can be tied to timeseries values. + +Each `Snapshot` expects a `name`, that can be used to hold a timestamp (as `String`; therefore supporting arbitrary +formats). The `weight` (default = 1.0) specifies the "probabilistic weight" of this `Snapshot` or the length of the +timeperiod that **begins** there (a `weight` of 2 can therefore represent a 2-hour-resolution; this also allows a +variable temporal resolution throughout the year/month/...). +""" +Base.@kwdef struct Snapshot + # mandatory + name::_String + id::_ID + + # optional + weight::_ScalarInput = 1.0 + + is_representative::Bool = true + representative::_ID +end + +function _parse_snapshots!(model::JuMP.Model) + config = _iesopt_config(model).optimization.snapshots + + # Check for Snapshot aggregation. + if !isnothing(config.aggregate) + @warn "Snapshot aggregation is an experimental feature, that does not work correctly with expressions (and maybe other stuff) - using it is not advised" + + T_orig, T_factor = config.count, config.aggregate + + if (T_orig ÷ T_factor) != (T_orig / T_factor) + @critical "Cannot aggregate snapshots based on non exact divisor" T = T_orig div = T_factor + end + + T = _ID(T_orig ÷ T_factor) + _iesopt(model).model.snapshots = Dict{_ID, Snapshot}( + t => Snapshot(; name="S$t ", id=t, weight=T_factor) for t in 1:T + ) + + # _iesopt_config(model)._perform_snapshot_aggregation = T_factor # todo: config refactor + _iesopt(model).model.T = 1:T + + @info "Aggregated into $(length(_iesopt(model).model.snapshots)) snapshots" + return nothing + end + + # Set up `T`. + _iesopt(model).model.T = _ID.(1:(config.count)) + + # Set up Snapshot names. + if !isnothing(config.names) + column, file = String.(split(config.names, "@")) + # Make sure the parsed column is actually interpreted as Strings. + names = string.(_getfromcsv(model, file, column)) + else + names = ["t$t" for t in _iesopt(model).model.T] + end + + # Set up Snapshot weights. + if !isnothing(config.weights) + if config.weights isa String + column, file = String.(split(config.weights, "@")) + weights = _getfromcsv(model, file, column) + elseif config.weights isa Number + weights = ones(length(_iesopt(model).model.T)) .* config.weights + end + else + weights = ones(length(_iesopt(model).model.T)) + end + + # Set up repesentatives and representation information. + if !isnothing(config.representatives) + column, file = String.(split(config.representatives, "@")) + _repr = _getfromcsv(model, file, column) + is_representative = ismissing.(_repr) + + repr_indices = [ismissing(_repr[t]) ? missing : findfirst(==(_repr[t]), names) for t in _iesopt(model).model.T] + if any(isnothing.(repr_indices)) + @critical "Missing repesentative detected; make sure that all names are actual Snapshot names" + end + + representatives = + [ismissing(_repr[t]) ? 
t : _iesopt(model).model.T[repr_indices[t]] for t in _iesopt(model).model.T] + + @info "Activated representative Snapshots" n = sum(is_representative) + _has_representative_snapshots(model) = true + + if any(weights[t] != weights[1] for t in _iesopt(model).model.T[2:end]) + @critical "Representative Snapshots require equal `weights` for every Snapshot" + end + else + is_representative = ones(Bool, length(_iesopt(model).model.T)) + representatives = _iesopt(model).model.T + end + + _iesopt(model).model.snapshots = Dict{_ID, Snapshot}( + t => Snapshot(; + name=names[t], + id=t, + weight=weights[t], + is_representative=is_representative[t], + representative=representatives[t], + ) for t in _iesopt(model).model.T + ) + + @info "Parsed $(length(_iesopt(model).model.snapshots)) snapshots" + + return nothing +end + +_snapshot(model::JuMP.Model, t::_ID) = _iesopt(model).model.snapshots[t] +_weight(model::JuMP.Model, t::_ID) = _iesopt(model).model.snapshots[t].weight # TODO: this should safety check with non-equal weights and representative snapshots + +function _snapshot_aggregation!(model::JuMP.Model, data::Vector) + config = _iesopt_config(model).optimization.snapshots + + if !isnothing(config.aggregate) + if length(data) == 1 + return data + end + + scale = config.aggregate + return [sum(@view data[((t - 1) * scale + 1):(t * scale)]) / scale for t in _iesopt(model).model.T] + else + return data + end +end diff --git a/src/core/unit.jl b/src/core/unit.jl new file mode 100644 index 0000000..ff12561 --- /dev/null +++ b/src/core/unit.jl @@ -0,0 +1,417 @@ +""" +A `Unit` allows transforming one (or many) forms of energy into another one (or many), given some constraints and costs. +""" +@kwdef struct Unit <: _CoreComponent + # [Core] =========================================================================================================== + model::JuMP.Model + init_state::Ref{Symbol} = Ref(:empty) + constraint_safety::Bool + constraint_safety_cost::_ScalarInput + + # [Mandatory] ====================================================================================================== + name::_String + + raw"""```{"mandatory": "yes", "values": "string", "default": "-"}``` + The conversion expression describing how this `Unit` transforms energy. Specified in the form of "$\alpha \cdot + carrier_1 + \beta \cdot carrier_2$ -> $\gamma \cdot carrier_3 + \delta \cdot carrier_4$". Coefficients allow simple + numerical calculations, but are not allowed to include spaces (so e.g. `(1.0/9.0)` is valid). Coefficients are + allowed to be `NumericalInput`s, resulting in `column@data_file` being a valid coefficient (this can be used e.g. + for time-varying COPs of heatpumps). + """ + conversion::_String + + raw"""```{"mandatory": "yes", "values": "string", "default": "-"}``` + Maximum capacity of this `Unit`, to be given in the format `X in/out:carrier` where `X` is the amount, `in` or `out` + (followed by `:`) specifies whether the limit is to be placed on the in- our output of this `Unit`, and `carrier` + specifies the respective `Carrier`. Example: `100 in:electricity` (to limit the "input rating"). 
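The `conversion` format documented above can be illustrated with a small standalone parser that mirrors the splitting done later in `_convert_unit_conversion_dict!`. This is a simplified sketch: it ignores the `~` placeholder and the `column@data_file` coefficients that the real parser supports.

```julia
# Split a conversion expression such as "1.0 gas -> 0.4 electricity + 0.5 heat"
# into per-carrier coefficients for the input and output side.
function parse_conversion(expr::AbstractString)
    lhs, rhs = strip.(split(expr, "->"))
    parse_side(side) = Dict(
        begin
            mult, carrier = split(strip(term), " ")
            String(carrier) => parse(Float64, mult)
        end for term in split(side, "+")
    )
    return (in=parse_side(lhs), out=parse_side(rhs))
end

conv = parse_conversion("1.0 gas -> 0.4 electricity + 0.5 heat")
@assert conv.in == Dict("gas" => 1.0)
@assert conv.out == Dict("electricity" => 0.4, "heat" => 0.5)
```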
+ """ + capacity::_Expression + + # [Optional] ======================================================================================================= + config::Dict{String, Any} = Dict() + ext::Dict{String, Any} = Dict() + addon::Union{String, Nothing} = nothing + conditional::Bool = false + + inputs::Dict{Carrier, String} = Dict() + outputs::Dict{Carrier, String} = Dict() + + availability::_OptionalExpression = nothing + availability_factor::_OptionalExpression = nothing + adapt_min_to_availability::Bool = false + + marginal_cost::_OptionalExpression = nothing + + enable_ramp_up::Bool = false + enable_ramp_down::Bool = false + ramp_up_cost::_OptionalScalarInput = nothing + ramp_down_cost::_OptionalScalarInput = nothing + ramp_up_limit::_OptionalScalarInput = nothing + ramp_down_limit::_OptionalScalarInput = nothing + + min_on_time::_OptionalScalarInput = nothing + min_off_time::_OptionalScalarInput = nothing + on_time_before::_ScalarInput = 0 + off_time_before::_ScalarInput = 0 + is_on_before::_Bound = 1 # todo: why is this a bound (and not _OptionalScalarInput) + + unit_commitment::Symbol = :off + unit_count::_OptionalExpression # default=1 is enforced in `parser.jl` + min_conversion::_OptionalScalarInput = nothing + conversion_at_min::_OptionalString = nothing + startup_cost::_OptionalScalarInput = nothing + + # [Internal] ======================================================================================================= + conversion_dict::Dict{Symbol, Dict{Carrier, _NumericalInput}} = Dict(:in => Dict(), :out => Dict()) + conversion_at_min_dict::Dict{Symbol, Dict{Carrier, _NumericalInput}} = Dict(:in => Dict(), :out => Dict()) + + capacity_carrier::NamedTuple{(:inout, :carrier), Tuple{Symbol, Carrier}} + marginal_cost_carrier::Union{Nothing, NamedTuple{(:inout, :carrier), Tuple{Symbol, Carrier}}} = nothing + + # [External] ======================================================================================================= + # results::Union{Dict, Nothing} = nothing + + # [Optimization Container] ========================================================================================= + _ccoc = _CoreComponentOptContainer() +end + +_result_fields(::Unit) = (:name, :inputs, :outputs, :unit_commitment) + +_total(unit::Unit, direction::Symbol, carrier::AbstractString) = + _total(unit, direction, string(carrier))::Vector{JuMP.AffExpr} +function _total(unit::Unit, direction::Symbol, carrier::String)::Vector{JuMP.AffExpr} + if !_has_cache(unit.model, :unit_total) + _iesopt_cache(unit.model)[:unit_total] = + Dict{Symbol, Dict{String, Symbol}}(:in => Dict{String, Symbol}(), :out => Dict{String, Symbol}()) + end + + cache::Dict{String, Symbol} = _get_cache(unit.model, :unit_total)[direction] + if !haskey(cache, carrier) + cache[carrier] = Symbol("$(direction)_$(carrier)") + end + return unit.exp[cache[carrier]] +end + +function _prepare!(unit::Unit) + # todo: "null" in the ThermalGen component translates to "nothing" (as String) instead of nothing (as Nothing)! + model = unit.model + carriers = _iesopt(model).model.carriers + + # Prepare in/out total expressions. 
+ for carrier in keys(unit.inputs) + _vec = Vector{JuMP.AffExpr}(undef, _iesopt(model).model.T[end]) + for i in eachindex(_vec) + _vec[i] = JuMP.AffExpr(0.0) + end + unit.exp[Symbol("in_$(carrier.name)")] = _vec + end + for carrier in keys(unit.outputs) + _vec = Vector{JuMP.AffExpr}(undef, _iesopt(model).model.T[end]) + for i in eachindex(_vec) + _vec[i] = JuMP.AffExpr(0.0) + end + unit.exp[Symbol("out_$(carrier.name)")] = _vec + end + + # Convert string formula to proper conversion dictionary. + @profile unit.model _convert_unit_conversion_dict!(carriers, unit) # todo: stop passing carriers, as soon as there is unit._model + + # Normalize the conversion expressions to allow correct handling later on. + @profile unit.model _normalize_conversion_expressions!(unit) + + return true +end + +function _isvalid(unit::Unit) + model = unit.model + + components = _iesopt(model).model.components + + # Check that input carriers match. + if !isnothing(unit.inputs) + for (carrier, cname) in unit.inputs + if carrier != components[cname].carrier + @critical "Unit got wrong input carrier" unit = unit.name carrier = carrier input = + components[cname].name + end + end + end + + # Check that output carriers match. + if !isnothing(unit.outputs) + for (carrier, cname) in unit.outputs + if carrier != components[cname].carrier + @critical "Unit got wrong output carrier" unit = unit.name carrier = carrier output = + components[cname].name + end + end + end + + # Check that we can actually construct the necessary constraints. + # todo: rework this + # if unit.unit_commitment != :off + # if !isa(unit.capacity_carrier.value, _ScalarInput) + # for (coeff, var) in unit.capacity_carrier.value.variables + # if !var.comp.fixed_size + # @error "Using an active as well as an endogenous capacity is currently not supported" unit = + # unit.name decision = var.comp.name + # return false + # end + # end + # end + # end + + if !_is_milp(model) && !(unit.unit_commitment === :off || unit.unit_commitment === :linear) + @critical "Model config only allows LP" unit = unit.name unit_commitment = unit.unit_commitment + end + + # Warn the user of possible misconfigurations. + if (unit.unit_commitment === :off) && (!isnothing(unit.min_conversion)) + @warn "Setting while is off can lead to issues" unit = unit.name + end + + if (unit.enable_ramp_up || unit.enable_ramp_down) && (_get(unit.unit_count) != 1) + @warn "Active ramps do not work as expected with different from 1" unit = unit.name + end + + # A Unit can not be up/and down before the time horizon. + if (unit.on_time_before != 0) && (unit.off_time_before != 0) + @critical "A Unit can not be up and down before starting the optimization" unit = unit.name + end + + # Check if `on_before` and `up/off_time_before` match. 
+ if (unit.is_on_before != 0) && (unit.off_time_before != 0) + @critical "A Unit can not be on before the optimization and have down time" unit = unit.name + end + if (unit.is_on_before == 0) && (unit.on_time_before != 0) + @critical "A Unit can not be off before the optimization and have up time" unit = unit.name + end + + # todo: resolve the issue and then remove this + if (_get(unit.unit_count) != 1) && (!isnothing(unit.min_on_time) || !isnothing(unit.min_off_time)) + @critical "min_on_time/min_off_time is currently not supported for Units with `unit.count > 1`" unit = unit.name + end + + # todo: resolve the issue and then remove this + if ( + (!isnothing(unit.min_on_time) || !isnothing(unit.min_off_time)) && + any(_weight(model, t) != 1 for t in _iesopt(model).model.T[2:end]) + ) + @warn "min_on_time/min_off_time is NOT tested for Snapshot weights != 1" unit = unit.name + end + + if _has_representative_snapshots(model) && (unit.unit_commitment != :off) + @critical "Active unit commitment is currently not supported for models with representative Snapshots" unit = + unit.name + end + + if (unit.enable_ramp_up || unit.enable_ramp_down) && _has_representative_snapshots(model) + @critical "Enabled ramps are currently not supported while using representative Snapshots" unit = unit.name + end + + return true +end + +function _setup!(unit::Unit) + model = unit.model + + return nothing +end + +function _result(unit::Unit, mode::String, field::String; result::Int=1) + if isnothing(findfirst("out:", field)) && isnothing(findfirst("in:", field)) + # This is not the `in:XXX` or `out:XXX` value of conversion. + if (field == "ison") && (mode == "value") && (unit.unit_commitment != :off) + return "$(unit.name).ison", JuMP.value.(unit.var.ison; result=result) + end + else + # # This is the `in:XXX` or `out:XXX` value of conversion. + dir, carrier = split(field, ":") + if _has_representative_snapshots(unit.model) + value = [ + JuMP.value( + _total(unit, Symbol(dir), carrier)[_iesopt(unit.model).model.snapshots[t].representative]; + result=result, + ) for t in _iesopt(unit.model).model.T + ] + else + value = JuMP.value.(_total(unit, Symbol(dir), carrier); result=result) + end + + if mode == "value" + return "$(unit.name).$dir.$carrier", value + elseif mode == "sum" + return "Unit.sum.$dir.$carrier", sum(value) + end + end + + @error "Unknown result extraction" unit = unit.name mode = mode field = field + return nothing +end + +include("unit/var_conversion.jl") +include("unit/var_ramp.jl") +include("unit/var_ison.jl") +include("unit/var_startup.jl") +include("unit/con_conversion_bounds.jl") +include("unit/con_ison.jl") +include("unit/con_min_onoff_time.jl") +include("unit/con_startup.jl") +include("unit/con_ramp.jl") +include("unit/con_ramp_limit.jl") +include("unit/obj_marginal_cost.jl") +include("unit/obj_startup_cost.jl") +include("unit/obj_ramp_cost.jl") + +function _construct_variables!(unit::Unit) + # Since all `Decision`s are constructed before this `Unit`, we can now properly finalize the `availability`, + # `availability_factor`, `unit_count`, `capacity`, and `marginal_cost`. 
+ !isnothing(unit.availability) && _finalize(unit.availability) + !isnothing(unit.availability_factor) && _finalize(unit.availability_factor) + !isnothing(unit.unit_count) && _finalize(unit.unit_count) + !isnothing(unit.capacity) && _finalize(unit.capacity) + !isnothing(unit.marginal_cost) && _finalize(unit.marginal_cost) + + # `var_ison` needs to be constructed before `var_conversion` + @profile unit.model _unit_var_ison!(unit) + + @profile unit.model _unit_var_conversion!(unit) + @profile unit.model _unit_var_ramp!(unit) + @profile unit.model _unit_var_startup!(unit) + + return nothing +end + +function _construct_constraints!(unit::Unit) + @profile unit.model _unit_con_conversion_bounds!(unit) + @profile unit.model _unit_con_ison!(unit) + @profile unit.model _unit_con_min_onoff_time!(unit) + @profile unit.model _unit_con_startup!(unit) + @profile unit.model _unit_con_ramp!(unit) + @profile unit.model _unit_con_ramp_limit!(unit) + + return nothing +end + +function _construct_objective!(unit::Unit) + @profile unit.model _unit_obj_marginal_cost!(unit) + @profile unit.model _unit_obj_startup_cost!(unit) + @profile unit.model _unit_obj_ramp_cost!(unit) + + return nothing +end + +function _convert_unit_conversion_dict!(carriers::Dict{String, Carrier}, unit::Unit) + # Convert the mandatory conversion. + lhs, rhs = split(unit.conversion, "->") + lhs = split(lhs, "+") + rhs = split(rhs, "+") + for item in lhs + item = strip(item) + item == "~" && continue + mult, carrier_str = split(item, " ") + if !isnothing(findfirst('@', mult)) + unit.conversion_dict[:in][carriers[carrier_str]] = _conv_S2NI(unit.model, mult) # todo ???? + else + unit.conversion_dict[:in][carriers[carrier_str]] = _conv_S2NI(unit.model, mult) # todo ???? + end + end + for item in rhs + item = strip(item) + item == "~" && continue + mult, carrier_str = split(item, " ") + if !isnothing(findfirst('@', mult)) + unit.conversion_dict[:out][carriers[carrier_str]] = _conv_S2NI(unit.model, mult) # todo ???? + else + unit.conversion_dict[:out][carriers[carrier_str]] = _conv_S2NI(unit.model, mult) # todo ???? + end + end + + isnothing(unit.conversion_at_min) && return + + # Convert the optional "minconversion" conversion. + lhs, rhs = split(unit.conversion_at_min, "->") + lhs = split(lhs, "+") + rhs = split(rhs, "+") + for item in lhs + item = strip(item) + item == "~" && continue + mult, carrier_str = split(item, " ") + unit.conversion_at_min_dict[:in][carriers[carrier_str]] = _conv_S2NI(unit.model, mult) + end + for item in rhs + item = strip(item) + item == "~" && continue + mult, carrier_str = split(item, " ") + unit.conversion_at_min_dict[:out][carriers[carrier_str]] = _conv_S2NI(unit.model, mult) + end + + return nothing +end + +function _normalize_conversion_expressions!(unit::Unit) + # Normalize default conversion expression. + norm = unit.conversion_dict[unit.capacity_carrier.inout][unit.capacity_carrier.carrier] + for dir in [:in, :out] + for (carrier, val) in unit.conversion_dict[dir] + unit.conversion_dict[dir][carrier] = val ./ norm + end + end + + # Normalize min_conversion expression. 
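The normalization just shown divides every coefficient by the one belonging to the capacity carrier, so that the capacity carrier's coefficient becomes exactly 1.0. A worked numeric check with hypothetical coefficients, assuming the capacity is defined on `out:electricity`:

```julia
# Hypothetical conversion "1.0 gas -> 0.4 electricity + 0.5 heat", capacity on out:electricity.
conversion = Dict(:in => Dict("gas" => 1.0), :out => Dict("electricity" => 0.4, "heat" => 0.5))
norm = conversion[:out]["electricity"]

normalized = Dict(
    dir => Dict(carrier => val / norm for (carrier, val) in conversion[dir]) for dir in (:in, :out)
)

@assert normalized[:out]["electricity"] == 1.0
@assert normalized[:in]["gas"] ≈ 2.5      # 2.5 units of gas per unit of electric capacity
@assert normalized[:out]["heat"] ≈ 1.25
```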
+ if !isnothing(unit.conversion_at_min) + norm = unit.conversion_at_min_dict[unit.capacity_carrier.inout][unit.capacity_carrier.carrier] + for dir in [:in, :out] + for (carrier, val) in unit.conversion_at_min_dict[dir] + unit.conversion_at_min_dict[dir][carrier] = val ./ norm + + if any( + unit.conversion_dict[dir][carrier] .≈ + unit.min_conversion .* unit.conversion_at_min_dict[dir][carrier], + ) + @warn "Linearization of efficiencies resulting in unexpected behaviour" unit = unit.name dir = dir carrier = + carrier.name + end + end + end + end + + return nothing +end + +function _unit_capacity_limits(unit::Unit) + # Get correct maximum. + if !isnothing(unit.availability_factor) + max_conversion = min.(1.0, _get(unit.availability_factor)) + elseif !isnothing(unit.availability) + if !isnothing(unit.capacity.decisions) && length(unit.capacity.decisions) > 0 + @critical "Endogenuous and are currently not supported" unit = unit.name + end + max_conversion = min.(1.0, _get(unit.availability) ./ _get(unit.capacity)) + else + max_conversion = 1.0 + end + + # Calculate max / online conversion based on unit commitment. + if unit.unit_commitment === :off + max_conversion = max_conversion .* _get(unit.unit_count) + online_conversion = max_conversion + else + online_conversion = max_conversion .* unit.var.ison # var_ison already includes unit.unit_count + max_conversion = max_conversion .* _get(unit.unit_count) + end + + if isnothing(unit.min_conversion) + # We are not limiting the min conversion. + return Dict{Symbol, Any}(:min => 0.0, :online => online_conversion, :max => max_conversion) + end + + return Dict{Symbol, Any}( + :min => unit.min_conversion .* (unit.adapt_min_to_availability ? online_conversion : unit.var.ison), + :online => online_conversion, + :max => max_conversion, + ) +end + +# todo: Why is `total` being indexed using carrier names (strings)? +get_total(unit::Unit, direction::String, carrier::String) = _total(unit, Symbol(direction), carrier) diff --git a/src/core/unit/con_conversion_bounds.jl b/src/core/unit/con_conversion_bounds.jl new file mode 100644 index 0000000..f748aa0 --- /dev/null +++ b/src/core/unit/con_conversion_bounds.jl @@ -0,0 +1,88 @@ +@doc raw""" + _unit_con_conversion_bounds!(model::JuMP.Model, unit::Unit) + +Add the constraint defining the `unit`'s conversion bounds to the `model`. + +This makes use of the current `min_capacity` (describing the lower limit of conversion; either 0 if no minimum load +applies or the respective value of the minimum load) as well as the `online_capacity` (that can either be the full +capacity if unit commitment is disabled, or the amount that is currently active). + +Depending on how the "availability" of this `unit` is handled it constructs the following constraints: + +> `if !isnothing(unit.availability)` +> ```math +> \begin{aligned} +> & \text{conversion}_t \geq \text{capacity}_{\text{min}, t}, \qquad \forall t \in T \\ +> & \text{conversion}_t \leq \text{capacity}_{\text{online}, t}, \qquad \forall t \in T \\ +> & \text{conversion}_t \leq \text{availability}_t, \qquad \forall t \in T +> \end{aligned} +> ``` + +This effectively results in +``\text{conversion}_t \leq \min(\text{capacity}_{\text{online}, t}, \text{availability}_t)``. 
+ +> `if !isnothing(unit.availability_factor)` +> ```math +> \begin{aligned} +> & \text{conversion}_t \geq \text{capacity}_{\text{min}, t}, \qquad \forall t \in T \\ +> & \text{conversion}_t \leq \text{capacity}_{\text{online}, t} \cdot \text{availability}_{\text{factor}, t}, \qquad \forall t \in T +> \end{aligned} +> ``` + +!!! info + If one is able to choose between using `availability` or `availability_factor` (e.g. for restricting available + capacity during a planned revision to half the units capacity), enabling `availability_factor` (in this example 0.5) + will result in a faster model (build and probably solve) since it makes use of one less constraint. + +If no kind of availability limiting takes place, the following bounds are enforced: + +> ```math +> \begin{aligned} +> & \text{conversion}_t \geq \text{capacity}_{\text{min}, t}, \qquad \forall t \in T \\ +> & \text{conversion}_t \leq \text{capacity}_{\text{online}, t}, \qquad \forall t \in T +> \end{aligned} +> ``` +""" +function _unit_con_conversion_bounds!(unit::Unit) + model = unit.model + + limits = _unit_capacity_limits(unit) + + # Construct the lower bound. + # `var_conversion[t] >= 0` + # which is already covered in the construction of the variable. + + if isnothing(limits[:online]) + return nothing + end + + if !_has_representative_snapshots(model) + # Construct the upper bound. + unit.con.conversion_ub = @constraint( + model, + [t = _iesopt(model).model.T], + _get(limits[:min], t) * _get(unit.capacity, t) + unit.var.conversion[t] <= + _get(limits[:online], t) * _get(unit.capacity, t), + base_name = _base_name(unit, "conversion_ub"), + container = Array + ) + else + # Create all representatives. + _repr = Dict( + t => @constraint( + model, + _get(limits[:min], t) * _get(unit.capacity, t) + unit.var.conversion[t] <= + _get(limits[:online], t) * _get(unit.capacity, t), + base_name = _base_name(unit, "conversion_ub[t]") + ) for t in _iesopt(model).model.T if _iesopt(model).model.snapshots[t].is_representative + ) + + # Create all constraints, either as themselves or their representative. + unit.con.conversion_ub = collect( + _iesopt(model).model.snapshots[t].is_representative ? _repr[t] : + _repr[_iesopt(model).model.snapshots[t].representative] for t in _iesopt(model).model.T + ) + end + + return nothing +end diff --git a/src/core/unit/con_ison.jl b/src/core/unit/con_ison.jl new file mode 100644 index 0000000..d18928f --- /dev/null +++ b/src/core/unit/con_ison.jl @@ -0,0 +1,22 @@ +@doc raw""" + _unit_con_ison!(unit::Unit) + +Construct the upper bound for `var_ison`, based on `unit.unit_count`, if it is handled by an external `Decision`. +""" +function _unit_con_ison!(unit::Unit) + if (unit.unit_commitment === :off) || isa(_get(unit.unit_count), Number) + return nothing + end + + model = unit.model + + unit.con.ison_ub = @constraint( + model, + [t = _iesopt(model).model.T], + unit.var.ison[t] <= _get(unit.unit_count), + base_name = _base_name(unit, "ison_ub"), + container = Array + ) + + return nothing +end diff --git a/src/core/unit/con_min_onoff_time.jl b/src/core/unit/con_min_onoff_time.jl new file mode 100644 index 0000000..7a71f5f --- /dev/null +++ b/src/core/unit/con_min_onoff_time.jl @@ -0,0 +1,145 @@ +@doc raw""" + _unit_con_min_onoff_time!(model::JuMP.Model, unit::Unit) + +Add the constraints modeling min on- or off-time of a `Unit` to the `model`. 
+ +This constructs the constraints +```math +\begin{align} + & \sum_{t' = t}^{t + \text{min\_on\_time}} ison_{t'} >= \text{min\_on\_time} \cdot (ison_t - ison_{t-1}) \qquad \forall t \in T \\ + & \sum_{t' = t}^{t + \text{min\_off\_time}} (1 - ison_{t'}) >= \text{min\_off\_time} \cdot (ison_{t-1} - ison_t) \qquad \forall t \in T +\end{align} + +respecting `on_time_before` and `off_time_before`, and `is_on_before`. See the code for more details. + +!!! info "Aggregated units" + This is currently not fully adapted to account for `Unit`s with `unit_count > 1`. +``` +""" +function _unit_con_min_onoff_time!(unit::Unit) + if unit.unit_commitment === :off + return nothing + end + + model = unit.model + T = _iesopt(model).model.T + + # Pre-calculate the cumulative sum of all Snapshots. + duration_sum = cumsum(_weight(model, t) for t in T) + + if !isnothing(unit.min_on_time) + # Calculate set of Snapshots for each `t` that belong to the time set. + T_min_on = Vector{NamedTuple{(:t_end, :force, :max_dur), Tuple{_ID, Bool, Int64}}}() + sizehint!(T_min_on, length(T)) + for t in T + if (unit.is_on_before != 0) && (duration_sum[t] <= (unit.min_on_time - unit.on_time_before)) + push!(T_min_on, (t_end=0, force=true, max_dur=0)) + continue + end + + # Calculate the "time" until which we need to check. + t_end = duration_sum[t] + (unit.min_on_time - _weight(model, t)) + # Check all "timings" if we reached the previously calculated time, so that we can "cut off". + cutoff = duration_sum[t:end] .>= t_end + + max_dur = unit.min_on_time + if !any(cutoff) + # We are overlapping with the end of the horizon. + max_dur = duration_sum[end] - duration_sum[t] + _weight(model, t) + t_end = length(T) + else + # Get the first Snapshot that fulfils the duration. + t_end = argmax(cutoff) + (t - 1) + + # Check if that match is exact. + if (duration_sum[t_end] - duration_sum[t] + _weight(model, t)) != unit.min_on_time + @warn "Unit minimum up time not matching with differing Snapshot weights" unit = unit.name + end + end + + push!(T_min_on, (t_end=t_end, force=false, max_dur=max_dur)) + end + + # Construct the constraints. + unit.con.min_on_time = Vector{JuMP.ConstraintRef}() + sizehint!(unit.con.min_on_time, length(T)) + for t in T + if T_min_on[t].force + push!( + unit.con.min_on_time, + @constraint(model, unit.var.ison[t] == 1, base_name = _base_name(unit, "min_on_time[$t]")) + ) + else + prev = (t == 1) ? Int64(unit.is_on_before) : unit.var.ison[t - 1] + push!( + unit.con.min_on_time, + @constraint( + model, + sum(unit.var.ison[_t] for _t in t:(T_min_on[t].t_end)) >= + T_min_on[t].max_dur * (unit.var.ison[t] - prev), + base_name = _base_name(unit, "min_on_time[$t]") + ) + ) + end + end + end + + if !isnothing(unit.min_off_time) + # Calculate set of Snapshots for each `t` that belong to the time set. + T_min_off = Vector{NamedTuple{(:t_end, :force, :max_dur), Tuple{_ID, Bool, Int64}}}() + sizehint!(T_min_off, length(T)) + for t in T + if (unit.is_on_before == 0) && (duration_sum[t] <= (unit.min_off_time - unit.off_time_before)) + push!(T_min_off, (t_end=0, force=true, max_dur=0)) + continue + end + + # Calculate the "time" until which we need to check. + t_end = duration_sum[t] + (unit.min_off_time - _weight(model, t)) + # Check all "timings" if we reached the previously calculated time, so that we can "cut off". + cutoff = duration_sum[t:end] .>= t_end + + max_dur = unit.min_off_time + if !any(cutoff) + # We are overlapping with the end of the horizon. 
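The cutoff search above locates, for every snapshot `t`, the last snapshot still covered by the minimum on-time, taking non-uniform `weight`s into account. With uniform weights of 1 it reduces to `t_end = t + min_on_time - 1`, which a small standalone check confirms (illustrative numbers):

```julia
# Mirrors the cutoff computation above for uniform snapshot weights.
weights = ones(8)
duration_sum = cumsum(weights)
min_on_time = 3
t = 4

t_end_target = duration_sum[t] + (min_on_time - weights[t])
cutoff = duration_sum[t:end] .>= t_end_target
t_end = argmax(cutoff) + (t - 1)

@assert t_end == 6    # snapshots 4, 5 and 6 together cover the 3-hour minimum on-time
```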
+ max_dur = duration_sum[end] - duration_sum[t] + _weight(model, t) + t_end = length(T) + else + # Get the first Snapshot that fulfils the duration. + t_end = argmax(cutoff) + (t - 1) + + # Check if that match is exact. + if (duration_sum[t_end] - duration_sum[t] + _weight(model, t)) != unit.min_off_time + @warn "Unit minimum down time not matching with differing Snapshot weights" unit = unit.name + end + end + + push!(T_min_off, (t_end=t_end, force=false, max_dur=max_dur)) + end + + # Construct the constraints. + unit.con.min_off_time = Vector{JuMP.ConstraintRef}() + sizehint!(unit.con.min_off_time, length(T)) + for t in T + if T_min_off[t].force + push!( + unit.con.min_off_time, + @constraint(model, unit.var.ison[t] == 0, base_name = _base_name(unit, "min_off_time[$t]")) + ) + else + prev = (t == 1) ? Int64(unit.is_on_before) : unit.var.ison[t - 1] + push!( + unit.con.min_off_time, + @constraint( + model, + sum((1 - unit.var.ison[_t]) for _t in t:(T_min_off[t].t_end)) >= + T_min_off[t].max_dur * (prev - unit.var.ison[t]), + base_name = _base_name(unit, "min_off_time[$t]") + ) + ) + end + end + end + + return nothing +end diff --git a/src/core/unit/con_ramp.jl b/src/core/unit/con_ramp.jl new file mode 100644 index 0000000..84b557b --- /dev/null +++ b/src/core/unit/con_ramp.jl @@ -0,0 +1,47 @@ +@doc raw""" + _unit_con_ramp!(model::JuMP.Model, unit::Unit) + +Add the auxiliary constraint that enables calculation of per snapshot ramping to the `model`. + +Depending on whether ramps are enabled, none, one, or both of the following constraints are constructed: + +```math +\begin{aligned} + & \text{ramp}_{\text{up}, t} \geq \text{conversion}_{t} - \text{conversion}_{t-1}, \qquad \forall t \in T \\ + & \text{ramp}_{\text{down}, t} \geq \text{conversion}_{t-1} - \text{conversion}_{t}, \qquad \forall t \in T +\end{aligned} +``` + +This calculates the ramping that happens from the PREVIOUS snapshot to this one. That means that if: +- `out[5] = 100` and `out[4] = 50`, then `ramp_up[5] = 50` and `ramp_down[5] = 0` +- `ramp_up[1] = ramp_down[1] = 0` + +!!! info + This currently does not support pre-setting the initial states of the unit (it can be done manually but there is no + exposed parameter), which will be implemented in the future to allow for easy / correct rolling optimization runs. +""" +function _unit_con_ramp!(unit::Unit) + model = unit.model + + # Extract the unique capacity carrier, which ramps are based on. + out = _total(unit, unit.capacity_carrier.inout, unit.capacity_carrier.carrier.name) + + if unit.enable_ramp_up && !isnothing(unit.ramp_up_cost) + unit.con.ramp_up = @constraint( + model, + [t = _iesopt(model).model.T], + unit.var.ramp_up[t] >= out[t] - ((t == 1) ? out[t] : out[t - 1]), + base_name = _base_name(unit, "ramp_up"), + container = Array + ) + end + if unit.enable_ramp_down && !isnothing(unit.ramp_down_cost) + unit.con.ramp_down = @constraint( + model, + [t = _iesopt(model).model.T], + unit.var.ramp_down[t] >= ((t == 1) ? out[t] : out[t - 1]) - out[t], + base_name = _base_name(unit, "ramp_down"), + container = Array + ) + end +end diff --git a/src/core/unit/con_ramp_limit.jl b/src/core/unit/con_ramp_limit.jl new file mode 100644 index 0000000..29cb884 --- /dev/null +++ b/src/core/unit/con_ramp_limit.jl @@ -0,0 +1,51 @@ +# Note: This uses `_unit_capacity_limits` from `constr_conversion_ub.jl`. + +@doc raw""" + _unit_con_ramp_limit!(model::JuMP.Model, unit::Unit) + +Add the constraint describing the ramping limits of this `unit` to the `model`. 
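The ramp bookkeeping constructed in `_unit_con_ramp!` above only measures changes of the capacity-carrier total between consecutive snapshots. A minimal plain-JuMP sketch of the upward case (not the IESopt API; the downward case mirrors it with the sign flipped):

```julia
using JuMP

T = 1:4
model = Model()
@variable(model, out[T] >= 0)       # stand-in for the capacity-carrier total
@variable(model, ramp_up[T] >= 0)

# ramp_up[t] >= out[t] - out[t-1]; for t == 1 the difference is defined to be 0.
@constraint(model, [t = T], ramp_up[t] >= out[t] - ((t == 1) ? out[t] : out[t-1]))
```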
+ +This makes use of the maximum capacity of the `unit`, which is just the total installed capacity. Both, up- and +downwards ramps can be enabled separately (via `unit.ramp_up_limit` and `unit.ramp_down_limit`), resulting in either or +both of: + +```math +\begin{aligned} + & \text{ramp}_{\text{up}, t} \leq \text{ramplimit}_\text{up} \cdot \text{capacity}_\text{max} \cdot \omega_t, \qquad \forall t \in T \\ + & \text{ramp}_{\text{down}, t} \leq \text{ramplimit}_\text{down} \cdot \text{capacity}_\text{max} \cdot \omega_t, \qquad \forall t \in T +\end{aligned} +``` + +This does **not** make use of the ramping variable (that is only used for costs - if there are costs). + +This calculates the ramping that happens from the PREVIOUS snapshot to this one. That means that if: +- `out[5] = 100` and `out[4] = 50`, then `ramp_up[5] = 50` and `ramp_down[5] = 0` +- `ramp_up[1] = ramp_down[1] = 0` +""" +function _unit_con_ramp_limit!(unit::Unit) + model = unit.model + + # Extract the unique capacity carrier, which ramps are based on. + out = _total(unit, unit.capacity_carrier.inout, unit.capacity_carrier.carrier.name) + + if unit.enable_ramp_up && !isnothing(unit.ramp_up_limit) + unit.con.ramp_up_limit = @constraint( + model, + [t = _iesopt(model).model.T], + out[t] - ((t == 1) ? out[t] : out[t - 1]) <= + unit.ramp_up_limit * _weight(model, t) * _get(unit.unit_count) * _get(unit.capacity, t), + base_name = _base_name(unit, "ramp_up_limit"), + container = Array + ) + end + if unit.enable_ramp_down && !isnothing(unit.ramp_down_limit) + unit.con.ramp_down_limit = @constraint( + model, + [t = _iesopt(model).model.T], + ((t == 1) ? out[t] : out[t - 1]) - out[t] <= + unit.ramp_down_limit * _weight(model, t) * _get(unit.unit_count) * _get(unit.capacity, t), + base_name = _base_name(unit, "ramp_down_limit"), + container = Array + ) + end +end diff --git a/src/core/unit/con_startup.jl b/src/core/unit/con_startup.jl new file mode 100644 index 0000000..4cd5f2b --- /dev/null +++ b/src/core/unit/con_startup.jl @@ -0,0 +1,41 @@ +@doc raw""" + _unit_con_startup!(model::JuMP.Model, unit::Unit) + +Add the auxiliary constraint that enables calculation of per snapshot startup to the `model`. + +Depending on whether startup handling is enabled, the following constraint is constructed: + +```math +\begin{aligned} + & \text{startup}_{\text{up}, t} \geq \text{ison}_{t} - \text{ison}_{t-1}, \qquad \forall t \in T +\end{aligned} +``` + +This calculates the startup that happens from the PREVIOUS snapshot to this one. That means that if: +- `ison[5] = 1` and `ison[4] = 0`, then `startup[5] = 1` +""" +function _unit_con_startup!(unit::Unit) + if isnothing(unit.startup_cost) || (unit.unit_commitment === :off) + return nothing + end + + model = unit.model + + unit.con.startup_lb = @constraint( + model, + [t = _iesopt(model).model.T], + unit.var.startup[t] >= unit.var.ison[t] - ((t == 1) ? unit.is_on_before : unit.var.ison[t - 1]), + base_name = _base_name(unit, "startup_lb"), + container = Array + ) + + return nothing + + # The following constraint is currently not active, since it should never be necessary (= model will never startup + # more units than are available). 
+ # unit.constr_startup_ub = @constraint( + # model, [t=_iesopt(model).model.T], + # unit.var.startup[t] <= _get(unit.unit_count), + # base_name=_base_name(model, "unit_startup_ub", (n=unit.name, t=_snapshot(model, t).name)) + # ) +end diff --git a/src/core/unit/obj_marginal_cost.jl b/src/core/unit/obj_marginal_cost.jl new file mode 100644 index 0000000..c92cdfc --- /dev/null +++ b/src/core/unit/obj_marginal_cost.jl @@ -0,0 +1,31 @@ +@doc raw""" + _unit_obj_marginal_cost!(model::JuMP.Model, unit::Unit) + +Add the (potential) cost of this `unit`'s conversion (`unit.marginal_cost`) to the global objective function. + +```math +\sum_{t \in T} \text{conversion}_t \cdot \text{marginalcost}_t \cdot \omega_t +``` +""" +function _unit_obj_marginal_cost!(unit::Unit) + if isnothing(unit.marginal_cost) + return nothing + end + + model = unit.model + total_mc::Vector{JuMP.AffExpr} = + _total(unit, unit.marginal_cost_carrier.inout, unit.marginal_cost_carrier.carrier.name) + + unit.obj.marginal_cost = JuMP.AffExpr(0.0) + for t in _iesopt(model).model.T + JuMP.add_to_expression!( + unit.obj.marginal_cost, + total_mc[_iesopt(model).model.snapshots[t].representative], + _weight(model, t) * _get(unit.marginal_cost, t), + ) + end + + push!(_iesopt(model).model.objectives["total_cost"].terms, unit.obj.marginal_cost) + + return nothing +end diff --git a/src/core/unit/obj_ramp_cost.jl b/src/core/unit/obj_ramp_cost.jl new file mode 100644 index 0000000..3b2b44d --- /dev/null +++ b/src/core/unit/obj_ramp_cost.jl @@ -0,0 +1,34 @@ +@doc raw""" + _unit_obj_ramp_cost!(model::JuMP.Model, unit::Unit) + +Add the (potential) cost of this `unit`'s ramping to the global objective function. + +To allow for finer control, costs of up- and downwards ramping can be specified separately (using `unit.ramp_up_cost` +and `unit.ramp_down_cost`): + +```math +\sum_{t \in T} \text{ramp}_{\text{up}, t} \cdot \text{rampcost}_{\text{up}} + \text{ramp}_{\text{down}, t} \cdot \text{rampcost}_{\text{down}} +``` +""" +function _unit_obj_ramp_cost!(unit::Unit) + unit.enable_ramp_up || unit.enable_ramp_down || return nothing + + model = unit.model + + unit.obj.ramp_cost = JuMP.AffExpr(0.0) + if unit.enable_ramp_up && !isnothing(unit.ramp_up_cost) + JuMP.add_to_expression!( + unit.obj.ramp_cost, + _affine_expression(unit.var.ramp_up[t] * unit.ramp_up_cost for t in _iesopt(model).model.T), + ) + end + if unit.enable_ramp_down && !isnothing(unit.ramp_down_cost) + JuMP.add_to_expression!( + unit.obj.ramp_cost, + _affine_expression(unit.var.ramp_down[t] * unit.ramp_down_cost for t in _iesopt(model).model.T), + ) + end + + push!(_iesopt(model).model.objectives["total_cost"].terms, unit.obj.ramp_cost) + return nothing +end diff --git a/src/core/unit/obj_startup_cost.jl b/src/core/unit/obj_startup_cost.jl new file mode 100644 index 0000000..fd22de5 --- /dev/null +++ b/src/core/unit/obj_startup_cost.jl @@ -0,0 +1,26 @@ +@doc raw""" + _unit_obj_startup_cost!(model::JuMP.Model, unit::Unit) + +Add the (potential) cost of this `unit`'s startup behaviour (configured by `unit.startup_cost` if +`unit.unit_commitment != :off`). 
+ +```math +\sum_{t \in T} \text{startup}_t \cdot \text{startupcost} +``` +""" +function _unit_obj_startup_cost!(unit::Unit) + if isnothing(unit.startup_cost) || (unit.unit_commitment === :off) + return nothing + end + + model = unit.model + + unit.obj.startup_cost = JuMP.AffExpr(0.0) + for t in _iesopt(model).model.T + JuMP.add_to_expression!(unit.obj.startup_cost, unit.var.startup[t], unit.startup_cost) + end + + push!(_iesopt(model).model.objectives["total_cost"].terms, unit.obj.startup_cost) + + return nothing +end diff --git a/src/core/unit/var_conversion.jl b/src/core/unit/var_conversion.jl new file mode 100644 index 0000000..a4b5d51 --- /dev/null +++ b/src/core/unit/var_conversion.jl @@ -0,0 +1,188 @@ +# Note: This file relies on the successful creation of the `var_ison` => functions need to be called after that. + +@doc raw""" + _unit_var_conversion!(model::JuMP.Model, unit::Unit) + +Add the variable describing the `unit`'s conversion to the `model`. + +This can be accessed via `unit.var.conversion[t]`; this does not describe the full output of the `Unit` since that maybe +also include fixed generation based on the `ison` variable. + +!!! info + This applies some heavy recalculation of efficiencies to account for minimum load and so on, that are currently not + fully documented. This essentially comes down to the following: As long as minimum load is not enabled, that is + rather simple (using the conversion expression to withdraw energy from the inputs and push energy into the outputs). + If a separate minimum load conversion is specified it results in the following: (1) if running at minimum load the + supplied minimum load conversion will be used; (2) if running at maximum capacity the "normal" conversion expression + will be used; (3) for any point in-between a linear interpolation scales up all coefficients of the conversion + expression to "connect" case (1) and (2). +""" +function _unit_var_conversion!(unit::Unit) + model = unit.model + + if !_has_representative_snapshots(model) + unit.var.conversion = @variable( + model, + [t = _iesopt(model).model.T], + lower_bound = 0, + base_name = _base_name(unit, "conversion"), + container = Array + ) + else + # Create all representatives. + _repr = Dict( + t => @variable(model, lower_bound = 0, base_name = _base_name(unit, "conversion[$(t)]")) for + t in _iesopt(model).model.T if _iesopt(model).model.snapshots[t].is_representative + ) + + # Create all variables, either as themselves or their representative. + unit.var.conversion = collect( + _iesopt(model).model.snapshots[t].is_representative ? _repr[t] : + _repr[_iesopt(model).model.snapshots[t].representative] for t in _iesopt(model).model.T + ) + end + + return _unit_var_conversion_connect!(unit) +end + +function _unit_var_conversion_connect!(unit::Unit) + # Pre-calculate the Unit's conversion limits once. + limits = _unit_capacity_limits(unit) + + # Properly connect in- and outputs based on conversion rule. 
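The linear interpolation between the min-load and the full-load conversion, as described in the docstring above, can be checked numerically. All numbers below are hypothetical: a unit with capacity 100, a 40 % minimum load, a full-load heat coefficient of 0.5 and a min-load heat coefficient of 0.55.

```julia
capacity       = 100.0
min_conversion = 0.4
coeff_full     = 0.5     # conversion_dict[:out][heat] (hypothetical)
coeff_min      = 0.55    # conversion_at_min_dict[:out][heat] (hypothetical)

# Same formula as `incremental_efficiencies` below.
incremental = (coeff_full - min_conversion * coeff_min) / (1.0 - min_conversion)

# Total output as a function of the conversion variable, which is 0 at minimum load
# and (1 - min_conversion) * capacity at full load.
total(conv) = coeff_min * min_conversion * capacity + incremental * conv

@assert total(0.0) == coeff_min * min_conversion * capacity                        # min load: 22.0
@assert isapprox(total((1 - min_conversion) * capacity), coeff_full * capacity)    # full load: 50.0
```

The interpolation therefore reproduces the supplied min-load conversion at minimum load and the regular conversion expression at full capacity, with a straight line in between.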
+ if isnothing(unit.conversion_at_min) + _unit_var_conversion_connect!(unit, limits) + else + incremental_efficiencies = Dict( + dir => Dict( + carrier => ( + (value .- unit.min_conversion .* unit.conversion_at_min_dict[dir][carrier]) ./ + (1.0 - unit.min_conversion) + ) for (carrier, value) in unit.conversion_dict[dir] + ) for dir in [:in, :out] + ) + _unit_var_conversion_connect!(unit, limits, incremental_efficiencies) + end + + return nothing +end + +function _unit_var_conversion_connect!(unit::Unit, limits::Dict, incremental_efficiencies::Dict) + model = unit.model + components = _iesopt(model).model.components + + # TODO: re-order this for loop like in the function below + for t in _iesopt(model).model.T + _iesopt(model).model.snapshots[t].is_representative || continue + + for carrier in keys(unit.conversion_dict[:in]) + _total(unit, :in, carrier.name)[t] = ( + _get(unit.conversion_at_min_dict[:in][carrier], t) * limits[:min][t] * _get(unit.capacity, t) + + _get(incremental_efficiencies[:in][carrier], t) * unit.var.conversion[t] + ) + JuMP.add_to_expression!( + components[unit.inputs[carrier]].exp.injection[t], + -_total(unit, :in, carrier.name)[t], + ) + end + + for carrier in keys(unit.conversion_dict[:out]) + _total(unit, :out, carrier.name)[t] = ( + _get(unit.conversion_at_min_dict[:out][carrier], t) * limits[:min][t] * _get(unit.capacity, t) + + _get(incremental_efficiencies[:out][carrier], t) * unit.var.conversion[t] + ) + JuMP.add_to_expression!( + components[unit.outputs[carrier]].exp.injection[t], + _total(unit, :out, carrier.name)[t], + ) + end + end + + return nothing +end + +function _unit_var_conversion_connect!(unit::Unit, limits::Dict) + # There is just a single efficiency to care about. + model = unit.model + + components = _iesopt(model).model.components + unit_var_conversion = unit.var.conversion + + input_totals = Dict{Carrier, Vector{JuMP.AffExpr}}( + carrier => _total(unit, :in, carrier.name) for carrier in keys(unit.conversion_dict[:in]) + ) + output_totals = Dict{Carrier, Vector{JuMP.AffExpr}}( + carrier => _total(unit, :out, carrier.name) for carrier in keys(unit.conversion_dict[:out]) + ) + + _a = collect(_get(limits[:min], t) for t in _iesopt(model).model.T) + _b = collect(_get(unit.capacity, t) for t in _iesopt(model).model.T) + _c::Vector{JuMP.VariableRef} = collect(unit_var_conversion[t] for t in _iesopt(model).model.T) + + _mode::Symbol, _term1::Vector{JuMP.AffExpr}, _term2::Vector{Float64} = ( + if _a[1] isa Number + if _b[1] isa Number + (:single, Vector{JuMP.AffExpr}(), convert.(Float64, _a .* _b)) + else + (:multi, _b, convert.(Float64, _a)) + end + elseif _b[1] isa Number + (:multi, _a, convert.(Float64, _b)) + else + @critical "Fatal error" + end + ) + + _snapshots = _iesopt(model).model.snapshots + _T = [t for t in _iesopt(model).model.T if _snapshots[t].is_representative] + + for (carrier, mult) in unit.conversion_dict[:in] + _inpinj::Vector{JuMP.AffExpr} = components[unit.inputs[carrier]].exp.injection + for t in _T + _inpinj_t::JuMP.AffExpr = _inpinj[t] + + # (_get(limits[:min], t) * _get(unit.capacity, t) + unit.var.conversion[t]) * _get(mult, t) + # = (a*b + c) * d + _d::Float64 = _get(mult, t) + _expr::JuMP.AffExpr = input_totals[carrier][t] + _coeff::Float64 = _d * _term2[t] + + JuMP.add_to_expression!(_expr, _c[t], _d) + JuMP.add_to_expression!(_inpinj_t, _c[t], -_d) + + if _mode == :single + JuMP.add_to_expression!(_expr, _coeff) + JuMP.add_to_expression!(_inpinj_t, -_coeff) + else + JuMP.add_to_expression!(_expr, _term1[t], _coeff) + 
JuMP.add_to_expression!(_inpinj_t, _term1[t], -_coeff) + end + end + end + + for (carrier, mult) in unit.conversion_dict[:out] + _outinj::Vector{JuMP.AffExpr} = components[unit.outputs[carrier]].exp.injection + for t in _T + _outinj_t::JuMP.AffExpr = _outinj[t] + + # (_get(limits[:min], t) * _get(unit.capacity, t) + unit.var.conversion[t]) * _get(mult, t) + # = (a*b + c) * d + _d::Float64 = _get(mult, t) + _expr::JuMP.AffExpr = output_totals[carrier][t] + _coeff::Float64 = _d * _term2[t] + + JuMP.add_to_expression!(_expr, _c[t], _d) + JuMP.add_to_expression!(_outinj_t, _c[t], _d) + + if _mode == :single + JuMP.add_to_expression!(_expr, _coeff) + JuMP.add_to_expression!(_outinj_t, _coeff) + else + JuMP.add_to_expression!(_expr, _term1[t], _coeff) + JuMP.add_to_expression!(_outinj_t, _term1[t], _coeff) + end + end + end + + return nothing +end diff --git a/src/core/unit/var_ison.jl b/src/core/unit/var_ison.jl new file mode 100644 index 0000000..58d99bf --- /dev/null +++ b/src/core/unit/var_ison.jl @@ -0,0 +1,39 @@ +@doc raw""" + _unit_var_ison!(model::JuMP.Model, unit::Unit) + +Add the variable describing the current "online" state of the `unit` to the `model`. + +The variable can be further parameterized using the `unit.unit_commitment` setting ("linear", "binary", "integer"). It +will automatically enforce the constraints ``0 \leq \text{ison} \leq \text{unitcount}``, with ``\text{unitcount}`` +describing the number of units that are aggregated in this `unit` (set by `unit.unit_count`). This can be accessed via +`unit.var.ison[t]`. +""" +function _unit_var_ison!(unit::Unit) + (unit.unit_commitment === :off) && return + + model = unit.model + + # The lower bound `0 <= var_ison` is redundant since `0 <= var_conversion <= var_ison` holds always. + if isa(_get(unit.unit_count), Number) + unit.var.ison = @variable( + model, + [t = _iesopt(model).model.T], + binary = (unit.unit_commitment === :binary), + integer = (unit.unit_commitment === :integer), + lower_bound = 0, + upper_bound = _get(unit.unit_count), + base_name = _base_name(unit, "ison"), + container = Array + ) + else + unit.var.ison = @variable( + model, + [t = _iesopt(model).model.T], + binary = (unit.unit_commitment === :binary), + integer = (unit.unit_commitment === :integer), + lower_bound = 0, + base_name = _base_name(unit, "ison"), + container = Array + ) + end +end diff --git a/src/core/unit/var_ramp.jl b/src/core/unit/var_ramp.jl new file mode 100644 index 0000000..a627313 --- /dev/null +++ b/src/core/unit/var_ramp.jl @@ -0,0 +1,38 @@ +@doc raw""" + _unit_var_ramp!(model::JuMP.Model, unit::Unit) + +Add the variable describing the per-snapshot ramping to the `model`. + +This adds two variables per snapshot to the model (if the respective setting `unit.enable_ramp_up` or +`unit.enable_ramp_down` is activated). Both are preconstructed with a fixed lower bound of `0`. This describes the +amount of change in conversion that occurs during the current snapshot. These can be accessed via `unit.var.ramp_up[t]` +and `unit.var.ramp_down[t]`. + +These variables are only used for ramping **costs**. The limits are enforced directly on the conversion, which means +this variable only exists if costs are specified! +""" +function _unit_var_ramp!(unit::Unit) + model = unit.model + + # Construct the variables. 
+    if unit.enable_ramp_up && !isnothing(unit.ramp_up_cost)
+        unit.var.ramp_up = @variable(
+            model,
+            [t = _iesopt(model).model.T],
+            lower_bound = 0,
+            base_name = _base_name(unit, "ramp_up"),
+            container = Array
+        )
+    end
+    if unit.enable_ramp_down && !isnothing(unit.ramp_down_cost)
+        unit.var.ramp_down = @variable(
+            model,
+            [t = _iesopt(model).model.T],
+            lower_bound = 0,
+            base_name = _base_name(unit, "ramp_down"),
+            container = Array
+        )
+    end
+
+    return nothing
+end
diff --git a/src/core/unit/var_startup.jl b/src/core/unit/var_startup.jl
new file mode 100644
index 0000000..6692b37
--- /dev/null
+++ b/src/core/unit/var_startup.jl
@@ -0,0 +1,44 @@
+@doc raw"""
+    _unit_var_startup!(model::JuMP.Model, unit::Unit)
+
+Add the variable describing the per-snapshot startup to the `model`.
+
+This adds a variable per snapshot to the model (if the respective setting `unit.unit_commitment` is activated).
+The variable can be further parameterized using the `unit.unit_commitment` setting ("linear", "binary", "integer"). It
+will automatically enforce the constraints ``0 \leq \text{startup} \leq \text{unitcount}``, with ``\text{unitcount}``
+describing the number of units that are aggregated in this `unit` (set by `unit.unit_count`). It describes the
+startup that happens during the current snapshot and can be accessed via `unit.var.startup`.
+"""
+function _unit_var_startup!(unit::Unit)
+    if isnothing(unit.startup_cost) || (unit.unit_commitment === :off)
+        return nothing
+    end
+
+    model = unit.model
+
+    if !_has_representative_snapshots(model)
+        unit.var.startup = @variable(
+            model,
+            [t = _iesopt(model).model.T],
+            # This will automatically be binary/integer valued as soon as `var_ison` is.
+            # binary=(unit.unit_commitment === :binary), integer=(unit.unit_commitment === :integer),
+            lower_bound = 0.0,
+            base_name = _base_name(unit, "startup"),
+            container = Array
+        )
+    else
+        # Create all representatives.
+        _repr = Dict(
+            t => @variable(model, lower_bound = 0.0, base_name = _base_name(unit, "startup[$(t)]")) for
+            t in _iesopt(model).model.T if _iesopt(model).model.snapshots[t].is_representative
+        )
+
+        # Create all variables, either as themselves or their representative.
+        unit.var.startup = collect(
+            _iesopt(model).model.snapshots[t].is_representative ? _repr[t] :
+            _repr[_iesopt(model).model.snapshots[t].representative] for t in _iesopt(model).model.T
+        )
+    end
+
+    return nothing
+end
diff --git a/src/opt/benders.jl b/src/opt/benders.jl
new file mode 100644
index 0000000..12ab82a
--- /dev/null
+++ b/src/opt/benders.jl
@@ -0,0 +1,466 @@
+# todo:
+# - needs the same global parameters as the usual optimize!
+# - does not work with binary variables in addons...
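+
+# Rough outline of the scheme implemented below (see `benders(...)` for details):
+#   1. Parse the model twice; the MAIN problem keeps only `Decision` components, while the SUB problem keeps
+#      everything but has its Decisions switched to `:fixed`.
+#   2. Add an epigraph variable `θ` to MAIN and solve it to obtain trial Decision values.
+#   3. Fix those values in SUB, solve it, and read back its objective value and duals.
+#   4. Add the optimality cut `θ >= obj_sub + dual' * (x - x_trial)` to MAIN and repeat until the relative
+#      gap, time, or iteration limit is hit (iteratively for LP mains, via lazy-constraint callbacks for
+#      MILP mains).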
+ +mutable struct BendersData + main::JuMP.Model + sub::JuMP.Model + + decisions::Vector{String} + + max_rel_gap::Float64 + max_iterations::Int64 + max_time_s::Int64 + + initial_lower_bound::Float64 + + iteration::Int64 + cuts::Int64 + + user_defined_variables::Set{Symbol} + + function BendersData(main::JuMP.Model, sub::JuMP.Model, decisions::Vector{String}) + return new(main, sub, decisions, 1e-4, -1, -1, 0.0, 0, 0, Set()) + end + + function BendersData( + main::JuMP.Model, + sub::JuMP.Model, + decisions::Vector{String}, + max_rel_gap::Float64, + max_iterations::Int64, + max_time_s::Int64, + initial_lb::Float64, + ) + return new(main, sub, decisions, max_rel_gap, max_iterations, max_time_s, initial_lb, 0, 0, Set()) + end +end + +""" + function benders( + optimizer::DataType, + filename::String; + opt_attr_main::Dict=Dict(), + opt_attr_sub::Dict=Dict(), + rel_gap::Float64=1e-4, + max_iterations::Int64=-1, + max_time_s::Int64=-1, + suppress_log_lvl::Logging.LogLevel=Logging.Warn, + ) + +Perform automatic Benders decomposition for all Decisions in the model. + +Example usage: + +``` +import IESopt +import HiGHS + +oas_highs = Dict( + "solver" => "choose", + "run_crossover" => "off", + "primal_feasibility_tolerance" => 1e-3, + "dual_feasibility_tolerance" => 1e-3, + "ipm_optimality_tolerance" => 1e-3 +) + +IESopt.benders(HiGHS.Optimizer, "model/config.iesopt.yaml"; opt_attr_sub=oas_highs) +``` + +``` +import IESopt +import Gurobi + +oas_gurobi = Dict( + "Method" => 2, + "Crossover" => 0, + "Presolve" => 2, + "OptimalityTol" => 1e-3, + "FeasibilityTol" => 1e-3, + "BarConvTol" => 1e-3, +) + +IESopt.benders(Gurobi.Optimizer, "model/config.iesopt.yaml"; opt_attr_sub=oas_gurobi) +``` +""" +function benders( + optimizer::DataType, + filename::String; + opt_attr_main::Dict=Dict(), + opt_attr_sub::Dict=Dict(), + rel_gap::Float64=1e-4, + max_iterations::Int64=-1, + max_time_s::Int64=-1, + initial_lb::Float64=0.0, + suppress_log_lvl::Logging.LogLevel=Logging.Warn, + user_defined::Dict{Symbol, Set{Symbol}}=Dict(:main => Set{Symbol}(), :sub => Set{Symbol}()), + feasibility_penalty::Float64=1e6, + kwargs..., +) + _benders_warning = "Using automatic Benders decomposition for Decision optimization is an advanced feature, that \ + requires a carefully crafted model. Ensure that you are familiar with what is necessary or \ + consult with someone before trying this. The most important points are: (1) ensured \ + feasibility of the sub-problem, (2) the sub-problem being pure-LP, (3) the correct solver \ + using advanced MILP formulations in the main-problem, (4) a correct `problem_type` setting \ + in the config file corresponding to the problem type of the main-problem. Furthermore, result \ + extraction has to be done manually, and model handling / return values look differently." + @warn "[benders] $_benders_warning" + + _model_main = JuMP.direct_model(JuMP.optimizer_with_attributes(optimizer, opt_attr_main...)) + _model_sub = JuMP.direct_model(JuMP.optimizer_with_attributes(optimizer, opt_attr_sub...)) + + benders_data = + BendersData(_model_main, _model_sub, Vector{String}(), rel_gap, max_iterations, max_time_s, initial_lb) + + @info "[benders] Parsing model into main/sub" filename + + # Ignore everything below Error for now. + initial_log_lvl = Logging.min_enabled_level(Logging.current_logger()) + Logging.disable_logging(suppress_log_lvl) + + # Do the parse. + if !parse!(benders_data.main, filename; kwargs...) + @error "[benders] `parse!(...) 
failed (MAIN)" + return benders_data + end + if !parse!(benders_data.sub, filename; kwargs...) + @error "[benders] `parse!(...) failed (SUB)" + return benders_data + end + + # Restore logging. + Logging.disable_logging(initial_log_lvl) + + # Scan for Decisions / non-Decisions. + _cname_non_decisions = [] + for (cname, component) in benders_data.main.ext[:iesopt].model.components + if component isa Decision + push!(benders_data.decisions, cname) + else + push!(_cname_non_decisions, cname) + end + end + + # Disable everything that is not a Decision in the main-problem. + @info "[benders] Modify main model" + for cname in _cname_non_decisions + delete!(benders_data.main.ext[:iesopt].model.components, cname) + end + + # Build main-problem. + @info "[benders] Building main model" + build!(benders_data.main) + + # Modify Decisions in sub-problem. + @info "[benders] Modifying sub model with fixed Decisions" + for comp_name in benders_data.decisions + component(benders_data.sub, comp_name).mode = :fixed + component(benders_data.sub, comp_name).cost = nothing + component(benders_data.sub, comp_name).fixed_cost = nothing + component(benders_data.sub, comp_name).fixed_value = 0.0 + end + + # Build sub-problem. + @info "[benders] Build sub model" + build!(benders_data.sub) + + # ====================== + # JuMP.@variable(benders_data.main, _x >= 0) + # JuMP.@variable(benders_data.sub, _x >= 0) + + # JuMP.@constraint(benders_data.main, _c, benders_data.main[:_x] >= component(benders_data.main, "invest1_1").var.value) + # JuMP.@constraint(benders_data.sub, _c, benders_data.sub[:_x] >= component(benders_data.sub, "invest1_1").var.value) + + # user_defined = Dict( + # :main => Set([:_x]), :sub => Set([:_x, :_c]) + # ) + # ====================== + + # Check for user-defined objects that need to be split. + objects = keys(JuMP.object_dictionary(benders_data.main)) + constraint_relaxed_in_sub = false + if length(objects) > 0 + @info "[benders] Found user-defined objects, starting partial delete; make sure to name ALL your objects, and that you are not accessing objects that may not be constructed in the sub-problem (Decisions)" n = + length(objects) + + for obj in objects + in_main = false + in_sub = false + + if !(obj in user_defined[:main]) + JuMP.delete(benders_data.main, benders_data.main[obj]) + JuMP.unregister(benders_data.main, obj) + elseif _obj_type(benders_data.main[obj]) === :var + in_main = true + end + + if !(obj in user_defined[:sub]) + JuMP.delete(benders_data.sub, benders_data.sub[obj]) + JuMP.unregister(benders_data.sub, obj) + elseif _obj_type(benders_data.sub[obj]) === :var + in_sub = true + else + JuMP.relax_with_penalty!(benders_data.sub, Dict(benders_data.sub[obj] => feasibility_penalty)) + constraint_relaxed_in_sub = true + end + + if in_main && in_sub + push!(benders_data.user_defined_variables, obj) + end + end + + if length(benders_data.user_defined_variables) > 0 + @info "[benders] Duplicated user-defined variables found; will be controlled by main-problem; relaxing potential integrality in sub-problem" n = + length(benders_data.user_defined_variables) + JuMP.relax_integrality(benders_data.sub) + + if constraint_relaxed_in_sub + @warn "[benders] At least one constraint in the sub-problem was relaxed with penalized slacks (penalty $(feasibility_penalty)) to try to achieve feasibility" + end + end + end + + # Add the new variable and modify the objective of the main-problem. 
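+    # The main-problem then reads `min c'x + θ` over the Decision values x, where θ is an epigraph variable
+    # bounded below by `initial_lb` and tightened by the optimality cuts `θ >= obj_sub + dual' * (x - x_trial)`
+    # generated in `_cb_benders` / `_iterative_benders` below.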
+ @info "[benders] Modify main model and add initial cut" + @variable(benders_data.main, θ, lower_bound = benders_data.initial_lower_bound) + @objective(benders_data.main, Min, JuMP.objective_function(benders_data.main) + θ) + + # Permanently silence sub-problem. + JuMP.set_silent(benders_data.sub) + + # Check constraints safety. + if !isempty(benders_data.main.ext[:iesopt].aux.constraint_safety_penalties) + @info "[benders] Relaxing constraints based on constraint_safety (MAIN)" + benders_data.main.ext[:constraint_safety_expressions] = JuMP.relax_with_penalty!( + benders_data.main, + Dict(k => v.penalty for (k, v) in benders_data.main.ext[:iesopt].aux.constraint_safety_penalties), + ) + end + if !isempty(benders_data.sub.ext[:iesopt].aux.constraint_safety_penalties) + @info "[benders] Relaxing constraints based on constraint_safety (SUB)" + benders_data.sub.ext[:constraint_safety_expressions] = JuMP.relax_with_penalty!( + benders_data.sub, + Dict(k => v.penalty for (k, v) in benders_data.sub.ext[:iesopt].aux.constraint_safety_penalties), + ) + end + + # Choose the correct approach to handle the main-problem. + if _is_lp(benders_data.main) + @info "[benders] LP main detected, starting iterative mode" + _iterative_benders(benders_data) + elseif !JuMP.MOI.supports(JuMP.backend(benders_data.main), JuMP.MOI.LazyConstraintCallback()) + @warn "[benders] Solver does not support lazy callbacks, forcing iterative mode with possibly lower performance" + @info "[benders] Starting iterative mode" + _iterative_benders(benders_data) + else + @info "[benders] MILP main detected, starting callback mode" + if (benders_data.max_iterations > 0) || (benders_data.max_time_s > 0) + @error "[benders] Callback mode does not support time/iteration limits currently" + end + + @info "[benders] Register callback for main model" + custom_callback = (cb_data) -> _cb_benders(benders_data, cb_data) + JuMP.set_attribute(benders_data.main, JuMP.MOI.LazyConstraintCallback(), custom_callback) + + @info "[benders] Start MILP optimize" + JuMP.optimize!(benders_data.main) + @info "[benders] Finished optimization" inner_iterations = benders_data.iteration cuts = benders_data.cuts + end + + return benders_data +end + +function _cb_benders(benders_data::BendersData, cb_data::Any) + if JuMP.callback_node_status(cb_data, benders_data.main) != JuMP.MOI.CALLBACK_NODE_STATUS_INTEGER + # todo: is this actually better? + # return + end + + benders_data.iteration += 1 + + # Get the current solution from the main-problem. + current_decisions = Dict( + comp_name => JuMP.callback_value(cb_data, component(benders_data.main, comp_name).var.value) for + comp_name in benders_data.decisions + ) + current_user_defined_variables = Dict( + obj => JuMP.callback_value.(cb_data, benders_data.main[obj]) for obj in benders_data.user_defined_variables + ) + + # Update the sub-problem. + for (comp_name, value) in current_decisions + JuMP.fix(component(benders_data.sub, comp_name).var.value, value; force=true) + end + for (obj, value) in current_user_defined_variables + JuMP.fix.(benders_data.sub[obj], value; force=true) + end + + # Solve the sub-problem. + JuMP.optimize!(benders_data.sub) + @assert JuMP.result_count(benders_data.sub) != 0 "could not solve sub-problem" + + # Calculate objective bounds & gap. + obj_sub = JuMP.objective_value(benders_data.sub) + obj_lb = JuMP.callback_value(cb_data, JuMP.objective_function(benders_data.main)) + obj_ub = obj_lb - JuMP.callback_value(cb_data, benders_data.main[:θ]) + obj_sub + rel_gap = (obj_ub != 0.0) ? 
abs((obj_ub - obj_lb) / obj_ub) : (obj_lb == 0 ? 0.0 : Inf) + + if rel_gap <= benders_data.max_rel_gap + return + end + + # Add the new constraint. + if length(benders_data.user_defined_variables) > 0 + @warn "[benders] NOT FULLY IMPLEMENTED (for non arrays, ...)" maxlog = 1 + user_sum = sum( + sum( + JuMP.reduced_cost.(benders_data.sub[obj].data) .* + (benders_data.main[obj].data .- current_user_defined_variables[obj].data), + ) for obj in benders_data.user_defined_variables + ) + else + user_sum = 0 + end + cut = JuMP.@build_constraint( + benders_data.main[:θ] >= + obj_sub + + sum( + extract_result(benders_data.sub, comp_name, "value"; mode="dual") * + (component(benders_data.main, comp_name).var.value - value) for (comp_name, value) in current_decisions + ) + + user_sum + ) + + JuMP.MOI.submit(benders_data.main, JuMP.MOI.LazyConstraint(cb_data), cut) + benders_data.cuts += 1 + + return +end + +function _iterative_benders(benders_data::BendersData; exploration_iterations=0) + # Silence the main-problem since it will be called often. + JuMP.set_silent(benders_data.main) + + println("") + println(" iter. | lower bnd. | upper bnd. | rel. gap | time (s) ") + println("---------+--------------+--------------+--------------+--------------") + t_start = Dates.now() + + rel_gap = Inf + best_ub = Inf + while true + benders_data.iteration += 1 + + current_decisions = Dict() + current_user_defined_variables = Dict() + exploration = false + + if (benders_data.iteration <= exploration_iterations) && isempty(benders_data.user_defined_variables) + # Random values in the beginning to explore. + for comp_name in benders_data.decisions + comp = component(benders_data.main, comp_name) + lb = !isnothing(comp.lb) ? comp.lb : -500 + ub = !isnothing(comp.ub) ? comp.ub : 500 + current_decisions[comp_name] = lb + rand() * (ub - lb) + end + + exploration = true + else + # Solve the main-problem. + JuMP.optimize!(benders_data.main) + + # Obtain the solution from the main-problem. + current_decisions = Dict( + comp_name => extract_result(benders_data.main, comp_name, "value"; mode="value") for + comp_name in benders_data.decisions + ) + current_user_defined_variables = + Dict(obj => JuMP.value.(benders_data.main[obj]) for obj in benders_data.user_defined_variables) + end + + # Update the sub-problem. + for (comp_name, value) in current_decisions + JuMP.fix(component(benders_data.sub, comp_name).var.value, value; force=true) + end + for (obj, value) in current_user_defined_variables + JuMP.fix.(benders_data.sub[obj], value; force=true) + end + + # Solve the sub-problem. + JuMP.optimize!(benders_data.sub) + + if JuMP.result_count(benders_data.sub) == 0 + @error "[benders] Could not solve sub-problem" + return benders_data + end + + exploration_sum = 0.0 + if exploration + exploration_dict = Dict( + component(benders_data.main, comp_name).var.value => current_decisions[comp_name] for + comp_name in benders_data.decisions + ) + exploration_dict[benders_data.main[:θ]] = 0.0 + exploration_sum = JuMP.value(_var -> exploration_dict[_var], JuMP.objective_function(benders_data.main)) + end + + # Calculate bounds and current gap. + obj_sub = JuMP.objective_value(benders_data.sub) + obj_lb = exploration ? exploration_sum : JuMP.objective_value(benders_data.main) + obj_ub = obj_lb - (exploration ? 0.0 : JuMP.value(benders_data.main[:θ])) + obj_sub + best_ub = min(best_ub, obj_ub) + rel_gap = exploration ? Inf : ((best_ub != 0.0) ? abs((best_ub - obj_lb) / best_ub) : (obj_lb == 0 ? 
0.0 : Inf)) + + # Info print + t_elapsed = Dates.Millisecond((Dates.now() - t_start)).value / 1000.0 + _print_iteration(benders_data.iteration, obj_lb, best_ub, rel_gap, t_elapsed) + + # Check abortion criteria. + if (benders_data.max_rel_gap > 0) && (rel_gap <= benders_data.max_rel_gap) + println("") + @info "[benders] Terminating iterative optimization" reason = "relative gap reached" time = + round(t_elapsed; digits=2) iterations = benders_data.iteration gap = rel_gap obj = obj_ub best_ub + break + end + if (benders_data.max_time_s > 0) && (t_elapsed >= benders_data.max_time_s) + println("") + @info "[benders] Terminating iterative optimization" reason = "time limit reached" time = + round(t_elapsed; digits=2) iterations = benders_data.iteration gap = rel_gap obj = obj_ub best_ub + break + end + if (benders_data.max_iterations > 0) && (benders_data.iteration >= benders_data.max_iterations) + println("") + @info "[benders] Terminating iterative optimization" reason = "iteration limit reached" time = + round(t_elapsed; digits=2) iterations = benders_data.iteration gap = rel_gap obj = obj_ub best_ub + break + end + + # Add the new constraint. + if length(benders_data.user_defined_variables) > 0 + @warn "[benders] NOT FULLY IMPLEMENTED (for non arrays, ...)" maxlog = 1 + user_sum = sum( + sum( + JuMP.reduced_cost.(benders_data.sub[obj].data) .* + (benders_data.main[obj].data .- current_user_defined_variables[obj].data), + ) for obj in benders_data.user_defined_variables + ) + else + user_sum = 0 + end + cut = JuMP.@constraint( + benders_data.main, + benders_data.main[:θ] >= + obj_sub + + sum( + extract_result(benders_data.sub, comp_name, "value"; mode="dual") * + (component(benders_data.main, comp_name).var.value - value) for + (comp_name, value) in current_decisions + ) + + user_sum + ) + benders_data.cuts += 1 + + # todo: save cuts + # todo: track cuts and disable them after X iterations of not being binding + end +end diff --git a/src/opt/opt.jl b/src/opt/opt.jl new file mode 100644 index 0000000..ff4e3d3 --- /dev/null +++ b/src/opt/opt.jl @@ -0,0 +1,2 @@ +include("benders.jl") +include("stochastic.jl") diff --git a/src/opt/sddp.jl b/src/opt/sddp.jl new file mode 100644 index 0000000..1303019 --- /dev/null +++ b/src/opt/sddp.jl @@ -0,0 +1,427 @@ +using IESopt +import JuMP +import HiGHS +import SDDP +import Random +using DataFrames, CSV + +model = JuMP.read_from_file("subproblem_127.mof.json") +print(model) + +# ====================================================================================================================== +# [[ COMPLEX ]] +# ====================================================================================================================== + +Random.seed!(1234) +_cost = rand(8760 * 4) .* 100.0 .+ 250.0 +_demand = max.(0.0, randn(8760 * 4) .* 2.0 .+ 8.5) +_inflow = max.(0.0, randn(8760 * 4) ./ 4.0 .+ 1.5) + +df = DataFrame(; cost=_cost, demand=_demand, inflow=_inflow) +CSV.write("examples/files/40/data.csv", df) + +#### +model = JuMP.Model(HiGHS.Optimizer) +generate!( + model, + "examples/40_sddp_complex.iesopt.yaml"; + offset=0, + count=8760 * 4, + state_initial=0, + parametric=true, + build_cost1=500.0, + build_cost2=5000.0, +) +JuMP.fix.(component(model, "inflow").var.aux_value, _inflow; force=true); +JuMP.fix.(component(model, "demand").var.aux_value, _demand; force=true); +true_cost = [] +for t in 1:365 + append!(true_cost, [_cost[t * 24 * 4] for _ in 1:(24 * 4)]) +end +objs = _iesopt(model).model.objectives +JuMP.@objective( + model, + Min, + 
sum(component(model, "thermal").var.aux_value[t] * true_cost[t] for t in 1:(8760 * 4)) + + objs["build1_value"].func + + objs["build2_value"].func +); +optimize!(model) # 7.3446044558e+07 (15s) +extract_result(model, "build1", "value"; mode="value") # 10 +extract_result(model, "build2", "value"; mode="value") # 100 +#### + +sddp_model = SDDP.LinearPolicyGraph(; stages=365, sense=:Min, lower_bound=0.0, optimizer=HiGHS.Optimizer) do model, t + JuMP.@variable(model, 0 <= x_storage, SDDP.State, initial_value = 0) + JuMP.@variable(model, 0 <= x_storage_cap, SDDP.State, initial_value = 0) + + generate!( + model, + "examples/40_sddp_complex.iesopt.yaml"; + offset=(t - 1) * 24 * 4, + count=24 * 4, + state_initial="null", + verbosity=false, + parametric=true, + build_cost1=500.0, + build_cost2=5000.0, + ) + + JuMP.@constraint(model, x_storage.in <= x_storage_cap.in) + JuMP.@constraint(model, x_storage.out <= x_storage_cap.in) + # JuMP.@constraint(model, component(model, "reservoir").var.state[1] <= x_storage_cap.in) + + JuMP.@constraint(model, component(model, "reservoir").var.state[1] == x_storage.in) + JuMP.@constraint( + model, + -JuMP.constraint_object(component(model, "reservoir").con.last_state_lb).func == x_storage.out + ) + + if t == 1 + JuMP.@constraint( + model, + x_storage_cap.out == + x_storage_cap.in + component(model, "build1").var.value + component(model, "build2").var.value + ) + else + JuMP.@constraint(model, x_storage_cap.out == x_storage_cap.in) + end + + Ω = [1.0] + P = [1.0] + SDDP.parameterize(model, Ω, P) do ω + return + end + + JuMP.fix.(component(model, "inflow").var.aux_value, _inflow[((t - 1) * 24 * 4 + 1):(t * 24 * 4)]; force=true) + JuMP.fix.(component(model, "demand").var.aux_value, _demand[((t - 1) * 24 * 4 + 1):(t * 24 * 4)]; force=true) + objs = _iesopt(model).model.objectives + + if t == 1 + SDDP.@stageobjective( + model, + objs["thermal"].func * _cost[t * 24 * 4] + objs["build1_value"].func + objs["build2_value"].func + ) + else + SDDP.@stageobjective(model, objs["thermal"].func * _cost[t * 24 * 4]) + end + + return +end + +SDDP.train(sddp_model; iteration_limit=1, add_to_existing_cuts=true) +# 1 iteration, 7.344658e+07, 1.39s, 730 solves + +simulations = SDDP.simulate( + sddp_model, + 50, + [:x_storage, :x_storage_cap]; + custom_recorders=Dict{Symbol, Function}( + :build => + (model::JuMP.Model) -> + JuMP.value(component(model, "build1").var.value) + JuMP.value(component(model, "build2").var.value), + :thermal => (model::JuMP.Model) -> JuMP.value(component(model, "thermal").var.aux_value[1]), + ), +) + +sum(map(simulations[1]) do node + return node[:build] +end) + +plt = SDDP.SpaghettiPlot(simulations) +SDDP.add_spaghetti(plt; title="Reservoir volume", ylabel="MWh", interpolate="step") do data + return data[:x_storage].out +end +SDDP.add_spaghetti(plt; title="Reservoir build", ylabel="MWh", interpolate="step") do data + return data[:build] +end +SDDP.add_spaghetti(plt; title="Stage objective", ylabel="EUR") do data + return data[:stage_objective] +end +SDDP.plot(plt, "spaghetti_plot_complex.html") + +# ====================================================================================================================== +# [[ PATHWAY ]] +# ====================================================================================================================== + +sddp_model = SDDP.LinearPolicyGraph(; stages=20, sense=:Min, lower_bound=0.0, optimizer=HiGHS.Optimizer) do model, t + JuMP.@variable(model, 0 <= x_storage, SDDP.State, initial_value = 5) + 
JuMP.@variable(model, 0 <= x_storage_cap, SDDP.State, initial_value = 10) + + generate!( + model, + "examples/39_sddp_path.iesopt.yaml"; + offset=(t - 1) * 10, + count=10, + state_initial="null", + verbosity=false, + parametric=true, + build_cost1=1.0, + build_cost2=1.0, + ) + + JuMP.@constraint(model, x_storage.in <= x_storage_cap.in) + JuMP.@constraint(model, x_storage.out <= x_storage_cap.in) + # JuMP.@constraint(model, component(model, "reservoir").var.state[1] <= x_storage_cap.in) + + JuMP.@constraint(model, component(model, "reservoir").var.state[1] == x_storage.in) + JuMP.@constraint( + model, + -JuMP.constraint_object(component(model, "reservoir").con.last_state_lb).func == x_storage.out + ) + + JuMP.@constraint( + model, + x_storage_cap.out == + x_storage_cap.in + component(model, "build1").var.value + component(model, "build2").var.value + ) + + _z = 0 + if t == 20 + _z = JuMP.@variable(model, lower_bound = 0) + JuMP.@constraint(model, x_storage.out + _z >= 5) + end + + v = JuMP.fix_value(component(model, "inflow").var.aux_value[1]) + objs = _iesopt(model).model.objectives + + Ω = [1.0] + P = [1.0] + SDDP.parameterize(model, Ω, P) do ω + return + end + + SDDP.@stageobjective( + model, + objs["thermal"].func + objs["build1_value"].func + 2 * objs["build2_value"].func + _z * 1e6 + ) + + return +end + +SDDP.train(sddp_model; iteration_limit=100) + +simulations = SDDP.simulate( + sddp_model, + 100, + [:x_storage, :x_storage_cap]; + custom_recorders=Dict{Symbol, Function}( + :build => + (model::JuMP.Model) -> + JuMP.value(component(model, "build1").var.value) + JuMP.value(component(model, "build2").var.value), + :thermal => (model::JuMP.Model) -> JuMP.value(component(model, "thermal").var.aux_value[1]), + ), +) + +plt = SDDP.SpaghettiPlot(simulations) +SDDP.add_spaghetti(plt; title="Reservoir size", ylabel="MWh", interpolate="step") do data + return data[:x_storage_cap].out +end +SDDP.add_spaghetti(plt; title="Reservoir build", ylabel="MWh", interpolate="step") do data + return data[:build] +end +SDDP.add_spaghetti(plt; title="Stage objective", ylabel="EUR", interpolate="step") do data + return data[:stage_objective] +end +SDDP.plot(plt, "spaghetti_plot_pathway.html") + +# ====================================================================================================================== +# [[ STOCHASTIC PATHWAY ]] +# ====================================================================================================================== + +sddp_model = SDDP.LinearPolicyGraph(; stages=20, sense=:Min, lower_bound=0.0, optimizer=HiGHS.Optimizer) do model, t + JuMP.@variable(model, 0 <= x_storage, SDDP.State, initial_value = 5) + JuMP.@variable(model, 0 <= x_storage_cap, SDDP.State, initial_value = 10) + + generate!( + model, + "examples/39_sddp_path.iesopt.yaml"; + offset=(t - 1) * 10, + count=10, + state_initial="null", + verbosity=false, + parametric=true, + build_cost1=1.0, + build_cost2=1.0, + ) + + JuMP.@constraint(model, x_storage.in <= x_storage_cap.in) + JuMP.@constraint(model, x_storage.out <= x_storage_cap.in) + # JuMP.@constraint(model, component(model, "reservoir").var.state[1] <= x_storage_cap.in) + + JuMP.@constraint(model, component(model, "reservoir").var.state[1] == x_storage.in) + JuMP.@constraint( + model, + -JuMP.constraint_object(component(model, "reservoir").con.last_state_lb).func == x_storage.out + ) + + JuMP.@constraint( + model, + x_storage_cap.out == + x_storage_cap.in + component(model, "build1").var.value + component(model, "build2").var.value + ) + + _z = 0 
+ if t == 20 + _z = JuMP.@variable(model, lower_bound = 0) + JuMP.@constraint(model, x_storage.out + _z >= 5) + end + + v = JuMP.fix_value(component(model, "inflow").var.aux_value[1]) + objs = _iesopt(model).model.objectives + + lower = -convert(Int64, floor(t / 5.0 * 2500)) + upper = -0.5 * lower + + Ω = [(capex=i,) for i in lower:10:upper] + P = [1.0 / length(Ω) for i in 1:length(Ω)] + + SDDP.parameterize(model, Ω, P) do ω + # JuMP.fix(component(model, "inflow").var.aux_value[1], max(0., v + ω.inflow); force=true) + # SDDP.@stageobjective(model, objs["thermal"].func + _z * 1e4) + SDDP.@stageobjective( + model, + objs["thermal"].func + + max(5, (t / 5.0 * 2500 + ω.capex) / 20) * (objs["build1_value"].func + 2 * objs["build2_value"].func) + + _z * 1e6 + ) + return + end + + return +end + +SDDP.train(sddp_model; iteration_limit=500) + +simulations = SDDP.simulate( + sddp_model, + 500, + [:x_storage, :x_storage_cap]; + custom_recorders=Dict{Symbol, Function}( + :build => + (model::JuMP.Model) -> + JuMP.value(component(model, "build1").var.value) + JuMP.value(component(model, "build2").var.value), + :thermal => (model::JuMP.Model) -> JuMP.value(component(model, "thermal").var.aux_value[1]), + ), +) + +plt = SDDP.SpaghettiPlot(simulations) +SDDP.add_spaghetti(plt; title="Reservoir size", ylabel="MWh") do data + return data[:x_storage_cap].out +end +SDDP.add_spaghetti(plt; title="Reservoir build", ylabel="MWh", interpolate="step") do data + return data[:build] +end +SDDP.add_spaghetti(plt; title="Stage objective", ylabel="EUR") do data + return data[:stage_objective] +end +SDDP.plot(plt, "spaghetti_plot_stochpathway.html") + +# ====================================================================================================================== +# [[ STOCHASTIC OPERATIONAL OPTIMIZATION ]] +# ====================================================================================================================== + +sddp_model = SDDP.LinearPolicyGraph(; stages=52, sense=:Min, lower_bound=0.0, optimizer=HiGHS.Optimizer) do model, t + JuMP.@variable(model, 0 <= x_storage <= 320, SDDP.State, initial_value = 300) + generate!( + model, + "examples/38_sddp_operational.iesopt.yaml"; + offset=(t - 1), + count=1, + state_initial="null", + verbosity=false, + parametric=true, + ) + + JuMP.@constraint(model, component(model, "reservoir").var.state[1] == x_storage.in) + JuMP.@constraint( + model, + -JuMP.constraint_object(component(model, "reservoir").con.last_state_lb).func == x_storage.out + ) + + _z = 0 + if t == 52 + _z = JuMP.@variable(model, lower_bound = 0) + JuMP.@constraint(model, x_storage.out + _z >= 300) + end + + v = JuMP.fix_value(component(model, "inflow").var.aux_value[1]) + objs = _iesopt(model).model.objectives + + Ω = [ + ( + inflow=round((rand() * 2.0 - 1.0) + ((rand() <= 0.02) * 20); digits=2), + fuel_multiplier=round(1.0 + rand() * 0.25; digits=2), + ) for _ in 1:15 + ] + P = rand(15) + P /= sum(P) + + SDDP.parameterize(model, Ω, P) do ω + JuMP.fix(component(model, "inflow").var.aux_value[1], max(0.0, v + ω.inflow); force=true) + SDDP.@stageobjective(model, ω.fuel_multiplier * objs["thermal"].func + _z * 1e4) + return + end + + return +end + +Ω = [ + (add_inflow_value=0.01, mult_thermal_cost=1.18), + (add_inflow_value=-0.05, mult_thermal_cost=1.14), + (add_inflow_value=0.43, mult_thermal_cost=1.07), + (add_inflow_value=-0.10, mult_thermal_cost=1.06), + (add_inflow_value=0.00, mult_thermal_cost=1.20), + (add_inflow_value=0.49, mult_thermal_cost=1.23), + (add_inflow_value=0.94, 
mult_thermal_cost=1.16), + (add_inflow_value=-0.22, mult_thermal_cost=1.18), + (add_inflow_value=0.00, mult_thermal_cost=1.00), + (add_inflow_value=0.42, mult_thermal_cost=1.08), + (add_inflow_value=-1.00, mult_thermal_cost=1.23), + (add_inflow_value=-0.41, mult_thermal_cost=1.18), + (add_inflow_value=0.06, mult_thermal_cost=1.14), + (add_inflow_value=-0.17, mult_thermal_cost=1.13), + (add_inflow_value=-0.93, mult_thermal_cost=1.05), +] + +P = [0.023, 0.143, 0.020, 0.053, 0.015, 0.013, 0.075, 0.105, 0.083, 0.067, 0.008, 0.072, 0.080, 0.141, 0.102] + +SDDP.parameterize(model, Ω, P) do ω + set_parameter("inflow", "value", inflow_value + add_inflow_value) + SDDP.@stageobjective(model, ω.mult_thermal_cost * objs["thermal"].func) +end + +SDDP.train(sddp_model; iteration_limit=500) + +# - distribution for cost factors (capex / opex) +# - changing cost factors (capex / opex) over time (E changing) +# - distributional noise on time series (profile cost / values) + +# stoch. betriebsoptimierung : CHECK +# pathway optimization +# stoch. pathway optimization + +simulations = SDDP.simulate( + sddp_model, + 50, + [:x_storage]; + custom_recorders=Dict{Symbol, Function}( + :thermal => (model::JuMP.Model) -> JuMP.value(component(model, "thermal").var.aux_value[1]), + :spillage => (model::JuMP.Model) -> JuMP.value(component(model, "spill").var.aux_value[1]), + ), +) + +plt = SDDP.SpaghettiPlot(simulations) +SDDP.add_spaghetti(plt; title="Reservoir volume", ylabel="MWh", interpolate="step") do data + return data[:x_storage].out +end +SDDP.add_spaghetti(plt; title="Thermal generation", ylabel="MWh", interpolate="step") do data + return data[:thermal] +end +SDDP.add_spaghetti(plt; title="Spillage", ylabel="MWh", interpolate="step") do data + return data[:spillage] +end +SDDP.add_spaghetti(plt; title="Stage objective", ylabel="EUR", interpolate="step") do data + return data[:stage_objective] +end +SDDP.plot(plt, "spaghetti_plot_stochopt.html") diff --git a/src/opt/stochastic.jl b/src/opt/stochastic.jl new file mode 100644 index 0000000..4dee6dd --- /dev/null +++ b/src/opt/stochastic.jl @@ -0,0 +1,352 @@ +# todo: +# - needs the same global parameters as the usual optimize! +# - does not work with binary variables in addons... + +using ProgressMeter + +mutable struct StochasticData4 + main::JuMP.Model + subs::Vector{JuMP.Model} + + scenarios::Vector{Dict} + decisions::Vector{String} + + max_rel_gap::Float64 + max_iterations::Int64 + max_time_s::Int64 + + initial_lower_bound::Float64 + + iteration::Int64 + cuts::Int64 + + user_defined_variables::Set{Symbol} + + function StochasticData4(main::JuMP.Model) + return new(main, Vector{JuMP.Model}(), Vector{Dict}(), Vector{String}(), 1e-4, -1, -1, 0.0, 0, 0, Set()) + end + + function StochasticData4( + main::JuMP.Model, + max_rel_gap::Float64, + max_iterations::Int64, + max_time_s::Int64, + initial_lb::Float64, + ) + return new( + main, + Vector{JuMP.Model}(), + Vector{Dict}(), + Vector{String}(), + max_rel_gap, + max_iterations, + max_time_s, + initial_lb, + 0, + 0, + Set(), + ) + end +end +StochasticData = StochasticData4 + +""" + function stochastic( + optimizer::DataType, + filename::String; + opt_attr_main::Dict=Dict(), + opt_attr_sub::Dict=Dict(), + rel_gap::Float64=1e-4, + max_iterations::Int64=-1, + max_time_s::Int64=-1, + suppress_log_lvl::Logging.LogLevel=Logging.Warn, + ) + +TODO! 
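+
+Until this is fleshed out, a minimal usage sketch (not yet validated; it mirrors the `benders` call above, with the
+config path being a placeholder):
+
+```
+import IESopt
+import HiGHS
+
+IESopt.stochastic(HiGHS.Optimizer, "model/config.iesopt.yaml"; rel_gap=1e-3, max_iterations=100)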
+``` +""" +function stochastic( + optimizer::DataType, + filename::String; + opt_attr_main::Dict=Dict(), + opt_attr_sub::Dict=Dict(), + rel_gap::Float64=1e-4, + max_iterations::Int64=-1, + max_time_s::Int64=-1, + initial_lb::Float64=0.0, + suppress_log_lvl::Logging.LogLevel=Logging.Warn, + user_defined::Dict{Symbol, Set{Symbol}}=Dict(:main => Set{Symbol}(), :sub => Set{Symbol}()), + feasibility_penalty::Float64=1e6, + kwargs..., +) + _stoch_warning = "Using automatic Benders decomposition for Decision optimization is an advanced feature, that \ + requires a carefully crafted model. Ensure that you are familiar with what is necessary or \ + consult with someone before trying this. The most important points are: (1) ensured \ + feasibility of the sub-problem, (2) the sub-problem being pure-LP, (3) the correct solver \ + using advanced MILP formulations in the main-problem, (4) a correct `problem_type` setting \ + in the config file corresponding to the problem type of the main-problem. Furthermore, result \ + extraction has to be done manually, and model handling / return values look differently." + @warn "[stochastic] $_stoch_warning" + + _model_main = JuMP.direct_model(JuMP.optimizer_with_attributes(optimizer, opt_attr_main...)) + stochastic_data = StochasticData(_model_main, rel_gap, max_iterations, max_time_s, initial_lb) + + @info "[stochastic] Parsing model into main" filename + + # Update logging. + initial_log_lvl = Logging.min_enabled_level(Logging.current_logger()) + Logging.disable_logging(suppress_log_lvl) + + # Do the parse for the main-problem. + if !parse!(stochastic_data.main, filename; kwargs...) + @error "[stochastic] `parse!(...) failed (MAIN)" + return stochastic_data + end + + # Restore logging. + Logging.disable_logging(initial_log_lvl) + + @info "[stochastic] Preparing scenarios" filename + if stochastic_data.main.ext[:stochastic][:base_config]["scenarios"] == "all" + _values = values(stochastic_data.main.ext[:stochastic][:base_config]["parameters"]) + _keys = keys(stochastic_data.main.ext[:stochastic][:base_config]["parameters"]) + _zipped = vec(collect(Iterators.product(_values...))) + + for scenario in _zipped + push!(stochastic_data.scenarios, Dict(zip(_keys, scenario))) + end + else + @error "[stochastic] Scenario mode currently not supported" mode = + stochastic_data.main.ext[:stochastic][:base_config]["scenarios"] + return stochastic_data + end + @info "[stochastic] Scenarios prepared" number = length(stochastic_data.scenarios) + + @info "[stochastic] Parsing model into sub-problems" filename + + # Update logging. + initial_log_lvl = Logging.min_enabled_level(Logging.current_logger()) + Logging.disable_logging(suppress_log_lvl) + + for scenario in stochastic_data.scenarios + _model_sub = JuMP.direct_model(JuMP.optimizer_with_attributes(optimizer, opt_attr_sub...)) + if !parse!(_model_sub, filename; Dict(Symbol(k) => v for (k, v) in merge(kwargs, scenario))...) + @error "[stochastic] `parse!(...) failed (SUB)" + return stochastic_data + end + push!(stochastic_data.subs, _model_sub) + end + + # Restore logging. + Logging.disable_logging(initial_log_lvl) + + # Scan for Decisions / non-Decisions. + _cname_non_decisions = [] + for (cname, component) in stochastic_data.main.ext[:iesopt].model.components + if component isa Decision + push!(stochastic_data.decisions, cname) + else + push!(_cname_non_decisions, cname) + end + end + + # Disable everything that is not a Decision in the main-problem. 
+ @info "[stochastic] Modify main model" + for cname in _cname_non_decisions + delete!(stochastic_data.main.ext[:iesopt].model.components, cname) + end + + # Build main-problem. + @info "[stochastic] Building main model" + build!(stochastic_data.main) + + # Modify Decisions in sub-problem. + @info "[stochastic] Modifying sub models with fixed Decisions" + + # Update logging. + initial_log_lvl = Logging.min_enabled_level(Logging.current_logger()) + Logging.disable_logging(suppress_log_lvl) + + @showprogress "Modifying sub models: " for sub in stochastic_data.subs + for comp_name in stochastic_data.decisions + component(sub, comp_name).mode = :fixed + component(sub, comp_name).cost = nothing + component(sub, comp_name).fixed_cost = nothing + component(sub, comp_name).fixed_value = 0.0 + end + end + + # Restore logging. + Logging.disable_logging(initial_log_lvl) + + # Build sub-problems. + @info "[stochastic] Build sub models" + + # Update logging. + initial_log_lvl = Logging.min_enabled_level(Logging.current_logger()) + Logging.disable_logging(suppress_log_lvl) + + @showprogress "Building sub models: " for sub in stochastic_data.subs + build!(sub) + end + + # Restore logging. + Logging.disable_logging(initial_log_lvl) + + @warn "[stochastic] Stochastic optimization does currently not support user defined functionality; if you are using Addons reconsider" + + # Add the new variable and modify the objective of the main-problem. + @info "[stochastic] Modify main model and add initial cut" + n_subs = length(stochastic_data.subs) + @variable(stochastic_data.main, θ[s=1:n_subs], lower_bound = stochastic_data.initial_lower_bound) + @objective(stochastic_data.main, Min, JuMP.objective_function(stochastic_data.main) + sum(θ) / n_subs) + + # Permanently silence sub-problems. + for sub in stochastic_data.subs + JuMP.set_silent(sub) + end + + # Check constraints safety. + if !isempty(stochastic_data.main.ext[:iesopt].aux.constraint_safety_penalties) + @info "[stochastic] Relaxing constraints based on constraint_safety (MAIN)" + stochastic_data.main.ext[:constraint_safety_expressions] = JuMP.relax_with_penalty!( + stochastic_data.main, + Dict(k => v.penalty for (k, v) in stochastic_data.main.ext[:iesopt].aux.constraint_safety_penalties), + ) + end + if !isempty(stochastic_data.subs[1].ext[:iesopt].aux.constraint_safety_penalties) + @info "[stochastic] Relaxing constraints based on constraint_safety (SUBs)" + @showprogress "Modifying sub models: " for sub in stochastic_data.subs + sub.ext[:constraint_safety_expressions] = JuMP.relax_with_penalty!( + sub, + Dict(k => v.penalty for (k, v) in sub.ext[:iesopt].aux.constraint_safety_penalties), + ) + end + end + + # Choose the correct approach to handle the main-problem. 
+ if _is_lp(stochastic_data.main) + @info "[stochastic] LP main detected, starting iterative mode" + _iterative_stochastic(stochastic_data) + elseif !JuMP.MOI.supports(JuMP.backend(stochastic_data.main), JuMP.MOI.LazyConstraintCallback()) + @warn "[stochastic] Solver does not support lazy callbacks, forcing iterative mode with possibly lower performance" + @info "[stochastic] Starting iterative mode" + _iterative_stochastic(stochastic_data) + else + @warn "[stochastic] Callback mode is currently not implemented, defaulting down to iterative mode" + @info "[stochastic] Starting iterative mode" + _iterative_stochastic(stochastic_data) + + # @info "[stochastic] MILP main detected, starting callback mode" + # if (stochastic_data.max_iterations > 0) || (stochastic_data.max_time_s > 0) + # @error "[stochastic] Callback mode does not support time/iteration limits currently" + # end + + # @info "[stochastic] Register callback for main model" + # custom_callback = (cb_data) -> _cb_stochastic(stochastic_data, cb_data) + # JuMP.set_attribute(stochastic_data.main, JuMP.MOI.LazyConstraintCallback(), custom_callback) + + # @info "[stochastic] Start MILP optimize" + # JuMP.optimize!(stochastic_data.main) + # @info "[stochastic] Finished optimization" inner_iterations = stochastic_data.iteration cuts = stochastic_data.cuts + end + + return stochastic_data +end + +function _cb_stochastic(benders_data::BendersData, cb_data::Any) + @error "[stochastic] Callback mode currently not implemented" +end + +function _iterative_stochastic(stochastic_data::StochasticData) + # Silence the main-problem since it will be called often. + JuMP.set_silent(stochastic_data.main) + + println("") + println(" iter. | lower bnd. | upper bnd. | rel. gap | time (s) ") + println("---------+--------------+--------------+--------------+--------------") + t_start = Dates.now() + + rel_gap = Inf + best_ub = Inf + while true + stochastic_data.iteration += 1 + + current_decisions = Dict() + + # Solve the main-problem. + JuMP.optimize!(stochastic_data.main) + + # Obtain the solution from the main-problem. + current_decisions = Dict( + comp_name => extract_result(stochastic_data.main, comp_name, "value"; mode="value") for + comp_name in stochastic_data.decisions + ) + + # Update the sub-problems. + for sub in stochastic_data.subs + for (comp_name, value) in current_decisions + JuMP.fix(component(sub, comp_name).var.value, value; force=true) + end + end + + # Solve the sub-problems. + for i in eachindex(stochastic_data.subs) + sub = stochastic_data.subs[i] + JuMP.optimize!(sub) + + if JuMP.result_count(sub) == 0 + @error "[stochastic] Could not solve sub-problem" scenario = stochastic_data.scenarios[i] + return stochastic_data + end + end + + # Calculate bounds and current gap. + obj_subs = collect(JuMP.objective_value(sub) for sub in stochastic_data.subs) + obj_lb = JuMP.objective_value(stochastic_data.main) + obj_ub = obj_lb - sum(JuMP.value.(stochastic_data.main[:θ])) + sum(obj_subs) + best_ub = min(best_ub, obj_ub) + rel_gap = ((best_ub != 0.0) ? abs((best_ub - obj_lb) / best_ub) : (obj_lb == 0 ? 0.0 : Inf)) + + # Info print + t_elapsed = Dates.Millisecond((Dates.now() - t_start)).value / 1000.0 + _print_iteration(stochastic_data.iteration, obj_lb, best_ub, rel_gap, t_elapsed) + + # Check abortion criteria. 
+ if (stochastic_data.max_rel_gap > 0) && (rel_gap <= stochastic_data.max_rel_gap) + println("") + @info "[stochastic] Terminating iterative optimization" reason = "relative gap reached" time = + round(t_elapsed; digits=2) iterations = stochastic_data.iteration gap = rel_gap obj = obj_ub best_ub + break + end + if (stochastic_data.max_time_s > 0) && (t_elapsed >= stochastic_data.max_time_s) + println("") + @info "[stochastic] Terminating iterative optimization" reason = "time limit reached" time = + round(t_elapsed; digits=2) iterations = stochastic_data.iteration gap = rel_gap obj = obj_ub best_ub + break + end + if (stochastic_data.max_iterations > 0) && (stochastic_data.iteration >= stochastic_data.max_iterations) + println("") + @info "[stochastic] Terminating iterative optimization" reason = "iteration limit reached" time = + round(t_elapsed; digits=2) iterations = stochastic_data.iteration gap = rel_gap obj = obj_ub best_ub + break + end + + # Add the new constraint. + for i in 1:length(stochastic_data.subs) + cut = JuMP.@constraint( + stochastic_data.main, + stochastic_data.main[:θ][i] >= + obj_subs[i] + sum( + extract_result(stochastic_data.subs[i], comp_name, "value"; mode="dual") * + (component(stochastic_data.main, comp_name).var.value - value) for + (comp_name, value) in current_decisions + ) + ) + stochastic_data.cuts += 1 + end + + # todo: save cuts + # todo: track cuts and disable them after X iterations of not being binding + end +end diff --git a/src/parser.jl b/src/parser.jl new file mode 100644 index 0000000..acd0a5e --- /dev/null +++ b/src/parser.jl @@ -0,0 +1,542 @@ +function _parse_model!(model::JuMP.Model, filename::String, global_parameters::Dict; verbosity=nothing) + filename = normpath(filename) + model.ext[:_iesopt_wd] = dirname(filename) + model.ext[:_iesopt_verbosity] = verbosity + model.ext[:iesopt] = _IESoptData(YAML.load_file(filename; dicttype=Dict{String, Any})) + + # Parse the overall global configuration (e.g., replacing parameters). + (@profile _parse_global_specification!(model, global_parameters)) || return false + + # Construct the final (internal) configuration structure. + _iesopt(model).input.config = _Config(model) + + # Attach a logger now, so logging can be suppressed/modified for the remaining parsing code. + _attach_logger!(model) + + with_logger(_iesopt(model).logger) do + @info "IESopt.jl (core) | 2021 © AIT Austrian Institute of Technology GmbH" authors = "Stefan Strömer, Daniel Schwabeneder, and contributors" version = + pkgversion(@__MODULE__) top_level_config = basename(filename) path = abspath(dirname(filename)) + if !isempty(_iesopt(model).input.parameters) + @info "Global parameters loaded" Dict(Symbol(k) => v for (k, v) in _iesopt(model).input.parameters)... + end + + @warn "You are using a version >= 0.6.0, which is not stable due to heavy refactoring; consider using 0.5.2" + + # Pre-load all registered files. + merge!(_iesopt(model).input.files, @profile _parse_inputfiles(model, _iesopt_config(model).files.entries)) + if !isempty(_iesopt(model).input.files) + @info "Successfully read $(length(_iesopt(model).input.files)) input file(s)" + end + + description = get(_iesopt(model).input._tl_yaml, "components", Dict{String, Any}()) + + # Parse all snapshots. + @profile _parse_snapshots!(model) + + # Parse all carriers beforehand, since those are used during component parsing. + @profile _parse_carriers!(model, get(_iesopt(model).input._tl_yaml, "carriers", nothing)) + + # Scan for all templates. 
+ @profile _scan_all_templates(model) + + # Parse potential global addons + if haskey(_iesopt(model).input._tl_yaml, "addons") + merge!( + _iesopt(model).input.addons, + @profile _parse_global_addons(model, _iesopt(model).input._tl_yaml["addons"]) + ) + end + + # Parse potential external CSV files defining components. + @profile _parse_components_csv!(model, _iesopt(model).input._tl_yaml, description) + + # Fully flatten the model description before parsing. + @profile _flatten_model!(model, description) + merge!(_iesopt(model).aux._flattened_description, deepcopy(description)) + + # Construct the objectives container & add all registered objectives. + for (name, terms) in _iesopt_config(model).optimization.objective.functions + _iesopt(model).model.objectives[name] = + (terms=Set{JuMP.AffExpr}(), expr=JuMP.AffExpr(0.0), constants=Vector{Float64}()) + _iesopt(model).aux._obj_terms[name] = terms + end + + # Parse all components into a unified storage and keep a reference of "name=>id" matchings. + return (@profile _parse_components!(model, description)) + + # Construct the dictionary that holds all constraints that wish to be relaxed. These include (as value) + # their respective penalty. This exists, even if the model constraint_safety setting is off, since individual + # could choose to use it separately. + # -> this is already done when creating the IESopt internal data structure. + + @info "Profiling results after `parse` [time, top 5]" _profiling_format_top(model, 5)... + end + + return true +end + +function _parse_global_specification!(model::JuMP.Model, global_parameters::Dict) + data = _iesopt(model).input._tl_yaml + + # Check for stochastic configurations. + if haskey(data, "stochastic") + _iesopt(model).input.stochastic[:base_config] = data["stochastic"] + _iesopt(model).input.stochastic[:scenario] = Dict() + + if isempty(global_parameters) + @warn "Missing global parameters in stochastic model; you can safely ignore this warning if this is a stochastic main-problem" + else + for stochastic_param in keys(_iesopt(model).input.stochastic[:base_config]["parameters"]) + if haskey(global_parameters, stochastic_param) + _iesopt(model).input.stochastic[:scenario][stochastic_param] = global_parameters[stochastic_param] + else + @warn "Missing stochastic parameter; you can safely ignore this warning if this is a stochastic main-problem" stochastic_param + end + end + end + + if isempty(_iesopt(model).input.stochastic[:scenario]) + # No parameters registered. If this is a main model we need to supply a reasonable default to allow full + # parsing of the config file. If not there is something wrong. + @warn "Guessing defaults for stochastic parameter; you can safely ignore this warning if this is a stochastic main-problem" + for (stoch_param, entries) in _iesopt(model).input.stochastic[:base_config]["parameters"] + _iesopt(model).input.stochastic[:scenario][stoch_param] = entries[1] + @info "Guessing stochastic parameter" stoch_param value = entries[1] + end + end + end + + # Check if there are global parameters that need replacement. + if haskey(data, "parameters") || + (!isempty(_iesopt(model).input.stochastic) && !isempty(_iesopt(model).input.stochastic[:scenario])) + # Pop out parameters. 
+ parameters = pop!(data, "parameters", Dict()) + + if parameters isa String + parameters = YAML.load_file(normpath(model.ext[:_iesopt_wd], parameters); dicttype=Dict{String, Any}) + elseif parameters isa Dict + else + @critical "Unrecognized format for global parameters" type = typeof(parameters) + end + + if !isempty(_iesopt(model).input.stochastic) + # Inject stochastic parameters. + for (key, value) in _iesopt(model).input.stochastic[:scenario] + if haskey(parameters, key) + @critical "Parameter name collision while trying to inject stochastic parameter" stoch_param = key + end + parameters[key] = value + end + end + + # Replace default values from `global_parameters`. + for (param, value) in parameters + parameters[param] = pop!(global_parameters, param, value) + isnothing(parameters[param]) && (@critical "Mandatory parameter missing value" parameter = param) + end + + # Report unused global parameters. + for (attr, _) in global_parameters + @warn "Parameter supplied but not used in model specification" parameter = attr + end + + # Construct the parsed global configuration with all parameter replacements. + replacements = Regex(join(["<$k>" for k in keys(parameters)], "|")) + _iesopt(model).input._tl_yaml = YAML.load( + replace( + replace(YAML.write(data), replacements => p -> parameters[p[2:(end - 1)]]), + "\"" => "", # this is necessary to prevent `Number`s being enclosed with "", ending up as `String` + "nothing" => "null", # this is necessary to properly preserve "null" (as nothing) + ); + dicttype=Dict{String, Any}, + ) + + merge!(_iesopt(model).input.parameters, parameters) + else + if !isempty(global_parameters) + @warn "Global parameters passed to IESopt, but none defined in model config" + end + end + + return true +end + +function _parse_global_addons(model::JuMP.Model, addons::Dict{String, Any}) + @info "Preloading global addons" + return Dict{String, NamedTuple}( + filename => (addon=_getfile(model, string(filename, ".jl")), config=prop) for (filename, prop) in addons + ) +end + +function _parse_inputfiles(model::JuMP.Model, files::Dict{String, String}) + isempty(files) || @info "Detected input files: Start preloading" + return Dict{String, Union{DataFrames.DataFrame, Module}}( + name => _getfile(model, filename) for (name, filename) in files + ) +end + +function _flatten_model!(model::JuMP.Model, description::Dict{String, Any}) + @info "Begin flattening model" + + cnt_disabled_components = 0 + toflatten::Vector{String} = collect(keys(description)) + + while length(toflatten) > 0 + cname = pop!(toflatten) + + # Skip a component if it is "disabled" (since we are still in the top-level). + if _parse_bool(model, get(description[cname], "disabled", false)) || + !_parse_bool(model, pop!(description[cname], "enabled", true)) + delete!(description, cname) + cnt_disabled_components += 1 + continue + end + + if description[cname]["type"] == "Expression" + @critical "The `Expression` Core Component is deprecated" component = cname + end + + # Skip core components. + (description[cname]["type"] in ["Node", "Connection", "Profile", "Unit", "Decision"]) && continue + + # Try parsing it. 
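+ # Non-core components are expanded by their template into (possibly several) new entries; those are
+ # appended to the queue, so nested templates keep being flattened until only core components remain.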
+ new_components = _parse_noncore!(model, description, cname) + toflatten = vcat(toflatten, new_components) + end + + @info "Finished flattening model" number_of_disabled_components = cnt_disabled_components +end + +function _validate(data::Dict; schema::String="") + @warn "Validation is currently not updated to new YAML syntax and will therefore be skipped" + return nothing +end + +function _parse_components!(model::JuMP.Model, description::Dict{String, Any}) + @info "Parsing components from YAML" n_components = length(description) + + components = _iesopt(model).model.components + type_info = Dict(t => 0 for t in ["Connection", "Decision", "Node", "Profile", "Unit"]) + + for (desc, prop) in description + if _parse_bool(model, pop!(prop, "disabled", false)) || !_parse_bool(model, pop!(prop, "enabled", true)) + @critical "Disabled components should not end up in parse" + end + + type = pop!(prop, "type") + type_info[type] += 1 + name = desc + + # Place name of current attempted parse into `debug`. + _iesopt(model).debug = name + + # Calculate constraint safety settings. Those default to the model-wide settings. + constraint_safety = pop!(prop, "constraint_safety", _iesopt_config(model).optimization.constraint_safety) + constraint_safety_cost = + pop!(prop, "constraint_safety_cost", _iesopt_config(model).optimization.constraint_safety_cost) + + # Drop auxiliary columns (starting with `$`) that are only used by external tools. + for k in keys(prop) + (k[1] == '$') && delete!(prop, k) + end + + if haskey(prop, "objectives") + for (obj, term) in pop!(prop, "objectives") + if !haskey(_iesopt(model).aux._obj_terms, obj) + @critical "Objective not found in `objectives` definition" objective = obj component = name + end + _add_obj_term!(model, term; component=name, objective=obj) + end + end + + if type == "Node" + # Extract and convert the Carrier (possible since it is mandatory). + carrier = _iesopt(model).model.carriers[pop!(prop, "carrier")] + + # Convert to _Expression. + state_lb = _convert_to_expression(model, pop!(prop, "state_lb", nothing)) + state_ub = _convert_to_expression(model, pop!(prop, "state_ub", nothing)) + + # Convert to Symbol + state_cyclic = Symbol(pop!(prop, "state_cyclic", :eq)) + nodal_balance = Symbol(pop!(prop, "nodal_balance", :enforce)) + + components[name] = Node(; + model=model, + name=name, + carrier=carrier, + constraint_safety=constraint_safety, + constraint_safety_cost=constraint_safety_cost, + state_lb=state_lb, + state_ub=state_ub, + state_cyclic=state_cyclic, + nodal_balance=nodal_balance, + Dict(Symbol(k) => v for (k, v) in prop)..., + ) + elseif type == "Connection" + # Handle optional carrier. 
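+ # A Connection does not need an explicit `carrier`: it is inferred from the two connected Nodes,
+ # which must share the same Carrier. E.g. (illustrative):
+ #     my_line:
+ #       type: Connection
+ #       node_from: region_a
+ #       node_to: region_b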
+ carrier = pop!(prop, "carrier", nothing) + + node_from_carrier = if haskey(components, prop["node_from"]) + components[prop["node_from"]].carrier.name + else + description[prop["node_from"]]["carrier"] + end + + node_to_carrier = if haskey(components, prop["node_to"]) + components[prop["node_to"]].carrier.name + else + description[prop["node_to"]]["carrier"] + end + + if node_from_carrier != node_to_carrier + @critical "Carrier mismatch in Connection, connecting wrong Nodes" component = name + end + + if isnothing(carrier) + carrier = _iesopt(model).model.carriers[node_from_carrier] + else + if node_from_carrier != carrier + @critical "Carrier mismatch in Connection, wrong Carrier given" component = name + end + @info "Specifying `carrier` in Connection is not necessary" maxlog = 1 + carrier = _iesopt(model).model.carriers[carrier] + end + + # Convert to _Expression. + lb = _convert_to_expression(model, pop!(prop, "lb", nothing)) + ub = _convert_to_expression(model, pop!(prop, "ub", nothing)) + capacity = _convert_to_expression(model, pop!(prop, "capacity", nothing)) + cost = _convert_to_expression(model, pop!(prop, "cost", nothing)) + loss = _convert_to_expression(model, pop!(prop, "loss", nothing)) + + # Initialize. + components[name] = Connection(; + model=model, + name=name, + constraint_safety=constraint_safety, + constraint_safety_cost=constraint_safety_cost, + carrier=carrier, + lb=lb, + ub=ub, + capacity=capacity, + cost=cost, + loss=loss, + Dict(Symbol(k) => v for (k, v) in prop)..., + ) + elseif type == "Profile" + # Extract and convert the Carrier (possible since it is mandatory). + carrier = _iesopt(model).model.carriers[pop!(prop, "carrier")] + + # Convert to _Expression. + value = _convert_to_expression(model, pop!(prop, "value", nothing)) + lb = _convert_to_expression(model, pop!(prop, "lb", nothing)) + ub = _convert_to_expression(model, pop!(prop, "ub", nothing)) + cost = _convert_to_expression(model, pop!(prop, "cost", nothing)) + + # Convert to Symbol + mode = Symbol(pop!(prop, "mode", :fixed)) + allow_deviation = Symbol(pop!(prop, "allow_deviation", :off)) + + # Initialize. + components[name] = Profile(; + model=model, + name=name, + carrier=carrier, + constraint_safety=constraint_safety, + constraint_safety_cost=constraint_safety_cost, + value=value, + mode=mode, + lb=lb, + ub=ub, + cost=cost, + allow_deviation=allow_deviation, + Dict(Symbol(k) => v for (k, v) in prop)..., + ) + elseif type == "Unit" + # Convert strings that contain an `_Expression`. + + # The capacity is mandatory. + capacity_str = pop!(prop, "capacity") + if !(capacity_str isa AbstractString) || (!occursin("in:", capacity_str) && !occursin("out:", capacity_str)) + @critical "`capacity` must be specified with either `out:carrier` or `in:carrier`" unit = name + end + _capacity, _capacity_port = rsplit(capacity_str, " "; limit=2) + _capacity_inout, _capacity_carrier = split(_capacity_port, ":") + capacity_carrier = (inout=Symbol(_capacity_inout), carrier=_iesopt(model).model.carriers[_capacity_carrier]) + + # The marginal cost not. 
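+ # (`marginal_cost` is optional; when given, it must also name a direction and carrier, roughly of
+ # the form "value per out:carrier" or "value per in:carrier", e.g. "3.5 per out:electricity",
+ # mirroring the mandatory `capacity`, e.g. "100 out:electricity" — examples illustrative.)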
+ if haskey(prop, "marginal_cost") + marginal_cost_str = pop!(prop, "marginal_cost") + if !occursin("in:", marginal_cost_str) && !occursin("out:", marginal_cost_str) + @critical "`marginal_cost` must be specified with either `out:carrier` or `in:carrier`" unit = name + end + _marginal_cost, _marginal_cost_port = strip.(rsplit(marginal_cost_str, "per"; limit=2)) + _marginal_cost_inout, _marginal_cost_carrier = split(_marginal_cost_port, ":") + marginal_cost_carrier = + (inout=Symbol(_marginal_cost_inout), carrier=_iesopt(model).model.carriers[_marginal_cost_carrier]) + else + _marginal_cost = nothing + marginal_cost_carrier = nothing + end + + # Convert to _Expression. + availability = _convert_to_expression(model, pop!(prop, "availability", nothing)) + availability_factor = _convert_to_expression(model, pop!(prop, "availability_factor", nothing)) + unit_count = _convert_to_expression(model, pop!(prop, "unit_count", 1)) + capacity = _convert_to_expression(model, _capacity) + marginal_cost = _convert_to_expression(model, _marginal_cost) + + # Convert to Symbol + unit_commitment = Symbol(pop!(prop, "unit_commitment", :off)) + + # Convert to carriers. + carriers = _iesopt(model).model.carriers + inputs = Dict{Carrier, String}(carriers[k] => v for (k, v) in pop!(prop, "inputs", Dict())) + outputs = Dict{Carrier, String}(carriers[k] => v for (k, v) in pop!(prop, "outputs", Dict())) + + # Initialize. + components[name] = Unit(; + model=model, + name=name, + constraint_safety=constraint_safety, + constraint_safety_cost=constraint_safety_cost, + inputs=inputs, + outputs=outputs, + availability=availability, + availability_factor=availability_factor, + unit_count=unit_count, + capacity=capacity, + marginal_cost=marginal_cost, + unit_commitment=unit_commitment, + capacity_carrier=capacity_carrier, + marginal_cost_carrier=marginal_cost_carrier, + Dict(Symbol(k) => v for (k, v) in prop)..., + ) + elseif type == "Decision" + # Convert to Symbol + mode = Symbol(pop!(prop, "mode", :linear)) + + lb = pop!(prop, "lb", 0) + ub = pop!(prop, "ub", nothing) + cost = pop!(prop, "cost", nothing) + + (lb isa AbstractString) && (lb = eval(Meta.parse(lb))) + (ub isa AbstractString) && (ub = eval(Meta.parse(ub))) + (cost isa AbstractString) && (cost = eval(Meta.parse(cost))) + + # Initialize. + components[name] = Decision(; + model=model, + name=name, + constraint_safety=constraint_safety, + constraint_safety_cost=constraint_safety_cost, + mode=mode, + lb=lb, + ub=ub, + cost=cost, + Dict(Symbol(k) => v for (k, v) in prop)..., + ) + # elseif type == "Expression" + # parametric = pop!(prop, "parametric", _iesopt_config(model).parametric_expressions) + # components[current_id] = Expression(; + # model=model, + # id=current_id, + # name=name, + # constraint_safety=constraint_safety, + # constraint_safety_cost=constraint_safety_cost, + # parametric=parametric, + # Dict(Symbol(k) => v for (k, v) in prop)..., + # ) + else + error("Non core components cannot be constructed.") + end + end + + @info "Finished parsing components" n = length(components) connections = type_info["Connection"] decisions = + type_info["Decision"] nodes = type_info["Node"] profiles = type_info["Profile"] units = type_info["Unit"] + + return _iesopt(model).debug = "parse complete" +end + +function _parse_components_csv!( + model::JuMP.Model, + data::Dict{String, Any}, + description::Dict{String, Any}; + path::Union{String, Nothing}=nothing, +) + !haskey(data, "load_components") && return + + # Prepare path. + path = isnothing(path) ? 
_iesopt_config(model).paths.components : path + + # Get all files, including a potential recursive search using regexp. + files_to_load = [] + for entry in data["load_components"] + if entry == ".csv" + length(data["load_components"]) == 1 || + @critical "Using `.csv` in `load_components` is only allowed as sole entry" + for (root, dirs, files) in walkdir(path) + for file in files + endswith(file, ".csv") || continue + push!(files_to_load, normpath(relpath(root, path), file)) + end + end + elseif endswith(entry, ".csv") + push!(files_to_load, normpath(entry)) + elseif endswith(entry, ".xlsx") + @critical "Excel files are not supported for component definitions" file = entry + else + # Example: + # Match all files in the `thermals/install` directory, except `biomass.csv`. + # "^thermals/install/((?!biomass\.csv$).)*\.csv$" + for (root, dirs, files) in walkdir(path) + for file in files + isnothing(match(Regex(entry), normpath(relpath(root, path), file))) && continue + push!(files_to_load, normpath(relpath(root, path), file)) + end + end + end + end + + warnlogcount = 0 + for file in files_to_load + df = _getfile(model, file; path=:components, slice=false) + + # todo: this is probably super inefficient + for row in eachrow(df) + name = row.name + if haskey(description, name) + @critical "Duplicate component entry detected" file component = name + end + props = row[DataFrames.Not(:name)] + + dict_entries = Vector{String}() + sizehint!(dict_entries, length(props)) + for (k, v) in zip(names(props), values(props)) + if !ismissing(v) + if !isnothing(_iesopt(model).input.parameters) && v[1] == '<' + # This is a parameter that we need to replace. + push!(dict_entries, "$k: $(_iesopt(model).input.parameters[v[2:(end - 1)]])") + else + push!(dict_entries, "$k: $v") + end + else + # Is this a global parameter that we should fill in automatically? + if !isnothing(_iesopt(model).input.parameters) && haskey(_iesopt(model).input.parameters, k) + if warnlogcount == 0 + @warn "You left a field empty in a CSV component defintion file that corresponds to a global parameter. Automatic replacement is happening. Did you really intend this?" component = + name property = k + warnlogcount += 1 + end + push!(dict_entries, "$k: $(_iesopt(model).input.parameters[k])") + end + + # We skip values that are "just missing". + end + end + + description[name] = YAML.load(join(dict_entries, "\n"); dicttype=Dict{String, Any}) + end + end +end diff --git a/src/precompile/precompile_tools.jl b/src/precompile/precompile_tools.jl new file mode 100644 index 0000000..f64e912 --- /dev/null +++ b/src/precompile/precompile_tools.jl @@ -0,0 +1,16 @@ +@setup_workload begin + # list = [...] 
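+ # Precompile workload: build (and once solve) a few small example models so the typical
+ # generate/optimize code paths are compiled ahead of time; skipped when `Library` is unavailable.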
+ const dir = _PATHS[:examples] + + @compile_workload begin + if isnothing(Library) + else + model = IESopt.generate!(normpath(dir, "01_basic_single_node.iesopt.yaml"); verbosity=false) + IESopt.optimize!(model) + + IESopt.generate!(normpath(dir, "08_basic_investment.iesopt.yaml"); verbosity=false) + IESopt.generate!(normpath(dir, "09_csv_only.iesopt.yaml"); verbosity=false) + IESopt.generate!(normpath(dir, "46_constants_in_objective.iesopt.yaml"); verbosity=false) + end + end +end diff --git a/src/results/extract.jl b/src/results/extract.jl new file mode 100644 index 0000000..eb9ba77 --- /dev/null +++ b/src/results/extract.jl @@ -0,0 +1,71 @@ +function _convert_to_result(component::_CoreComponent) + ret = _CoreComponentResult( + Dict{Symbol, Any}(f => getfield(component, f) for f in _result_fields(component)), + _CoreComponentOptResultContainer(), + ) + + for field in [:exp, :var, :obj] + for (k, v) in getfield(getproperty(component, field), :dict) + setproperty!(getproperty(ret, field), k, JuMP.value.(v)) + end + end + + if JuMP.has_duals(component.model) + for (k, v) in getfield(getproperty(component, :var), :dict) + any(x -> isa.(x, JuMP.VariableRef), v) || continue # skip if not a variable (pf_theta can be Float64) + setproperty!(getproperty(ret, :var), Symbol("$(k)__dual"), JuMP.reduced_cost.(v)) + end + for (k, v) in getfield(getproperty(component, :con), :dict) + setproperty!(getproperty(ret, :con), Symbol("$(k)__dual"), JuMP.shadow_price.(v)) + end + end + + # Manually add the type and extracted fields for better access when loading results. + getfield(ret, :_info)[:__type] = string(_component_type(component)) + getfield(ret, :_info)[:__fields] = _result_fields(component) + + return ret +end + +function _extract_results(model::JuMP.Model) + @info "Begin extracting results" + # TODO: support multiple results (from MOA) + + result_components = _iesopt(model).results.components + result_objectives = _iesopt(model).results.objectives + result_customs = _iesopt(model).results.customs + components = _iesopt(model).model.components + + model_has_duals = JuMP.has_duals(model) + + _safe_get(jump_data::Any) = hasproperty(jump_data, :data) ? jump_data.data : jump_data + + # Prepare any custom objects (added to the JuMP model manually; must be named ones). + for (n, v) in JuMP.object_dictionary(model) + if any(x -> isa.(x, JuMP.VariableRef), v) + result_customs[n] = JuMP.value.(_safe_get(v)) + if model_has_duals + result_customs[Symbol("$(n)__dual")] = JuMP.reduced_cost.(_safe_get(v)) + end + elseif model_has_duals && any(x -> isa.(x, JuMP.ConstraintRef), v) + result_customs[Symbol("$(n)__dual")] = JuMP.shadow_price.(_safe_get(v)) + elseif any(x -> isa.(x, JuMP.AffExpr), v) + result_customs[n] = JuMP.value.(_safe_get(v)) + end + end + + merge!(result_components, Dict(k => (@profile model _convert_to_result(v)) for (k, v) in components)) + merge!(result_objectives, Dict(k => JuMP.value(v.expr) for (k, v) in _iesopt(model).model.objectives)) + + # Add results that were defined by Core Templates. 
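+ # Results registered from within a Core Template's `finalize` function (via `@add_result`) are
+ # wrapped into lightweight result containers here, keyed by the template instance's name, so they
+ # can be accessed alongside the regular component results.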
+ for (component_name, entry) in _iesopt(model).results._templates + symbolized_parameters = Dict{Symbol, Any}(Symbol(k) => v for (k, v) in entry.parameters) + for item in entry.items + _result = _CoreComponentResult(symbolized_parameters, _CoreComponentOptResultContainer()) + setproperty!(getproperty(_result, :res), Symbol(item.name), JuMP.value.(item.expr)) + result_components[component_name] = _result + end + end + + return nothing +end diff --git a/src/results/jld2.jl b/src/results/jld2.jl new file mode 100644 index 0000000..83a992b --- /dev/null +++ b/src/results/jld2.jl @@ -0,0 +1,111 @@ +function _get_git() + try + repo = LibGit2.GitRepo("./") + + if LibGit2.isdirty(repo) + @warn "The git repository is dirty (you should always commit changes before a run)" + end + + git_snapshot = LibGit2.snapshot(repo) + + return OrderedDict( + "branch" => LibGit2.headname(repo), + "path" => LibGit2.path(repo), + "commit" => LibGit2.head_oid(repo), + "snapshot" => OrderedDict(f => getfield(git_snapshot, f) for f in fieldnames(typeof(git_snapshot))), + "remotes" => OrderedDict(r => LibGit2.url(LibGit2.lookup_remote(repo, r)) for r in LibGit2.remotes(repo)), + ) + catch error + @warn "Could not find a valid git repository; consider using git for every model development" + end + + return OrderedDict{String, Any}() +end + +function _get_hash(model::JuMP.Model) + @error "Hashing is disabled until we decide on which files to include" + return "" + + # @info "Hashing model description" + + # _hash = SHA.SHA256_CTX() + # for (root, _, files) in walkdir(_iesopt_config(model).paths.main) + # occursin(dirname(_iesopt_config(model).paths.results), root) && continue + # for file in files + # SHA.update!(_hash, open(read, normpath(root, file))) + # end + # end + + # return bytes2hex(SHA.digest!(_hash)) +end + +function _get_solver_log(model::JuMP.Model) + file = abspath(_iesopt_config(model).paths.results, "solver.log") + isfile(file) || return "" + return read(file, String) +end + +function _get_iesopt_log(model::JuMP.Model) + try + file = abspath(replace(string(_iesopt(model).logger.loggers[2].logger.stream.name), " "", ">" => "")) + return String(open(read, file)) + catch error + end + + return "" +end + +function _save_results(model::JuMP.Model) + _iesopt_config(model).results.memory_only && return nothing + + @info "Begin saving results" + # TODO: support multiple results (from MOA) + + # Make sure the path is valid. + filepath = normpath(_iesopt_config(model).paths.results, "$(_iesopt_config(model).names.scenario).mfres.jld2") + mkpath(dirname(filepath)) + + # Write results. 
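+ # Everything is written into a single JLD2 container, grouped under "model/...", "attributes/...",
+ # "input/..." and "info/...". The file can later be read back, e.g. (illustrative):
+ #     res = load_results("results/my_scenario.mfres.jld2")
+ #     res["model/components"]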
+ JLD2.jldopen(filepath, "w"; compress=_iesopt_config(model).results.compress) do file + file["model/components"] = _iesopt(model).results.components + file["model/objectives"] = _iesopt(model).results.objectives + file["model/snapshots"] = _iesopt(model).model.snapshots + file["model/carriers"] = _iesopt(model).model.carriers + file["model/custom"] = _iesopt(model).results.customs + + file["attributes/iesopt_version"] = string(pkgversion(@__MODULE__)) + file["attributes/solver_name"] = JuMP.solver_name(model) + file["attributes/termination_status"] = string(JuMP.termination_status(model)) + file["attributes/solver_status"] = string(JuMP.raw_status(model)) + file["attributes/result_count"] = JuMP.result_count(model) + file["attributes/objective_value"] = JuMP.objective_value(model) + file["attributes/solve_time"] = JuMP.solve_time(model) + file["attributes/has_duals"] = JuMP.has_duals(model) + file["attributes/primal_status"] = Int(JuMP.primal_status(model)) + file["attributes/dual_status"] = Int(JuMP.dual_status(model)) + + if :input in _iesopt_config(model).results.include + file["input/config/toplevel"] = _iesopt(model).input._tl_yaml + file["input/config/flattened"] = _iesopt(model).aux._flattened_description + file["input/config/parsed"] = _iesopt(model).input.config + file["input/parameters"] = _iesopt(model).input.parameters + end + + # file["info/hash"] = @profile _get_hash(model) + if :git in _iesopt_config(model).results.include + file["info/git"] = @profile model _get_git() + end + if :log in _iesopt_config(model).results.include + file["info/logs/iesopt"] = _get_iesopt_log(model) + file["info/logs/solver"] = _get_solver_log(model) + end + + return nothing + end + + @info "Results saved to JLD2" file = abspath(filepath) +end + +function load_results(filename::String) + return JLD2.load(filename) +end diff --git a/src/results/results.jl b/src/results/results.jl new file mode 100644 index 0000000..1ea0082 --- /dev/null +++ b/src/results/results.jl @@ -0,0 +1,2 @@ +include("extract.jl") +include("jld2.jl") diff --git a/src/templates/functions/finalize.jl b/src/templates/functions/finalize.jl new file mode 100644 index 0000000..65cbba7 --- /dev/null +++ b/src/templates/functions/finalize.jl @@ -0,0 +1,84 @@ +""" + @add_result + +Add a custom result to the current model. + +# Example + +```yaml +functions: + finalize: | + @add_result "setpoint" ( + access("discharge").exp.out_electricity - + access("charge").exp.in_electricity + ) +``` + +See ["Template Finalization"](@ref manual_templates_finalization) in the documentation for more information. + +!!! warning "Usage outside of Core Template finalization" + This requires `__component__`, and `MODEL` to be set outside of calling the macro. +""" +macro add_result(result_name, result_expr, args...) + if !isempty(args) + return esc(quote + @critical "`@add_result` got more than two arguments" component = __component__ + end) + end + + return esc(quote + try + local templates = _iesopt(MODEL.model).results._templates + push!(templates[__component__].items, (name=$result_name, expr=$result_expr)) + catch e + local cname = $(:__component__) + rethrow(ErrorException("""Got unexpected error while finalizing a template instance. 
+ ------------ + > COMPONENT: $cname + > RESULT: $($result_name) + ------------ + > ERROR: $e + ------------ + """)) + end + end) +end + +function _build_template_function_finalize(template::CoreTemplate) + if !haskey(template.yaml, "functions") || !haskey(template.yaml["functions"], "finalize") + template.functions[:finalize] = (::JuMP.Model, ::String, ::Dict{String, Any}) -> nothing + return nothing + end + + # Get code from "finalize" and remove trailing newline. + code = chomp(template.yaml["functions"]["finalize"]) + + # Replace the `get` function (that would otherwise conflict with Julia's `get` function). + code = replace(code, r"""get\("([^"]+)"\)""" => s"""_get_parameter_safe("\1", __parameters__)""") + + # Parse the code into an expression. + code_ex = Meta.parse("""begin\n$(code)\nend"""; filename="$(template.name).iesopt.template.yaml") + + # Convert into a proper function. + template.functions[:finalize] = @RuntimeGeneratedFunction( + :(function (__model__::JuMP.Model, __component__::String, __parameters__::Dict{String, Any}) + MODEL = Utilities.ModelWrapper(__model__) + __template_name__ = $(template).name + + get_ts(s::String) = _get_timeseries_safe(s, __parameters__, __model__) + access(sub::String) = component(__model__, "$(__component__).$(sub)") + + try + $code_ex + catch e + template = __template_name__ + component = __component__ + @error "Error while finalizing component" error = string(e) template component + rethrow(e) + end + return nothing + end) + ) + + return nothing +end diff --git a/src/templates/functions/functions.jl b/src/templates/functions/functions.jl new file mode 100644 index 0000000..1900f7b --- /dev/null +++ b/src/templates/functions/functions.jl @@ -0,0 +1,3 @@ +include("prepare.jl") +include("validate.jl") +include("finalize.jl") diff --git a/src/templates/functions/prepare.jl b/src/templates/functions/prepare.jl new file mode 100644 index 0000000..18e5055 --- /dev/null +++ b/src/templates/functions/prepare.jl @@ -0,0 +1,39 @@ +function _build_template_function_prepare(template::CoreTemplate) + if !haskey(template.yaml, "functions") || !haskey(template.yaml["functions"], "prepare") + template.functions[:prepare] = (::Dict{String, Any}, ::String) -> nothing + return nothing + end + + # Get code from "prepare" and remove trailing newline. + code = chomp(template.yaml["functions"]["prepare"]) + + # Replace the `get` function (that would otherwise conflict with Julia's `get` function). + code = replace(code, r"""get\("([^"]+)"\)""" => s"""_get_parameter_safe("\1", __parameters__)""") + + # Parse the code into an expression. + code_ex = Meta.parse("""begin\n$(code)\nend"""; filename="$(template.name).iesopt.template.yaml") + + # Convert into a proper function. 
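+ # The user-supplied snippet is wrapped into a generated function taking the parameter dictionary and
+ # the component's name; inside it, `set(...)`, `get_ts(...)` and `set_ts(...)` are available as
+ # helpers, and any error is logged with template/component attached before being rethrown.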
+ template.functions[:prepare] = @RuntimeGeneratedFunction( + :(function (__parameters__::Dict{String, Any}, __component__::String) + MODEL = Utilities.ModelWrapper($(template).model) + __template_name__ = $(template).name + + set(p::String, v::Any) = _set_parameter_safe(p, v, __parameters__) + get_ts(s::String) = _get_timeseries_safe(s, __parameters__, MODEL.model) + set_ts(s::String, v::Any) = _set_timeseries_safe(s, v, __parameters__, MODEL.model) + + try + $code_ex + catch e + template = __template_name__ + component = __component__ + @error "Error while preparing component" error = string(e) template component + rethrow(e) + end + return nothing + end) + ) + + return nothing +end diff --git a/src/templates/functions/validate.jl b/src/templates/functions/validate.jl new file mode 100644 index 0000000..7e7e34d --- /dev/null +++ b/src/templates/functions/validate.jl @@ -0,0 +1,97 @@ +""" + @check + +Check whether the passed expression passes an `ArgCheck.@check`, especially helpful to validate a Template's parameters. + +# Example + +A `validate` section added to a Template can make use of the `@check` macro to validate the parameters passed to the +template. The macro will properly raise a descriptive error if the condition is not met. + +```yaml +parameters: + p: null + +functions: + validate: | + @check parameters["p"] isa Number + @check parameters["p"] > 0 +``` + +See ["Template Validation"](@ref manual_templates_validation) in the documentation for more information. + +!!! warning "Usage outside of Core Template validation" + This requires `__component__` to be set to some `String` outside of calling the macro, since it accesses this to + construct a proper error message. +""" +macro check(expr) + return esc( + quote + try + $ArgCheck.@check $expr + catch e + if isa(e, $ArgCheck.CheckError) + local cname = $(:__component__) + local message = replace(e.msg, "\n" => " ", "\"" => "'") + if occursin(" must hold. Got ", message) + local violated, reason = split(message, " must hold. Got ") + @error "Template validation error" component = cname violated reason + else + @error "Template validation error" component = cname message = + replace(e.msg, "\n" => "; ", "\"" => "'") + end + + $(:__valid__) = false + else + local cname = $(:__component__) + rethrow(ErrorException("""Got unexpected error while validating the template. + ------------ + > COMPONENT: $cname + ------------ + > ERROR: $e + ------------ + """)) + end + end + end, + ) +end + +function _build_template_function_validate(template::CoreTemplate) + if !haskey(template.yaml, "functions") || !haskey(template.yaml["functions"], "validate") + template.functions[:validate] = (::Dict{String, Any}, ::String) -> true + return nothing + end + + # Get code from "validate" and remove trailing newline. + code = chomp(template.yaml["functions"]["validate"]) + + # Replace the `get` function (that would otherwise conflict with Julia's `get` function). + code = replace(code, r"""get\("([^"]+)"\)""" => s"""_get_parameter_safe("\1", __parameters__)""") + + # Parse the code into an expression. + code_ex = Meta.parse("""begin\n$(code)\nend"""; filename="$(template.name).iesopt.template.yaml") + + # Convert into a proper function. 
+ template.functions[:validate] = @RuntimeGeneratedFunction( + :(function (__parameters__::Dict{String, Any}, __component__::String) + MODEL = Utilities.ModelWrapper($(template).model) + __template_name__ = $(template).name + + get_ts(s::String) = _get_timeseries_safe(s, __parameters__, MODEL.model) + + __valid__ = true + try + $code_ex + catch e + template = __template_name__ + component = __component__ + @error "Error while validating component" error = string(e) template component + rethrow(e) + end + return __valid__ + end) + ) + + return nothing +end diff --git a/src/templates/load.jl b/src/templates/load.jl new file mode 100644 index 0000000..eefbfd4 --- /dev/null +++ b/src/templates/load.jl @@ -0,0 +1,93 @@ +function _load_template(model::JuMP.Model, filename::String; read_file::Bool=false) + name = _get_template_name(filename) + read_file && @info "Loading template file" name + + template = CoreTemplate(; + model=model, + name=name, + path=dirname(filename), + raw=read_file ? read(filename, String) : "", + _status=read_file ? Ref(:raw) : Ref(:empty), + ) + + _iesopt(model).input.noncore[:templates][name] = template + return template +end + +_load_template(template::CoreTemplate) = + _load_template(template.model, normpath(template.path, "$(template.name).iesopt.template.yaml"); read_file=true) + +function _load_template_yaml!(template::CoreTemplate) + merge!(template.yaml, YAML.load(template.raw; dicttype=Dict{String, Any})) + template._status[] = :yaml + + has_components = haskey(template.yaml, "components") + has_component = haskey(template.yaml, "component") + + if has_components && !has_component + template.type[] = :container + elseif !has_components && has_component + template.type[] = :component + else + @critical "Template type could not be determined" template = template.name + end + + # Build all registered functions for this template. + _build_template_function_prepare(template) + _build_template_function_validate(template) + _build_template_function_finalize(template) + + return nothing +end + +function _scan_all_templates(model::JuMP.Model) + # Prepare the templates dictionary. + _iesopt(model).input.noncore[:templates] = Dict{String, CoreTemplate}() + + # Scan for templates in template folder and core internal templates. + all_template_files = Set{String}() + for dir in [_iesopt_config(model).paths.templates, _PATHS[:templates]] + for (root, _, files) in walkdir(dir) + isempty(files) && continue + for filename in files + _is_template(filename) || continue + (filename in all_template_files) && @critical "Duplicate file found in template folder" root filename + push!(all_template_files, normpath(root, filename)) + end + end + end + + # Load all templates, without actually reading in the files. 
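+ # "Loading" here only creates an empty `CoreTemplate` stub per file (status `:empty`); the raw file
+ # is read and its YAML parsed lazily, on first use via `_require_template`.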
+ for template_file in all_template_files + _load_template(model, template_file) + end + + @info "Finished scanning templates" count = length(_iesopt(model).input.noncore[:templates]) + + # valid_templates = [ + # path for + # path in _iesopt(model).input.noncore[:paths] if isfile(normpath(path, string(type, ".iesopt.template.yaml"))) + # ] + # (length(valid_templates) == 0) && error("Type template <$type.iesopt.template.yaml> could not be found") + # (length(valid_templates) != 1) && error("Type template <$type.iesopt.template.yaml> is ambiguous") + + # template_path = valid_templates[1] + # template_file = normpath(template_path, string(type, ".iesopt.template.yaml")) + + # _iesopt(model).input.noncore[:templates][type] = YAML.load_file(template_file; dicttype=Dict{String, Any}) + # _iesopt(model).input.noncore[:templates][type]["path"] = template_path + # @info "Encountered non-core component" type = type template = template_file +end + +function _require_template(model::JuMP.Model, name::String) + haskey(_iesopt(model).input.noncore[:templates], name) || @critical "`CoreTemplate` not found" name + template = _iesopt(model).input.noncore[:templates][name] + + if template._status[] == :empty + template = _load_template(template) + end + + (template._status[] == :raw) && _load_template_yaml!(template) + + return template +end diff --git a/src/templates/parse.jl b/src/templates/parse.jl new file mode 100644 index 0000000..163ff21 --- /dev/null +++ b/src/templates/parse.jl @@ -0,0 +1,317 @@ +function _parse_noncore_component!( + model::JuMP.Model, + type::String, + configuration::Dict{String, Any}, + cname::String, +)::Dict{String, Any} + # Get template and file. + template = _iesopt(model).input.noncore[:templates][type] + parameters = deepcopy(get(template.yaml, "parameters", Dict{String, Any}())) + + # Parse parameters from configuration. + for (param, value) in parameters + parameters[param] = pop!(configuration, param, value) + end + + # Remove parameters prefixed with "_" since those should not be considered. + delete!.(Ref(configuration), [k for k in keys(configuration) if startswith(k, "_")]) + + # Add "name" replacement parameter(s). + level = cname + accessor = "." + while true + parameters[accessor] = level + !occursin(".", level) && break + level, _ = rsplit(level, "."; limit=2) + accessor *= "." + end + if haskey(parameters, "self") + @warn "Use of `` as parameter detected; this can lead to confusion and should be avoided" component = + cname + else + parameters["self"] = parameters["."] + end + if haskey(parameters, "..") + if haskey(parameters, "parent") + @warn "Use of `` as parameter detected; this can lead to confusion and should be avoided" component = + cname + else + parameters["parent"] = parameters[".."] + end + end + if haskey(parameters, "name") + @warn "Use of `` as parameter detected; this can lead to confusion and should be avoided" component = + cname + else + parameters["name"] = split(cname, ".")[end] + end + + # Add global parameters + for (k, v) in _iesopt(model).input.parameters + if haskey(parameters, k) + @warn "Ambiguous parameter in component and global specification; using local value" component = cname parameter = + k + continue + end + parameters[k] = v + end + + # Validate and then prepare. + template.functions[:validate](parameters, cname) || @critical "Template validation failed" component = cname + template.functions[:prepare](parameters, cname) + + # Add an entry for finalization. 
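+ # Each template instance registers its `finalize` function together with the resolved parameters;
+ # results pushed by `@add_result` during finalization end up in `items` and are extracted after solving.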
+ _iesopt(model).results._templates[cname] = + (finalize=template.functions[:finalize], parameters=parameters, items=Vector{Any}()) + + # Construct the parsed core component with all parameter replacements. + replacements = Regex(join(["<$k>" for k in keys(parameters)], "|")) + if length(parameters) == 0 + comp = template.yaml["component"] + else + comp = Dict{String, Any}() + for (k, v) in template.yaml["component"] + _new_component_str = replace( + replace(YAML.write(v), replacements => p -> parameters[p[2:(end - 1)]]), + "\"" => "", # this is necessary to prevent `Number`s being enclosed with "", ending up as `String` + "nothing" => "null", # this is necessary to properly preserve "null" (as nothing) + ) + if occursin("<", _new_component_str) + param_begin = findfirst("<", _new_component_str)[1] + param_end = findnext(">", _new_component_str, param_begin)[1] + parameter = _new_component_str[param_begin:param_end] + @critical "Parameter placeholder not replaced" component = cname parameter + end + comp[k] = YAML.load(_new_component_str; dicttype=Dict{String, Any}) + end + end + + # Add potential files to the overall file list. + if haskey(template.yaml, "files") + for file in template.yaml["files"] + filedescr = replace(file[1], replacements => p -> parameters[p[2:(end - 1)]]) + haskey(_iesopt(model).input.files, filedescr) && continue + + filename = replace(file[2], replacements => p -> parameters[p[2:(end - 1)]]) + _iesopt(model).input.files[filedescr] = _getfile(model, filename) + end + end + + # Report possibly wrongly accessed attributes. + for (attr, _) in configuration + @error "Non exported component attribute accessed" type = type attribute = attr + end + + # TODO: allow "Set"s in single components too! + + return comp +end + +function _parse_container!( + model::JuMP.Model, + description::Dict{String, Any}, + name::String, + type::String, +)::Vector{String} + # Get template and file. + template = _iesopt(model).input.noncore[:templates][type] + parameters = copy(get(template.yaml, "parameters", Dict{String, Any}())) + + # Get top-level configuration. + configuration = description[name] + + # Remove parameters prefixed with "_" since those should not be considered. + delete!.(Ref(configuration), [k for k in keys(configuration) if startswith(k, "_")]) + + # Parse parameters from configuration. + for (param, value) in parameters + parameters[param] = pop!(configuration, param, value) + end + + # Report possibly wrongly accessed attributes. + for (attr, _) in configuration + @error "Non exported component attribute accessed" name = name attribute = attr + end + + # Add "name" replacement parameter(s). + level = name + accessor = "." + while true + parameters[accessor] = level + !occursin(".", level) && break + level, _ = rsplit(level, "."; limit=2) + accessor *= "." 
+ end + if haskey(parameters, "self") + @warn "Use of `` as parameter detected; this can lead to confusion and should be avoided" component = name + else + parameters["self"] = parameters["."] + end + if haskey(parameters, "..") + if haskey(parameters, "parent") + @warn "Use of `` as parameter detected; this can lead to confusion and should be avoided" component = + name + else + parameters["parent"] = parameters[".."] + end + end + if haskey(parameters, "name") + @warn "Use of `` as parameter detected; this can lead to confusion and should be avoided" component = name + else + parameters["name"] = split(name, ".")[end] + end + + # Add global parameters + for (k, v) in _iesopt(model).input.parameters + if haskey(parameters, k) + @warn "Ambiguous parameter in component and global specification; using local value" component = name parameter = + k + continue + end + parameters[k] = v + end + + # Validate and then prepare. + template.functions[:validate](parameters, name) || @critical "Template validation failed" component = name + template.functions[:prepare](parameters, name) + + # Add an entry for finalization. + _iesopt(model).results._templates[name] = + (finalize=template.functions[:finalize], parameters=parameters, items=Vector{Any}()) + + # Construct the parsed container with all parameter replacements. + replacements = Regex(join(["<$k>" for k in keys(parameters)], "|")) + _new_components_str = replace( + replace(YAML.write(template.yaml["components"]), replacements => p -> parameters[p[2:(end - 1)]]), + "\"" => "", # this is necessary to prevent `Number`s being enclosed with "", ending up as `String` + "nothing" => "null", # this is necessary to properly preserve "null" (as nothing) + ) + if occursin("<", _new_components_str) + param_begin = findfirst("<", _new_components_str)[1] + param_end = findnext(">", _new_components_str, param_begin)[1] + parameter = _new_components_str[param_begin:param_end] + @critical "Parameter placeholder not replaced" component = name parameter + end + components = YAML.load(_new_components_str; dicttype=Dict{String, Any}) + + # Add potential files to the overall file list. + if haskey(template.yaml, "files") + for file in template.yaml["files"] + filedescr = replace(file[1], replacements => p -> parameters[p[2:(end - 1)]]) + haskey(_iesopt(model).input.files, filedescr) && continue + filename = replace(file[2], replacements => p -> parameters[p[2:(end - 1)]]) + _iesopt(model).input.files[filedescr] = _getfile(model, filename) + end + end + + # Resolve potentially existing CSV components in the template + csv_components = Dict{String, Any}() + _parse_components_csv!(model, template.yaml, csv_components; path=template.path) + + # Ensure proper parameter replacement for all loaded CSV components + csv_components = YAML.load( + replace( + replace(YAML.write(csv_components), replacements => p -> parameters[p[2:(end - 1)]]), + "\"" => "", # this is necessary to prevent `Number`s being enclosed with "", ending up as `String` + "nothing" => "null", # this is necessary to properly preserve "null" (as nothing) + ); + dicttype=Dict{String, Any}, + ) + + # Add all parts of the container to the description with "."-updated names. + new_components = [] + for (cname, cdesc) in components + if cdesc["type"] == "Set" + # Check if the Set is disabled. + if _parse_bool(model, pop!(cdesc, "disabled", false)) || !_parse_bool(model, pop!(cdesc, "enabled", true)) + continue + end + + if haskey(cdesc, "components") + # Add all components of the set. 
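+ # Every component of the Set is namespaced under the container, i.e. registered as
+ # "container.component" (e.g., illustratively, a set entry "unit_1" inside container "plant" becomes
+ # "plant.unit_1"); name collisions abort with an error.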
+ for (set_cname, set_cdesc) in cdesc["components"] + _fullname = "$name.$set_cname" + if haskey(description, _fullname) + @critical "Resolving a set resulted in non-unique components" name set = cname component = + set_cname + end + description[_fullname] = set_cdesc + push!(new_components, _fullname) + end + elseif haskey(cdesc, "component") + # Just add the single component. + if length(new_components) > 0 + @critical "Single component Sets can not produce new components" name set = cname + end + @warn "Single component Sets are performing self-replacement; if you do not understand or expect this warning, there is most likely a misconfiguration happening" maxlog = + 1 name set = cname + description[name] = cdesc["component"] + return [name] + else + @critical "Set is missing `components` and `component` key" name set = cname + end + else + if haskey(description, "$name.$cname") + @critical "Resolving a container resulted in non-unique components" name component = cname + end + description["$name.$cname"] = cdesc + push!(new_components, "$name.$cname") + end + end + + # Add all parts of the CSV to the description with "."-updated names. + for (cname, cdesc) in csv_components + description["$name.$cname"] = cdesc + push!(new_components, "$name.$cname") + end + + # Remove the original container description. + delete!(description, name) + + return new_components +end + +function _parse_noncore!(model::JuMP.Model, description::Dict{String, Any}, cname::String)::Vector{String} + # Check if this component or container is disabled. For a component we can immediately "skip" it here, for a + # container, disabling will also disable every contained component, therefore we can also "skip" it completely. + if _parse_bool(model, pop!(description[cname], "disabled", false)) || + !_parse_bool(model, pop!(description[cname], "enabled", true)) + # We delete the element (component or container) from the model. + delete!(description, cname) + # Returning `[]` makes sure that no new components are added to the flattened model (see `_flatten_model!`). 
+ return String[] + end + + type = pop!(description[cname], "type") + template = _require_template(model, type) + # if !haskey(_iesopt(model).input.noncore[:templates], type) + # valid_templates = [ + # path for + # path in _iesopt(model).input.noncore[:paths] if isfile(normpath(path, string(type, ".iesopt.template.yaml"))) + # ] + # (length(valid_templates) == 0) && error("Type template <$type.iesopt.template.yaml> could not be found") + # (length(valid_templates) != 1) && error("Type template <$type.iesopt.template.yaml> is ambiguous") + + # template_path = valid_templates[1] + # template_file = normpath(template_path, string(type, ".iesopt.template.yaml")) + + # _iesopt(model).input.noncore[:templates][type] = YAML.load_file(template_file; dicttype=Dict{String, Any}) + # _iesopt(model).input.noncore[:templates][type]["path"] = template_path + # @info "Encountered non-core component" type = type template = template_file + # end + + # is_container = haskey(_iesopt(model).input.noncore[:templates][type], "components") + # is_component = haskey(_iesopt(model).input.noncore[:templates][type], "component") + + if _is_component(template) + description[cname] = _parse_noncore_component!(model, type, description[cname], cname) + return [cname] + elseif _is_container(template) + return _parse_container!(model, description, cname, type) + end + + @critical "Core Template seems to be neither `component` nor `container`, check specification of `components: ...` and/or `component: ...` entry" name = + type + + return String[] +end diff --git a/src/templates/templates.jl b/src/templates/templates.jl new file mode 100644 index 0000000..6033173 --- /dev/null +++ b/src/templates/templates.jl @@ -0,0 +1,125 @@ +""" + CoreTemplate + +A struct to represent an IESopt.jl "Core Template". +""" +@kwdef struct CoreTemplate + model::JuMP.Model + name::String + path::String + raw::String + yaml::Dict{String, Any} = Dict{String, Any}() + + """A dictionary of functions that can be called by the template, options are `:validate`, `:prepare`, `:finalize`.""" + functions::Dict{Symbol, Function} = Dict{Symbol, Function}() + + """Type of this `CoreTemplate`: `:container` (if `"components"` exists), `:component` (if `"component"` exists).""" + type::Ref{Symbol} = Ref(:none) + + _status::Ref{Symbol} +end + +function _get_parameter_safe(p::String, parameters::Dict{String, Any}) + haskey(parameters, p) || @critical "Trying to access (`get`) undefined parameter in `CoreTemplate`" parameter = p + return parameters[p] +end + +function _set_parameter_safe(p::String, v::Any, parameters::Dict{String, Any}) + haskey(parameters, p) || @critical "Trying to access (`set`) undefined parameter in `CoreTemplate`" parameter = p + parameters[p] = v + return nothing +end + +function _get_timeseries_safe(p_or_cf::String, parameters::Dict{String, Any}, model::JuMP.Model) + if !occursin("@", p_or_cf) + p_or_cf = _get_parameter_safe(p_or_cf, parameters)::String + end + + # Now we know, that `p_or_cf` is a "col@file" selector string. + column, file = string.(split(p_or_cf, "@")) + + return _getfromcsv(model, file, column) +end + +function _set_timeseries_safe(p_or_cf::String, v::Any, parameters::Dict{String, Any}, model::JuMP.Model) + if !occursin("@", p_or_cf) + p_or_cf = _get_parameter_safe(p_or_cf, parameters)::String + end + + # Now we know, that `p_or_cf` is a "col@file" selector string. + column, file = string.(split(p_or_cf, "@")) + + # Check if this file exists. 
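+ # If the referenced file is already loaded, the column is overwritten (or added); otherwise a new
+ # single-column DataFrame is registered under that file name. E.g. (illustrative), a selector like
+ # "demand@data.csv" targets column "demand" of the input file registered as "data.csv".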
+ if haskey(_iesopt(model).input.files, file) + # This works for overwriting existing columns, as well as adding new ones. + _iesopt(model).input.files[file][!, column] .= v + else + _iesopt(model).input.files[file] = DataFrames.DataFrame(column => v) + end + + return nothing +end + +include("functions/functions.jl") +include("load.jl") +include("parse.jl") + +_is_template(filename::String) = endswith(filename, ".iesopt.template.yaml") +_get_template_name(filename::String) = string(rsplit(basename(filename), "."; limit=4)[1]) +_is_component(template::CoreTemplate) = template.type[] == :component +_is_container(template::CoreTemplate) = template.type[] == :container + +function Base.show(io::IO, template::CoreTemplate) + str_show = "IESopt.CoreTemplate: $(template.name)" + return print(io, str_show) +end + +function analyse(template::CoreTemplate) + old_status = template._status[] + template = _require_template(template.model, template.name) + template._status[] = old_status + + child_types::Vector{String} = sort!(collect(Set(if haskey(template.yaml, "component") + [template.yaml["component"]["type"]] + else + [v["type"] for v in values(template.yaml["components"])] + end))) + + child_templates = [t for t in child_types if t ∉ ["Connection", "Decision", "Node", "Profile", "Unit"]] + child_corecomponents = [t for t in child_types if t in ["Connection", "Decision", "Node", "Profile", "Unit"]] + + docs = "" + for line in eachline(IOBuffer(template.raw)) + startswith(line, "#") || break + length(line) >= 3 || continue + docs = "$(docs)\n$(line[3:end])" + end + + if isempty(docs) + @warn "Encountered empty docstring for `CoreTemplate`" template = template.name + else + docs = docs[2:end] # remove the leading `\n` + startswith(docs, "# ") || + @warn "`CoreTemplate` docstring should start with main header (`# Your Title`)" template = template.name + for section in ["Parameters", "Components", "Usage"] + occursin("## $(section)\n", docs) || + @warn "`CoreTemplate` is missing mandatory section in docstring" template = template.name section + end + end + + return ( + name=template.name, + was_prepared=old_status == :yaml, + docs=Markdown.parse(docs), + functions=keys(get(template.yaml, "functions", Dict{String, Any}())), + parameters=get(template.yaml, "parameters", Dict{String, Any}()), + child_templates=child_templates, + child_corecomponents=child_corecomponents, + ) +end + +function create_docs(template::CoreTemplate) + info = analyse(template) + # TODO + return info.docs +end diff --git a/src/texify/constraints.jl b/src/texify/constraints.jl new file mode 100644 index 0000000..11c7317 --- /dev/null +++ b/src/texify/constraints.jl @@ -0,0 +1,116 @@ +function _compare_constraints(c1, c2) + info1 = _parse_base_name(c1) + info2 = _parse_base_name(c2) + + if info1[1] != info2[1] + # The parents do not match, these are different constraints. + return :distinct, info2[3] + end + + if info1[2] != info2[2] + # The base names do not match, these are different constraints. + return :distinct, info2[3] + end + + if isnothing(info1[3]) || isnothing(info2[3]) + # At least one of the constraints is not an indexed constraint (therefore a unique one). + return :distinct, info2[3] + end + + if typeof(JuMP.constraint_object(c1).set) != typeof(JuMP.constraint_object(c2).set) + # The sets are different (e.g. `=` vs. `<=`), so the constraints are distinct. + return :distinct, info2[3] + end + + # Get the terms. 
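+ # Compare the linear terms of both constraints: identical structure with identical coefficients and
+ # right-hand side means `:equal`; identical structure with differing coefficients or right-hand
+ # sides is only `:alike` (later printed with symbolic coefficients); anything else is `:distinct`.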
+ terms1 = JuMP.linear_terms(JuMP.constraint_object(c1).func) + terms2 = JuMP.linear_terms(JuMP.constraint_object(c2).func) + + if length(terms1) != length(terms2) + # If the number of terms is not the same, the constraints are different. + return :distinct, info2[3] + end + + # Parse the coefficients, based on the extracted variable information. + coeff1 = Dict(_parse_base_name(term[2]; base_index=(info1[3] - 1)) => term[1] for term in terms1) + coeff2 = Dict(_parse_base_name(term[2]; base_index=(info2[3] - 1)) => term[1] for term in terms2) + + possible_return = :equal + for (k, v) in coeff1 + if !haskey(coeff2, k) + # Constraint 2 misses a term that constraint 1 has. + return :distinct, info2[3] + end + + if !(coeff2[k] ≈ v) + # The coefficients are different. + possible_return = :alike + end + end + + # Check if the right-hand sides match. + if JuMP.normalized_rhs(c1) != JuMP.normalized_rhs(c2) + possible_return = :alike + end + + return possible_return, info2[3] # info2[3] = "n2" +end + +function _group_constraints(constraints) + constr_groups = Dict{JuMP.ConstraintRef, NamedTuple}() + for n in eachindex(constraints) + elem = constraints[n] + + assigned = false + for (repr, cg) in constr_groups + similarity, n = _compare_constraints(repr, elem) + + if similarity === :equal + push!(constr_groups[repr].constraints, elem) + push!(constr_groups[repr].indices, n) + elseif similarity === :alike + push!(constr_groups[repr].constraints, elem) + push!(constr_groups[repr].indices, n) + constr_groups[repr] = ( + constraints=constr_groups[repr].constraints, + similarity=:alike, + indices=constr_groups[repr].indices, + ) + else + continue + end + + assigned = true + break + end + + if !assigned + constr_groups[elem] = (constraints=[elem], similarity=:equal, indices=[n]) + end + end + + return constr_groups +end + +""" + _parse_base_name(var::JuMP.ConstraintRef) + +Parse the parent, the name, and the index from a `ConstraintRef`. +""" +function _parse_base_name(var::JuMP.ConstraintRef) + name = JuMP.name(var) + + if occursin("[", name) + # This is a constraint indexed by time. + name, idx = split(name, "[") + idx = parse(Int64, idx[1:(end - 1)]) + else + # This is a single constraint. + idx = nothing + end + + # The name is the right-most part, after the last `.`. 
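+ # E.g. (illustrative): a constraint named "my_unit.ramp_up[17]" yields parent "my_unit",
+ # name `:ramp_up`, and index 17; a scalar constraint without "[...]" gets `nothing` as index.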
+ parent, name = rsplit(name, "."; limit=2) + + return parent, Symbol(name), idx +end diff --git a/src/texify/print.jl b/src/texify/print.jl new file mode 100644 index 0000000..fb404ea --- /dev/null +++ b/src/texify/print.jl @@ -0,0 +1,352 @@ +function _constr_group_equal_tostring(group; fixed_t=false) + terms = JuMP.linear_terms(JuMP.constraint_object(group.constraints[1]).func) + ct = group.indices[1] + + lhs = _expr_tostring([(_escape_variable(JuMP.name(term[2]), ct; fixed_t=fixed_t), term[1]) for term in terms]) + + sign = ( + if JuMP.constraint_object(group.constraints[1]).set isa JuMP.MOI.EqualTo + "= " + elseif JuMP.constraint_object(group.constraints[1]).set isa JuMP.MOI.LessThan + "\\leq " + elseif JuMP.constraint_object(group.constraints[1]).set isa JuMP.MOI.GreaterThan + "\\geq " + else + "[ERRSIGN] " + end + ) + + rhs = "$(JuMP.normalized_rhs(group.constraints[1]))" + + return "\\begin{dmath}[style={\\eqmargin=0pt}]\n" * lhs * sign * rhs * "\n\\end{dmath}" +end + +function _constr_group_alike_tostring(group) + greeks = [ + "\\alpha", + "\\beta", + "\\gamma", + "\\delta", + "\\epsilon", + "\\zeta", + "\\eta", + "\\theta", + "\\iota", + "\\kappa", + "\\lambda", + ] + + # Extract coefficients for each variable + idx_first = min(group.indices...) + idx_last = max(group.indices...) + + coeffs = Dict( + group.indices[i] => Dict( + ( + occursin("[", JuMP.name(term[2])) ? split(JuMP.name(term[2]), "[")[1] : JuMP.name(term[2]), + occursin("[", JuMP.name(term[2])) ? + parse(Int64, split(JuMP.name(term[2]), "[")[2][1:(end - 1)]) - group.indices[i] : 0, + ) => term for term in JuMP.linear_terms(JuMP.constraint_object(group.constraints[i]).func) + ) for i in eachindex(group.constraints) + ) + + expr = [] + greek_range = [] + for (k, v) in coeffs[idx_first] + all_equal = false + if v[1] ≈ coeffs[idx_last][k][1] + # The first and last coefficient are the same. Check if all coefficients are the same. 
+ if allequal(elem[k][1] for elem in values(coeffs)) + all_equal = true + end + end + + if all_equal + push!(expr, (_escape_variable(String(JuMP.name(v[2])), idx_first), v[1])) + else + push!(expr, (_escape_variable(String(JuMP.name(v[2])), idx_first), "$(greeks[length(greek_range) + 1])")) + # todo: this gets triggered for decisions that are connected to an "availability_factor", but this is not printed correctly + push!(greek_range, "[$(round(v[1], digits=4)),\\dots,$(round(coeffs[idx_last][k][1], digits=4))]") + end + end + + lhs = _expr_tostring(expr) + + sign = ( + if JuMP.constraint_object(group.constraints[1]).set isa JuMP.MOI.EqualTo + "= " + elseif JuMP.constraint_object(group.constraints[1]).set isa JuMP.MOI.LessThan + "\\leq " + elseif JuMP.constraint_object(group.constraints[1]).set isa JuMP.MOI.GreaterThan + "\\geq " + else + "[ERRSIGN] " + end + ) + + parameters = [] + + rhs = round.(JuMP.normalized_rhs.(group.constraints), digits=4) + if allequal(rhs) + rhs = "$(JuMP.normalized_rhs(group.constraints[1]))" + else + push!(parameters, "\\vb{b} = [$(rhs[1]),\\dots,$(rhs[end])]") + rhs = "\\vb{b}_t" + end + + for i in eachindex(greek_range) + push!(parameters, "$(greeks[i]) = $(greek_range[i])") + end + + ret = "\\begin{dmath}[style={\\eqmargin=0pt}]\n" * lhs * sign * rhs * "\n\\end{dmath}" + + if length(parameters) > 0 + for param in parameters + ret *= "\\begin{dmath*}[style={\\eqmargin=0pt}]\n" * param * "\n\\end{dmath*}" + end + end + + return ret +end + +function _constr_group_tostring(group) + ret = "" + if group.similarity === :equal + ret = _constr_group_equal_tostring(group) + elseif group.similarity === :alike + ret = _constr_group_alike_tostring(group) + else + @error " SOME ERR " + end + + return ret +end + +function _vars_tostring(variable_list, parent::String; digits=4) + !haskey(variable_list, parent) && return "" + + str = "" + for (k, var) in variable_list[parent] + str *= "\\begin{dmath}[style={\\eqmargin=0pt}]\n{" + escaped_var_name = "\\vb{" * replace(String(var.name), "_" => "\\_") * "}" + indices = "" + if length(var.indices) > 0 + escaped_var_name *= "_t" + indices = "\\forall t \\in \\{$(min(var.indices...)), \\dots, $(max(var.indices...))\\}" + end + + if !isnothing(var.upper_bound) + # If it has an `upper_bound`, we look for a style of `x <= 10`. + escaped_var_name *= " \\leq $(round(var.upper_bound; digits=digits)) " + if !isnothing(var.lower_bound) + # And if it has both, we extend to `0 <= x <= 10`. + escaped_var_name = " $(round(var.lower_bound; digits=digits)) \\leq " * escaped_var_name + end + elseif !isnothing(var.lower_bound) + # If no `upper_bound` exists, we present the `lower_bound` as `x >= 0`. + escaped_var_name *= " \\geq $(round(var.lower_bound; digits=digits)) " + end + + str *= escaped_var_name + + if var.is_binary || var.is_integer || var.is_fixed + str *= "\\qquad (" + str *= var.is_binary ? "\\in Bin," : "" + str *= var.is_integer ? "\\in Int," : "" + str *= var.is_fixed ? (length(var.fix_values) == 1 ? 
"some \\in Fix," : "\\in Fix,") : "" + str = str[1:(end - 1)] + str *= ")" + end + + str *= "\\qquad $indices" + str *= "}\n\\end{dmath}\n\n" + end + + return str * "\n" +end + +function _obj_tostring(model::JuMP.Model; digits=4) + terms = JuMP.linear_terms(JuMP.objective_function(model)) + + grouped_terms = Dict{String, Tuple{Vector{Int64}, Vector{Float64}}}() + for term in terms + bn = JuMP.name(term[2]) + + if length(bn) == 0 + if !haskey(grouped_terms, "slack") + grouped_terms["slack"] = ([0], [term[1]]) + else + push!(grouped_terms["slack"][1], 0) + push!(grouped_terms["slack"][2], term[1]) + end + else + if occursin('[', bn) + _idx = findfirst("[", bn)[1] + + index = parse(Int64, bn[(_idx + 1):(end - 1)]) + bn = bn[1:(_idx - 1)] + else + # No time-index, therefore a Decision. + index = 0 + end + if !haskey(grouped_terms, bn) + grouped_terms[bn] = ([index], [term[1]]) + else + push!(grouped_terms[bn][1], index) + push!(grouped_terms[bn][2], term[1]) + end + end + end + + cost_count = 1 + cost_values = "" + obj_str = "" + for (varname, info) in grouped_terms + varname == "slack" && continue + + sign = (info[2][1] > 0) ? "+" : "-" + if length(info[1]) == 1 + # Single value. + obj_str *= + "{$(sign) " * string(abs(round(info[2][1]; digits=4))) * "\\cdot" * _escape_variable(varname, 0) * "}" + elseif length(info[1]) == length(_iesopt(model).model.T) + # Full length. + if allequal(info[2]) + obj_str *= + "{$(sign) " * + string(abs(round(info[2][1]; digits=4))) * + " \\cdot \\sum_{t \\in T}" * + _escape_variable("$(varname)[1]", 1) * + "}" + else + obj_str *= + "{+ " * + "\\sum_{t \\in T} \\vb{cost}_{$(cost_count),t} \\cdot" * + _escape_variable("$(varname)[1]", 1) * + "}" + + rng = (min(info[2]...), max(info[2]...)) + cost_values *= "\n\\begin{dmath}[style={\\eqmargin=0pt}]\n{\\vb{cost}_{$(cost_count),t} \\in [$(rng[1]),$(rng[2])] \\qquad \\forall t \\in T}\n\\end{dmath}" + + cost_count += 1 + end + else + # Partial length. 
+ if allequal(info[2]) + obj_str *= + "{$(sign) " * + string(abs(round(info[2][1]; digits=4))) * + "\\cdot \\sum_{t \\in S \\subset T}" * + _escape_variable("$(varname)[1]", 1) * + "}" + else + obj_str *= + "{+ " * + "\\sum_{t \\in S \\subset T} \\vb{cost}_{$(cost_count),t} \\cdot" * + _escape_variable("$(varname)[1]", 1) * + "}" + + rng = (min(info[2]...), max(info[2]...)) + cost_values *= "\n\\begin{dmath}[style={\\eqmargin=0pt}]\n{\\vb{cost}_{$(cost_count),t} \\in [$(rng[1]),$(rng[2])] \\qquad \\forall t \\in S \\subset T}\n\\end{dmath}" + + cost_count += 1 + end + end + obj_str *= "\\\\" + end + + if haskey(grouped_terms, "slack") + if allequal(grouped_terms["slack"][2]) + penalty = grouped_terms["slack"][2][1] + obj_str *= "{+ $(penalty) \\cdot \\sum_{i \\in I, t \\in T} \\vb{slack}_{i,t}}\\\\" + else + obj_str *= "{+ \\sum_{i \\in I, t \\in T} \\vb{cost}_{slack,i} \\cdot \\vb{slack}_{i,t}}\\\\" + + rng = (min(grouped_terms["slack"][2]...), max(grouped_terms["slack"][2]...)) + cost_values *= "\n\\begin{dmath}[style={\\eqmargin=0pt}]\n{\\vb{cost}_{slack} = [$(rng[1]),\\dots,$(rng[2])]}\n\\end{dmath}" + end + end + + obj_str = "{" * obj_str[3:(end - 2)] + + return "\\begin{dmath}[style={\\eqmargin=0pt}]\n\\vb{\\min}\\\\" * obj_str * "\n\\end{dmath}" * cost_values +end + +function _int2timeidx(t::Int64) + if t == 0 + return "t" + elseif t > 0 + return "{t+$t}" + else + return "{t-$(abs(t))}" + end +end + +function _escape_variable(str::String, base_t; fixed_t=false) + index = "" + if occursin("[", str) + str, index = split(str, "[") + index = index[1:(end - 1)] + end + str = replace(str, "_" => "\\_") + + if length(str) == 0 + return "\\vb{slack}_{?}" + end + + if occursin('.', str) + str, var = rsplit(str, "."; limit=2) + else + var = "?" + end + + if index == "" + return "\\vb{$str}_{\\vb{$var}}" + end + + if fixed_t + return "\\vb{$str}_{\\vb{$var}_{$index}}" + end + + index = _int2timeidx(parse(Int64, index) - base_t) + return "\\vb{$str}_{\\vb{$var}_{$index}}" +end + +function _expr_tostring(expr::Vector; digits=4) + ret = "" + + for i in eachindex(expr) + var = expr[i][1] + coeff = expr[i][2] + + if coeff isa Number + if coeff ≈ 1.0 + coeff = "+ {" + elseif coeff ≈ -1.0 + coeff = "- {" + else + if coeff > 0 + coeff = "+ {$(round(abs(coeff); digits=digits))\\cdot " + else + coeff = "- {$(round(abs(coeff); digits=digits))\\cdot " + end + end + else + coeff = "+ {$coeff\\cdot " + end + + ret *= "$(coeff)$(var)} " + end + + if ret[1:2] == "+ " + return ret[3:end] + end + if ret[1:2] == "- " + return "{-~" * ret[4:end] + end + + return ret +end + +# _test_epxr = [("x_1", 1.0), ("x_2", -1.0), ("y", "α")] +# print(_expr_tostring(_test_epxr)) diff --git a/src/texify/texify.jl b/src/texify/texify.jl new file mode 100644 index 0000000..c62d0dc --- /dev/null +++ b/src/texify/texify.jl @@ -0,0 +1,99 @@ +include("variables.jl") +include("constraints.jl") +include("print.jl") + +# todo: this currently cannot handle multi-dimensional var +# todo: this currently can not handle variables where some timesteps are fixed (e.g. 
due to initial conditions; add this) + +import Tectonic + +function texify(model; filename::String, component::String="") + prefix = component + + tex = "" + tex *= "\\documentclass[8pt,fleqn]{extarticle}\n" + tex *= "\\usepackage[a4paper, margin=1in]{geometry}\n\n" + tex *= "\\usepackage{physics,amsmath,breqn}\n\\usepackage[theorems]{tcolorbox}\n\n" + tex *= "\\setlength\\parindent{0pt}\n\n\\newtcolorbox{constrbox}[1][]{colback=white, #1}\n\n" + tex *= "\\begin{document}\n\n" + if prefix == "" + tex *= "This is a full model summary.\\\\~\\\\\n\n" + else + tex *= "This is a model summary of \\textbf{$prefix}.\\\\~\\\\\n\n" + end + tex *= "\\tableofcontents\n\n" + + tex *= "%\n%\n%\n" + tex *= "\\clearpage \\section{Objective function}\n\n" * _obj_tostring(model) + + # todo: this is type-unstable, see JuMP documentation, which means we could optimize this + constraints = [ + constraint for constraint in JuMP.all_constraints(model; include_variable_in_set_constraints=false) if + startswith(JuMP.name(constraint), prefix) + ] + + # Sort components by name. + components = _iesopt(model).model.components + sorted_comp_names = sort(keys(components)) + + # Prepare all variables. + variable_list = _describe_variables(model; prefix=prefix) + + for cname in sorted_comp_names + component = components[cname] + escaped_comp_name = replace(cname, "_" => "\\_") + + tex *= "%\n%\n%\n" + tex *= "\\clearpage \\section{Sub-component: \\textbf{$escaped_comp_name}}\n\n" + tex *= "\\subsection{Variables}\n" + tex *= _vars_tostring(variable_list, cname) + tex *= "\n\n" + tex *= "\\subsection{Constraints}\n" + + groups = _group_constraints([c for c in constraints if startswith(JuMP.name(c), cname)]) + descr = Dict{Symbol, Vector}() + for (group, val) in groups + constr = _parse_base_name(group)[2] + if !haskey(descr, constr) + descr[constr] = [] + end + push!(descr[constr], val) + end + sorted_descr_keys = sort([it for it in keys(descr)]) + + for k in sorted_descr_keys + for g in descr[k] + escaped_constraint_name = replace(String(k), "_" => "\\_") + + if length(g.constraints) == 1 + if isnothing(g.indices[1]) + tex *= "\\textit{$escaped_constraint_name}\n" + else + tex *= "\\textit{$escaped_constraint_name} | \$t = $(g.indices[1])\$\n" + end + tex *= _constr_group_equal_tostring(g; fixed_t=true) + tex *= "\n\n" + else + tex *= "\\textit{$escaped_constraint_name} | \$\\forall t \\in \\{$(g.indices[1]),\\dots,$(g.indices[end])\\}\$\n" + tex *= _constr_group_tostring(g) + tex *= "\n\n" + end + + tex *= "\\vspace{2em}" + end + end + end + + tex *= "\n\n\\end{document}" + + folder = dirname(filename) + tempname = joinpath(folder, "_tmp_texify") + + open("$(tempname).tex", "w") do file + return write(file, tex) + end + Tectonic.tectonic() do bin + Tectonic.run(`$bin $(tempname).tex --chatter minimal`) + return mv("$(tempname).pdf", filename; force=true) + end +end diff --git a/src/texify/variables.jl b/src/texify/variables.jl new file mode 100644 index 0000000..9a9e685 --- /dev/null +++ b/src/texify/variables.jl @@ -0,0 +1,77 @@ +struct _VariableInfo10 + name::Symbol + indices::Vector{Int64} + is_binary::Bool + is_integer::Bool + is_fixed::Bool + lower_bound::Union{Nothing, Float64} + upper_bound::Union{Nothing, Float64} + fix_values::Vector{Float64} + # todo: this assumes that a variable is either binary/integer/fixed for ALL indices or for NONE +end +_VariableInfo = _VariableInfo10 + +function _describe_variables(model::JuMP.Model; prefix::String="") + variables = JuMP.all_variables(model) + + parents = Dict{String, 
Dict{Symbol, _VariableInfo}}() + for var in variables + parent, name, idx = _parse_base_name(var) + !startswith(parent, prefix) && continue + + if !haskey(parents, parent) + parents[parent] = Dict{Symbol, _VariableInfo}() + end + + if !haskey(parents[parent], name) + parents[parent][name] = _VariableInfo( + name, + isnothing(idx) ? [] : [idx], + JuMP.is_binary(var), + JuMP.is_integer(var), + JuMP.is_fixed(var), + JuMP.has_lower_bound(var) ? JuMP.lower_bound(var) : nothing, + JuMP.has_upper_bound(var) ? JuMP.upper_bound(var) : nothing, + JuMP.is_fixed(var) ? [JuMP.fix_value(var)] : [], + ) + else + if !isnothing(idx) + push!(parents[parent][name].indices, idx) + end + if JuMP.is_fixed(var) + push!(parents[parent][name].fix_values, JuMP.fix_value(var)) + end + end + end + + return parents +end + +""" + _parse_base_name(var::JuMP.VariableRef; base_index::Int64 = 0) + +Parse the parent, the name, and the index from a `VariableRef`. `base_index` can be used to calculate the index as +offset based on a constraints specific time index. +""" +function _parse_base_name(var::JuMP.VariableRef; base_index::Int64=0) + name = JuMP.name(var) + + if name == "" + # This is an anonymous variable, which means its a slack variable created by `relax_with_penalty`. + return "", Symbol("_z"), 0 + end + + if occursin("[", name) + # This is a variable indexed by time. + name, idx = split(name, "[") + idx = (parse(Int64, idx[1:(end - 1)]) - base_index) + else + # This is a single variable (e.g. a Decision). + idx = nothing + end + + # The name is the right-most part, after the last `.`. + parent, name = rsplit(name, "."; limit=2) + + return parent, Symbol(name), idx +end diff --git a/src/utils/docs.jl b/src/utils/docs.jl new file mode 100644 index 0000000..086280a --- /dev/null +++ b/src/utils/docs.jl @@ -0,0 +1,134 @@ +function _parse_field_docstring(docstring::String) + # Split docstring into "spec" fields and "description". + rm = match(r"```(\{.*?\})```((?s).*)", docstring) + isnothing(rm) && return ret_error + length(rm.captures) == 2 || return ret_error + str_specs, str_descr = string.(rm.captures) + + # Parse specs as JSON, load description as string. + specs = JSON.parse(str_specs) + descr = replace(strip(str_descr), "\n" => " ") + + # Return specs (in correct order) and description. + return ([specs[s] for s in ["mandatory", "values", "default"]]..., descr) +end + +function _docs_struct_to_table(datatype::Type) + # Start table with proper header. + table_rows = [ + "| Name | Mandatory | Values | Default | Description |", + "|:-----|:----------|:-------|:--------|:------------|", + ] + + # Get proper binding from module, error if structure is unexpected. + binding = Base.Docs.aliasof(datatype, typeof(datatype)) + dict = Base.Docs.meta(binding.mod; autoinit=false) + isnothing(dict) && @critical "Doc error occured" datatype + haskey(dict, binding) || @critical "Doc error occured" datatype dict binding + multidoc = dict[binding] + haskey(multidoc.docs, Union{}) || @critical "Doc error occured" datatype multidoc.docs + + # Get all fields that have a docstring. + all_doc_fields = multidoc.docs[Union{}].data[:fields] + + # Get all fields in the order they are defined (`all_doc_fields` is an unordered dictionary). + all_fields = fieldnames(datatype) + + # Create a row for each field, properly splitting the docstring. 
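+ # Fields that do not carry their own docstring are skipped and therefore do not show up in the table.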
+ for field in all_fields + haskey(all_doc_fields, field) || continue + field_attrs = join(_parse_field_docstring(all_doc_fields[field]), " | ") + push!(table_rows, "| `$(field)` | $(field_attrs) |") + end + + # Join all rows to the string representation of the table and parse it to Markdown. + return Markdown.parse(join(table_rows, "\n")) +end + +function _docs_docstr_to_admonition(f::Function) + f_name = string(f) + obj_cc, obj_type, obj_name = string.(match(r"_([^_]+)_([^_]+)_(.*)!", f_name).captures) + obj_longtype = + Dict("var" => "variable", "exp" => "expression", "con" => "constraint", "obj" => "objective")[obj_type] + f_path = "ait-energy/IESopt.jl/tree/main/src/core/$(obj_cc)/$(obj_type)_$(obj_name).jl" + + header = """ + !!! tip "How to?" + Access this $(obj_longtype) by using: + ```julia + # Julia + component(model, "your_$(obj_cc)").$(obj_type).$(obj_name) + ``` + ```python + # Python + model.get_component("your_$(obj_cc)").$(obj_type).$(obj_name) + ``` + + You can find the full implementation and all details here: [`IESopt.jl`](https://github.com/$(f_path)). + """ + + docstr = string(Base.Docs.doc(f)) + docstr = replace(docstr, r"```(?s).*```" => header) + docstr = replace(docstr, "\n" => "\n ", "\$\$" => "```math") # TODO + + return """ + !!! details "$obj_name" + $(docstr) + """ +end + +function _docs_make_parameters(datatype::Type) + return """ + # Parameters + + $(_docs_struct_to_table(datatype)) + """ +end + +function _docs_make_model_reference(datatype::Type) + lc_type = lowercase(string(nameof(datatype))) + registered_names = string.(names(@__MODULE__; all=true, imported=false)) + valid_names = filter(n -> startswith(n, "_$(lc_type)_"), registered_names) + + var_names = filter(n -> startswith(n, "_$(lc_type)_var_"), valid_names) + exp_names = filter(n -> startswith(n, "_$(lc_type)_exp_"), valid_names) + con_names = filter(n -> startswith(n, "_$(lc_type)_con_"), valid_names) + obj_names = filter(n -> startswith(n, "_$(lc_type)_obj_"), valid_names) + + return """ + # Detailed Model Reference + + ## Variables + + $(join([_docs_docstr_to_admonition(getfield(@__MODULE__, Symbol(n))) for n in var_names], "\n\n")) + + ## Expressions + + $(join([_docs_docstr_to_admonition(getfield(@__MODULE__, Symbol(n))) for n in exp_names], "\n\n")) + + ## Constraints + + $(join([_docs_docstr_to_admonition(getfield(@__MODULE__, Symbol(n))) for n in con_names], "\n\n")) + + ## Objectives + + $(join([_docs_docstr_to_admonition(getfield(@__MODULE__, Symbol(n))) for n in obj_names], "\n\n")) + """ +end + +function _finalize_docstring(datatype::Type) + binding = Base.Docs.aliasof(datatype, typeof(datatype)) + multidoc = Base.Docs.meta(@__MODULE__)[binding] + old_data = multidoc.docs[Union{}].data + + multidoc.docs[Union{}] = Base.Docs.docstr(""" + $(Base.Docs.doc(datatype)) + + $(_docs_make_parameters(datatype)) + + $(_docs_make_model_reference(datatype)) + """) + multidoc.docs[Union{}].data = old_data + + return nothing +end diff --git a/src/utils/general.jl b/src/utils/general.jl new file mode 100644 index 0000000..28bc917 --- /dev/null +++ b/src/utils/general.jl @@ -0,0 +1,364 @@ +abstract type _CoreComponent end + +@kwdef struct _CoreComponentOptContainerDict{T <: Any} + dict::Dict{Symbol, T} = Dict{Symbol, T}() +end + +@kwdef struct _CoreComponentOptContainer + expressions = _CoreComponentOptContainerDict{Union{JuMP.AffExpr, Vector}}() + variables = _CoreComponentOptContainerDict{Union{JuMP.VariableRef, Vector}}() + constraints = _CoreComponentOptContainerDict{Union{JuMP.ConstraintRef, 
Vector}}() # TODO: this clashes with a more specific definition of `ConstraintRef` in JuMP + objectives = _CoreComponentOptContainerDict{JuMP.AffExpr}() +end + +""" + _AbstractVarRef + +Hold an unsolved reference onto a `JuMP.VariableRef`, that was not fully available before components were initialized. +""" +struct _AbstractVarRef + comp_name::String + field::Symbol +end + +""" + _PresolvedVarRef + +Hold a presolved reference onto a `JuMP.VariableRef`, that was not fully available before components were initialized. +""" +struct _PresolvedVarRef + comp::_CoreComponent + field::Function +end + +struct _AbstractAffExpr + constant::Float64 + variables::Vector{NamedTuple{(:coeff, :var), Tuple{Float64, _AbstractVarRef}}} +end + +struct _PresolvedAffExpr + constant::Float64 + variables::Vector{NamedTuple{(:coeff, :var), Tuple{Float64, _PresolvedVarRef}}} +end + +# _ID is used for all internal "ids". To ensure proper access, all ids need to be >= 1, unique and dense +const _ID = Int64 +# This defines the `_NumericalInput` type, that is used by IESopt for numerical input data (`Profile` values, ...) +const _NumericalInput = Union{Number, AbstractVector{<:Number}, _CoreComponent} # todo: this should be Expression (which we don't know here...) +# This defines the `_ScalarInput` type, a scalar (non-vector) type for numerical input data. +const _ScalarInput = Union{Number} +# A `_NumericalInput` type that allows passing `nothing`. +const _OptionalNumericalInput = Union{_NumericalInput, Nothing} +# A `_ScalarInput` type that allows passing `nothing`. +const _OptionalScalarInput = Union{_ScalarInput, Nothing} + +# A bound that can be given either by a `_NumericalInput` or defined based on variable in another core component. +const _Bound = Union{_NumericalInput, _AbstractAffExpr, _PresolvedAffExpr} +# A `_Bound` type that allows passing `nothing`. +const _OptionalBound = Union{_Bound, Nothing} + +# All strings in IESopt use this type. +const _String = AbstractString #CSV.InlineStrings.AbstractString +const _OptionalString = Union{_String, Nothing} + +_get(::Nothing) = nothing +_get(::Nothing, t::_ID) = nothing +_get(bound::_ScalarInput) = bound +_get(bound::_ScalarInput, t::_ID) = bound + +# todo: this is potentially unoptimized since the binding is not constant +# see: https://stackoverflow.com/a/34023458/5377696 +# this comment was related to: https://gitlab-intern.ait.ac.at/energy/commons/marketflow/core/-/blob/6d54998118a91b18b8d58846745be636890ec815/src/utils.jl#L54 +function _get(bound::_PresolvedAffExpr) + return sum(coeff * var.field(var.comp) for (coeff, var) in bound.variables; init=bound.constant) +end + +# todo: is there a case where decisions will be "per snapshot"? +function _get(bound::_PresolvedAffExpr, t::_ID) + return sum(coeff * var.field(var.comp, t) for (coeff, var) in bound.variables; init=bound.constant) +end + +function _safe_parse(::Type{Float64}, str::AbstractString) + ret = tryparse(Float64, str) + if !isnothing(ret) + return ret + end + return eval(Meta.parse(str)) +end + +""" +This is due to slow and expensive expression building in loops. For a description of this see [^1]; [^2] further +addresses this. The implementation is based on [^3] and [^4]. 
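+
+A minimal usage sketch (the `x[t]` variables and the numbers are purely illustrative):
+
+```julia
+# Builds `5.0 + x[1] + x[2] + ... + x[5]`, using `add_to_expression!` instead of repeated `+`.
+expr = _affine_expression(x[t] for t in 1:5; init=5.0)
+```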
+ +[^1]: https://discourse.julialang.org/t/jump-cplex-adding-objective-function-expression-in-loop-is-very-resource-consuming/21859/3 +[^2]: https://github.com/JuliaOpt/GSOC2019/blob/master/ideas-list.md#mutablearithmetics +[^3]: https://github.com/jump-dev/JuMP.jl/blob/f46a461c126fd1a7c309fb773a00b5dc529632b9/src/operators.jl#L304-L310 and +[^4]: https://github.com/Spine-project/SpineOpt.jl/blob/ba695b6af802286a36f6f97758ff99cd5c324f94/src/util/misc.jl#L85-L93 +""" +function _affine_expression(elements; init::Float64=0.0) + if isa(elements, Base.Generator) + elements + end + + if isa(init, Number) + expr = JuMP.AffExpr(init) + else + expr = zero(T) + expr += init + end + isempty(elements) && return expr + expr += first(elements) + for element in Iterators.drop(elements, 1) + JuMP.add_to_expression!(expr, element) + end + + return expr +end + +""" + _get(numerical_input::_NumericalInput, t::UInt) + +Get the value of the `numerical_input` at time (snapshot index) `t`. +""" +# _get(x::Number, t::UInt) = x # this is already done above (see `ScalarInput`) +_get(x::AbstractVector{<:Number}, t::_ID) = x[t] +_get(x::JuMP.AffExpr, t::_ID) = x +_get(x::Vector{JuMP.AffExpr}, t::_ID) = x[t] + +function _getfile(model::JuMP.Model, filename::String; path::Symbol=:auto, sink=DataFrames.DataFrame, slice::Bool=true) + if endswith(filename, ".csv") + path = path === :auto ? :files : path + filepath = abspath(getfield(_iesopt_config(model).paths, path), filename) + return _getcsv(model, filepath; sink=sink, slice=slice) + elseif endswith(filename, ".jl") + path = path === :auto ? :addons : path + core_addon_dir = _PATHS[:addons] + filepath_local = abspath(getfield(_iesopt_config(model).paths, path), filename) + filepath_core = abspath(core_addon_dir, filename) + + if isfile(filepath_local) + @info "Trying to load local addon" filename source = filepath_local + return include(filepath_local) + elseif isfile(filepath_core) + @info "Trying to load core addon" filename source = filepath_core + return include(filepath_core) + else + @critical "Failed to find addon location" filename filepath_local filepath_core + end + end +end + +""" + _getcsv(filename::String; sep::String=",") + +Read a CSV into a DataFrame. +""" +function _getcsv( + model::JuMP.Model, + filename::String; + sep::String=",", + dec::Char='.', + sink=DataFrames.DataFrame, + slice::Bool, +) + @info "Trying to load CSV" filename + + # Read the entire file. CSV.jl's `skipto` only makes it worse. + # See: https://github.com/JuliaData/CSV.jl/issues/959 + table = CSV.read(filename, sink; delim=sep, stringtype=String, decimal=dec) + + # If we are not slicing we return the whole table + slice || return table + + # Get some snapshot config parameters + offset = _iesopt_config(model).optimization.snapshots.offset + aggregation = _iesopt_config(model).optimization.snapshots.aggregate + + # Offset and aggregation don't work together. + if !isnothing(aggregation) && offset != 0 + @critical "Snapshot aggregation and non-zero offsets are currently not supported" + end + + # Get the number of table rows and and the model's snapshot count + nrows = size(table, 1) + count = _iesopt_config(model).optimization.snapshots.count + + # Get the range of table rows we want to return. + # Without snapshot aggregation we can return the rows specified by offset and count. + # Otherwise, we start at 1 and multiply the number of rows to return by the number of snapshots to aggregate. + from, to = isnothing(aggregation) ? 
(offset + 1, offset + count) : (1, count * aggregation) + + # Check if the range of rows is in bounds. + if from < 1 || to > nrows || from > to + @critical "Trying to access data with out-of-bounds or empty range" filename from to nrows + end + + return table[from:to, :] +end + +function _getfromcsv(model::JuMP.Model, file::String, column::String) + haskey(_iesopt(model).input.files, file) || (@critical "File not properly registered" column file) + return @view _iesopt(model).input.files[file][_iesopt(model).model.T, column] +end + +function _conv_S2NI(model::JuMP.Model, str::AbstractString) + # This handles pure values like "2.0". + val = tryparse(Float64, str) + !isnothing(val) && return val + + if isnothing(findfirst("@", str)) + # Check if this is a link to an Expression. + if haskey(_iesopt(model).model.components, str) + component = component(model, str) + error( + "You ended up in an outdated part of IESopt, which should not have happened. Please report this error including the model you are trying to run.", + ) + # todo: check this (which does not work currently since Expression is not defined at this point) + # if !(component isa Expression) + # @error "Non Expression component can not be converted to NumericalInput" str = str + # end + return component + else + # This handles calculations like "1/0.9". + return eval(Meta.parse(str)) + end + else + # This handles references to files like "column@data_file". + col, file = split(str, "@") + return collect(skipmissing(_iesopt(model).input.files[file][!, col])) + end +end + +# AbstractString is necessary for also accepting SubString + +function _presolve(model::JuMP.Model, data::_OptionalNumericalInput) + return data +end + +function _presolve(model::JuMP.Model, data::_AbstractAffExpr) + # For `var` as one of the variables: + # `var.field` contains the "variable" that we want to query. That is e.g. "value". The previous call to + # `_conv_S2AbstractVarRef` converted that into `:_value` which we query from IESopt (which is a function + # that can be used to extract the value of a Decision component). + return _PresolvedAffExpr( + data.constant, + [ + (coeff=coeff, var=_PresolvedVarRef(component(model, var.comp_name), getfield(IESopt, var.field))) for + (coeff, var) in data.variables + ], + ) +end + +# Needed to safely parse booleans (like "not true") that can result from parameter replacements. +function _parse_bool(model::JuMP.Model, str::String)::Bool + if !_has_cache(model, :parse_bool) + _iesopt_cache(model)[:parse_bool] = + Dict{String, Bool}("true" => true, "false" => false, "not true" => false, "not false" => true) + end + + _is_cached(model, :parse_bool, str) && return _get_cached(model, :parse_bool, str) + + try + parsed = eval(_unknown_to_string(model, Meta.parse(str))) + _iesopt_cache(model)[:parse_bool][str] = parsed + return parsed + catch e + @critical "Cannot convert string to bool" str + end +end +_parse_bool(::JuMP.Model, b::Bool) = b + +_unknown_to_string(::JuMP.Model, x) = x +function _unknown_to_string(model::JuMP.Model, ex::Expr) + return Expr(ex.head, (_unknown_to_string(model::JuMP.Model, arg) for arg in ex.args)...) 
+end +function _unknown_to_string(model::JuMP.Model, sym::Symbol)::Union{Symbol, String} + if !_has_cache(model, :module_names) + _iesopt_cache(model)[:module_names] = union(Set{Symbol}(names(Core)), names(Base), names(IESopt)) + end + + sym in _get_cache(model, :module_names) && return sym + return string(sym) +end + +function _base_name(comp::_CoreComponent, str::String) + !JuMP.set_string_names_on_creation(comp.model) && return "" + return "$(comp.name).$str" +end + +# These are used in Benders and Stochastic. +_obj_type(::Any) = :other +_obj_type(::JuMP.VariableRef) = :var +_obj_type(::Vector{JuMP.VariableRef}) = :var +_obj_type(::JuMP.Containers.DenseAxisArray{JuMP.VariableRef}) = :var +_obj_type(::JuMP.Containers.SparseAxisArray{JuMP.VariableRef}) = :var +function _print_iteration(k, args...) + _format = (val) -> Printf.@sprintf("%12.4e", val) + return println(lpad(k, 7), " |", join(_format.(args), " |")) +end + +_allmissing(::Missing) = true +_allmissing(x) = all(ismissing, x) +_anymissing(::Missing) = true +_anymissing(x) = any(ismissing, x) +_allval(x) = !_anymissing(x) +_anyval(x) = !_allmissing(x) + +function _add_obj_term!(model::JuMP.Model, term::String; component::String, objective::String) + return _add_obj_term_from_str!(model, Meta.parse(term); component=component, objective=objective) +end + +function _add_obj_term!(model::JuMP.Model, term::Number; component::String, objective::String) + push!(_iesopt(model).aux._obj_terms[objective], float(term)) + return nothing +end + +function _add_obj_term_from_str!(model::JuMP.Model, parsed_term::Expr; component::String, objective::String) + try + push!(_iesopt(model).aux._obj_terms[objective], eval(parsed_term)) + catch error + @critical "Failed to evaluate objective term" component objective error + end + return nothing +end + +function _add_obj_term_from_str!(model::JuMP.Model, parsed_term::Symbol; component::String, objective::String) + push!(_iesopt(model).aux._obj_terms[objective], "$(component).$(parsed_term)") + return nothing +end + +function _profiling_get_top(model::JuMP.Model, n::Int64; mode::Symbol=:time, groupby::Symbol=:function) + prof = _iesopt(model).aux._profiling + + if groupby == :function + data = prof + elseif groupby == :module + groups = Set(it[1] for it in keys(prof)) + data = Dict( + g => _Profiling( + sum(v.time for (k, v) in prof if k[1] == g), + sum(v.bytes for (k, v) in prof if k[1] == g), + sum(v.calls for (k, v) in prof if k[1] == g), + ) for g in groups + ) + elseif groupby == :file + groups = Set(it[1:2] for it in keys(prof)) + data = Dict( + g => _Profiling( + sum(v.time for (k, v) in prof if k[1:2] == g), + sum(v.bytes for (k, v) in prof if k[1:2] == g), + sum(v.calls for (k, v) in prof if k[1:2] == g), + ) for g in groups + ) + end + + return sort(collect(data); by=x -> getfield(x[2], mode), rev=true)[1:(min(n, length(data)))] +end + +function _profiling_format_top(model::JuMP.Model, n::Int64=5; mode::Symbol=:time) + data = _profiling_get_top(model, n; mode=mode, groupby=:function) + return OrderedDict( + Symbol("func: $(it.first[3]) @ $(splitpath(it.first[2])[end]) ($(it.first[1]))") => getfield(it.second, mode) + for it in data + ) +end diff --git a/src/utils/logging.jl b/src/utils/logging.jl new file mode 100644 index 0000000..8c5d509 --- /dev/null +++ b/src/utils/logging.jl @@ -0,0 +1,90 @@ +struct FileLogger <: AbstractLogger + logger::Logging.SimpleLogger +end + +function FileLogger(path::String) + return FileLogger(Logging.SimpleLogger(open(path, "w"))) +end + +function 
Logging.handle_message(filelogger::FileLogger, args...; kwargs...) + Logging.handle_message(filelogger.logger, args...; kwargs...) + return flush(filelogger.logger.stream) +end +Logging.shouldlog(filelogger::FileLogger, arg...) = true +Logging.min_enabled_level(filelogger::FileLogger) = Logging.Info +Logging.catch_exceptions(filelogger::FileLogger) = Logging.catch_exceptions(filelogger.logger) + +function _attach_logger!(model::JuMP.Model) + verbosity = _iesopt_config(model).verbosity + + logger = ( + if verbosity == "warning" + Logging.ConsoleLogger(Logging.Warn; meta_formatter=_new_metafmt) + elseif verbosity == true + Logging.ConsoleLogger(Logging.Info; meta_formatter=_new_metafmt) + elseif verbosity == false + Logging.ConsoleLogger(Logging.Error; meta_formatter=_new_metafmt) + else + @warn "Unsupported `verbosity` config. Choose from `true`, `false` or `warning`. Falling back to `true`." verbosity = + verbosity + Logging.ConsoleLogger(Logging.Info; meta_formatter=_new_metafmt) + end + ) + + if _iesopt_config(model).optimization.high_performance + _iesopt(model).logger = logger + else + log_file = "$(_iesopt_config(model).names.scenario).log" + log_path = normpath(mkpath(_iesopt_config(model).paths.results), log_file) + _iesopt(model).logger = LoggingExtras.TeeLogger(logger, FileLogger(log_path)) + end +end + +# Based on `default_metafmt` from ConsoleLogger.jl +function _new_metafmt(level::LogLevel, _module, group, id, file, line) + @nospecialize + PAD = 25 + + level_str = lowercase(level == Warn ? "Warning" : string(level)) + abspath_core = abspath(dirname(dirname(dirname(@__FILE__)))) + abspath_file = abspath(file) + + if startswith(abspath_file, abspath_core) + file = relpath(abspath_file, abspath_core) + depth = splitpath(file) + + if depth[1] == "src" + depth_prefix = replace(join(depth[2:end], "|"), ".jl" => "") + elseif depth[1] == "addons" + depth_prefix = "addon ($(replace(depth[end], ".jl" => "")))" + else + depth_prefix = "unknown ($(replace(depth[end], ".jl" => "")))" + end + else + depth = splitpath(file) + depth_prefix = "custom ($(replace(depth[end], ".jl" => "")))" + end + + max_depth_prefix_length = PAD - length(level_str) - length(" @ ") - length(" ~") + if length(depth_prefix) > max_depth_prefix_length + depth_prefix = depth_prefix[1:(max_depth_prefix_length - 3)] * "..." + end + + color = Logging.default_logcolor(level) + prefix = rpad("$(level_str) @ $(depth_prefix) ~", PAD) + suffix::String = "" + + Info <= level < Warn && return color, prefix, suffix + + _module !== nothing && (suffix *= string(_module)::String) + if file !== nothing + _module !== nothing && (suffix *= " ") + suffix *= contractuser(file)::String + if line !== nothing + suffix *= ":$(isa(line, UnitRange) ? "$(first(line))-$(last(line))" : line)" + end + end + !isempty(suffix) && (suffix = "@ " * suffix) + + return color, prefix, suffix +end diff --git a/src/utils/overview.jl b/src/utils/overview.jl new file mode 100644 index 0000000..1f7ed4a --- /dev/null +++ b/src/utils/overview.jl @@ -0,0 +1,51 @@ +""" + overview(file::String) + +Extracts the most important information from an IESopt model file, and returns it as a dictionary. 
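+
+# Example
+
+A hypothetical call (the file name is only an illustration):
+
+```julia
+summary = overview("my_model.iesopt")
+summary["version"]   # IESopt version that was used to `pack(...)` the model
+summary["files"]     # dictionary of all files contained in the packed model
+```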
+"""
+function overview(file::String)
+    if endswith(file, ".iesopt.yaml")
+        @critical "Only single-file IESopt models are supported by `overview` (can be created using `pack(...)`)" file
+    elseif !endswith(file, ".iesopt")
+        @critical "Unsupported file format" file
+    end
+
+    @info "Unpacking IESopt model"
+    info = unpack(file)
+    root_path = dirname(info["config"])
+
+    config = YAML.load_file(info["config"]; dicttype=OrderedDict{String, Any})
+
+    summary = Dict{String, Any}(
+        "version" => info["version"],
+        "path" => root_path,
+        "tlc_filename" => info["config"],
+        "files" => info["files"],
+    )
+
+    if haskey(config, "parameters")
+        if config["parameters"] isa String
+            summary["parameter_type"] = "file: $(config["parameters"])"
+            summary["parameter_value"] =
+                YAML.load_file(normpath(root_path, config["parameters"]); dicttype=Dict{String, Any})
+        elseif config["parameters"] isa Dict
+            summary["parameter_type"] = "dict"
+            summary["parameter_value"] = config["parameters"]
+        else
+            summary["parameter_type"] = "unknown"
+            summary["parameter_value"] = Dict()
+        end
+    else
+        summary["parameter_type"] = "empty"
+        summary["parameter_value"] = Dict()
+    end
+
+    summary["config"] = Dict{String, Any}()
+    for entry in ["name", "optimization", "files", "paths", "results"]
+        summary["config"][entry] = get(config["config"], entry, Dict())
+    end
+
+    summary["carriers"] = get(config, "carriers", Dict())
+
+    return summary
+end
diff --git a/src/utils/packing.jl b/src/utils/packing.jl
new file mode 100644
index 0000000..0d04da4
--- /dev/null
+++ b/src/utils/packing.jl
@@ -0,0 +1,264 @@
+"""
+    pack(file::String; out::String="", method::Symbol=:auto, include_results::Bool=false)
+
+Packs the IESopt model specified by the top-level config file `file` into a single file.
+
+The `out` argument specifies the output file name. If not specified, a temporary file is created. Returns the output
+file name. The `method` argument specifies the compression method: `:store` stores files without compression, while
+`:deflate` uses the DEFLATE compression method. The default, `:auto`, applies `:store` to all files below 1 MB and
+`:deflate` otherwise. Setting `include_results=true` additionally packs existing result files (`*.mfres.jld2`).
+"""
+function pack(file::String; out::String="", method::Symbol=:auto, include_results::Bool=false)
+    root_path = dirname(normpath(file))
+
+    zipfile_method = method == :store ? ZipFile.Store : ZipFile.Deflate
+    zipfile_name = normpath(isempty(out) ?
tempname() : normpath(root_path, out)) + if count(==('.'), zipfile_name) > 1 + @error "Output filename contains invalid special characters (`.`), which can lead to errors" zipfile_name + zipfile_name = split(zipfile_name, "."; limit=2)[1] * ".iesopt" + else + zipfile_name = rsplit(zipfile_name, "."; limit=2)[1] * ".iesopt" + end + + toplevel_config = YAML.load_file(file; dicttype=OrderedDict{String, Any}) + + paths = [] + if haskey(toplevel_config["config"], "paths") + paths_dict = toplevel_config["config"]["paths"] + for entry in ["files", "templates", "components", "addons", "results"] + haskey(paths_dict, entry) && push!(paths, abspath(root_path, paths_dict[entry])) + end + end + + toplevel_config_relname = relpath(file, root_path) + info = Dict( + "version" => string(pkgversion(@__MODULE__)), + "config" => toplevel_config_relname, + "files" => Dict{String, Vector{String}}( + "configs" => Vector{String}(), + "parameters" => Vector{String}(), + "data" => Vector{String}(), + "addons" => Vector{String}(), + "templates" => Vector{String}(), + "results" => Vector{String}(), + ), + ) + + files = Set() + push!(files, toplevel_config_relname) + haskey(toplevel_config, "supplemental") && push!(files, toplevel_config["supplemental"]) + + n_files_skipped = 0 + n_files_valid = 0 + for (root, _, filenames) in walkdir(root_path) + for filename in filenames + if endswith(filename, ".iesopt.yaml") + if relpath(normpath(root, filename), root_path) == filename + push!(files, relpath(normpath(root, filename), root_path)) + push!(info["files"]["configs"], relpath(normpath(root, filename), root_path)) + else + @warn "Encountered \"top-level config\" file outside top-level directory, skipping" filename + n_files_skipped += 1 + end + continue + end + + if !startswith(abspath(root, filename), Regex(join(paths, "|"))) + @debug "NOT packing file" file = normpath(root, filename) + n_files_skipped += 1 + continue + end + + if endswith(filename, ".csv") + push!(files, relpath(normpath(root, filename), root_path)) + push!(info["files"]["data"], relpath(normpath(root, filename), root_path)) + elseif endswith(filename, ".jl") + push!(files, relpath(normpath(root, filename), root_path)) + push!(info["files"]["addons"], relpath(normpath(root, filename), root_path)) + elseif endswith(filename, ".iesopt.template.yaml") + push!(files, relpath(normpath(root, filename), root_path)) + push!(info["files"]["templates"], relpath(normpath(root, filename), root_path)) + elseif endswith(filename, ".iesopt.param.yaml") + push!(files, relpath(normpath(root, filename), root_path)) + push!(info["files"]["parameters"], relpath(normpath(root, filename), root_path)) + elseif endswith(filename, ".mfres.jld2") && include_results + push!(files, relpath(normpath(root, filename), root_path)) + push!(info["files"]["results"], relpath(normpath(root, filename), root_path)) + else + @debug "NOT packing file" file = normpath(root, filename) + n_files_skipped += 1 + continue + end + + @debug "Packing file" file = normpath(root, filename) + n_files_valid += 1 + end + end + + zipfile = ZipFile.Writer(zipfile_name) + write(ZipFile.addfile(zipfile, "__info__"; method=zipfile_method), JSON.json(info)) + for file in files + filepath = normpath(root_path, file) + zm = (zipfile_method == :auto) ? ((filesize(filepath) < 1e6) ? 
:store : :deflate) : zipfile_method + + open(filepath, "r") do f + return write(ZipFile.addfile(zipfile, file; method=zm), read(f)) + end + end + close(zipfile) + + @info "Successfully packed model description" output_file = zipfile_name packed_files = n_files_valid skipped_files = + n_files_skipped + + return zipfile_name +end + +""" + unpack(file::String; out::String="", force_overwrite::Bool=false) + +Unpacks the IESopt model specified by `file`. + +The `out` argument specifies the output directory. If not specified, a temporary directory is created. Returns the +path to the top-level config file. The `force_overwrite` argument specifies whether to overwrite existing files. +""" +function unpack(file::String; out::String="", force_overwrite::Bool=false) + output_path = isempty(out) ? tempname() : mkpath(abspath(out)) + file = abspath(file) + + info = nothing + n_skipped = 0 + n_written = 0 + + zarchive = ZipFile.Reader(file) + for file in zarchive.files + if endswith(file.name, "__info__") + info = JSON.parse(read(file, String)) + continue + end + + filepath = normpath(output_path, file.name) + + if isfile(filepath) && !force_overwrite + n_skipped += 1 + continue + end + + mkpath(dirname(filepath)) + write(filepath, read(file)) + n_written += 1 + end + close(zarchive) + + if string(pkgversion(@__MODULE__)) != info["version"] + @warn "You are trying to unpack an IESopt model that was created using a different version, which may not be compatible" detected = + info["version"] active = string(pkgversion(@__MODULE__)) + end + + info["config"] = normpath(output_path, info["config"]) + if n_skipped > 0 + @warn "Skipped $(n_skipped)/$(n_skipped + n_written) files; use `force_overwrite=true` to overwrite existing files" config = + info["config"] files_written = n_written + else + @info "Unpacked model description" config = info["config"] files_written = n_written + end + + return info +end + +# function pack(filename::String; out::String="") +# if isempty(out) +# out = tempname() +# end + +# root_path = dirname(normpath(filename)) +# toplevel_config = YAML.load_file(filename; dicttype=OrderedDict{String, Any}) + +# paths = [] +# if haskey(toplevel_config["config"], "paths") +# paths_dict = toplevel_config["config"]["paths"] +# for entry in ["files", "templates", "components", "addons"] +# haskey(paths_dict, entry) && push!(paths, abspath(root_path, paths_dict[entry])) +# end +# end + +# files = Set() +# haskey(toplevel_config, "parameters") && push!(files, toplevel_config["parameters"]) +# haskey(toplevel_config, "supplemental") && push!(files, toplevel_config["supplemental"]) +# for (root, _, filenames) in walkdir(root_path) +# for filename in filenames +# startswith(abspath(root, filename), Regex(join(paths, "|"))) || continue +# if endswith(filename, ".csv") || endswith(filename, ".iesopt.template.yaml") || endswith(filename, ".jl") +# filepath = normpath(root, filename) +# push!(files, relpath(filepath, root_path)) +# end +# end +# end +# files = collect(files) + +# nc = NCDatasets.NCDataset( +# out, +# "c"; +# attrib=OrderedDict("tlc_filename" => splitdir(filename)[2], "tlc_content" => read(filename)), +# ) + +# grp_files = NCDatasets.defGroup(nc, "files") + +# for i in eachindex(files) +# file = files[i] +# if endswith(file, ".csv") +# df = +# (CSV.File( +# normpath(root_path, file); +# stringtype=String, +# typemap=Dict(Bool => String, Int64 => Float64), +# )) |> DataFrames.DataFrame +# cols = names(df) + +# for j in eachindex(cols) +# value = collect(df[!, cols[j]]) +# value_type = 
string(eltype(value)) +# if occursin("String", value_type) +# fill_value = "" +# elseif occursin("Float", value_type) +# fill_value = NaN +# else +# @error "Unexpected type in CSV file" file value_type +# end +# NCDatasets.defVar( +# grp_files, +# "file_$(i)_col_$(j)", +# value, +# ("row_$(i)",); +# attrib=OrderedDict( +# "filename" => file, +# "type" => "csv", +# "column_name" => cols[j], +# "_FillValue" => fill_value, +# ), +# ) +# end +# elseif endswith(file, ".yaml") +# byte_data = read(normpath(root_path, file)) +# NCDatasets.defVar( +# grp_files, +# "file_$(i)", +# byte_data, +# ("byte_$(i)",); +# attrib=OrderedDict("filename" => file, "type" => split("files/25/global.iesopt.param.yaml", ".")[end - 1]), +# ) +# elseif endswith(file, ".jl") +# byte_data = read(normpath(root_path, file)) +# NCDatasets.defVar( +# grp_files, +# "file_$(i)", +# byte_data, +# ("byte_$(i)",); +# attrib=OrderedDict("filename" => file, "type" => "jl"), +# ) +# end +# end + +# close(nc) + +# return out +# end diff --git a/src/utils/utilities/Utilities.jl b/src/utils/utilities/Utilities.jl new file mode 100644 index 0000000..2725f4d --- /dev/null +++ b/src/utils/utilities/Utilities.jl @@ -0,0 +1,73 @@ +""" + Utilities + +This module contains utility functions for the IESopt package, that can be helpful in preparing or analysing components. +""" +module Utilities + +import ..IESopt +import ..IESopt: @critical +import ArgCheck: @argcheck +import JuMP + +include("model_wrapper.jl") + +""" + annuity(total::Number; lifetime::Number, rate::Float64, fraction::Float64) + +Calculate the annuity of a total amount over a lifetime with a given interest rate. + +# Arguments + +- `total::Number`: The total amount to be annuitized. + +# Keyword Arguments + +- `lifetime::Number`: The lifetime over which the total amount is to be annuitized. +- `rate::Float64`: The interest rate at which the total amount is to be annuitized. +- `fraction::Float64`: The fraction of a year that the annuity is to be calculated for (default: 1.0). + +# Returns + +`Float64`: The annuity of the total amount over the lifetime with the given interest rate. + +# Example + +Calculating a simple annuity, for a total amount of € 1000,- over a lifetime of 10 years with an interest rate of 5%: + +```julia +# Set a parameter inside a template. +set("capex", IESU.annuity(1000.0; lifetime=10, rate=0.05)) +``` + +Calculating a simple annuity, for a total amount of € 1000,- over a lifetime of 10 years with an interest rate of 5%, +for a fraction of a year (given by `MODEL.yearspan`, which is the total timespan of the model in years): + +```julia +# Set a parameter inside a template. 
+set("capex", IESU.annuity(1000.0; lifetime=10, rate=0.05, fraction=MODEL.yearspan)) +``` +""" +function annuity(total::Number; lifetime::Number, rate::Float64, fraction::Float64=1.0)::Float64 + @argcheck total >= 0 + @argcheck 0 < lifetime < 1e3 + @argcheck 0.0 < rate < 1.0 + @argcheck fraction > 0 + return total * rate / (1 - (1 + rate)^(-lifetime)) * fraction +end + +function annuity(total::Number, lifetime::Number, rate::Number) + msg = "Error trying to call `annuity($(total), $(lifetime), $(rate))`" + reason = "`lifetime` and `rate` must be passed as keyword arguments to `annuity(...)`" + example = "`annuity(1000.0; lifetime=10, rate=0.05)`" + @critical msg reason example +end + +function annuity(total::Number, lifetime::Number, rate::Number, fraction::Number) + msg = "Error trying to call `annuity($(total), $(lifetime), $(rate), $(fraction))`" + reason = "`lifetime`, `rate`, and `fraction` must be passed as keyword arguments to `annuity(...)`" + example = "`annuity($(total); lifetime=$(lifetime), rate=$(rate), fraction=$(fraction))`" + @critical msg reason example +end + +end diff --git a/src/utils/utilities/model_wrapper.jl b/src/utils/utilities/model_wrapper.jl new file mode 100644 index 0000000..64ca674 --- /dev/null +++ b/src/utils/utilities/model_wrapper.jl @@ -0,0 +1,25 @@ +""" + ModelWrapper + +Wraps an IESopt.jl model to expose various easy access functions in `IESopt.Utilities`, and Core Template functions. + +# Accessors: + +- `timespan::Float64`: The total timespan of the model, in hours. +- `yearspan::Float64`: The total timespan of the model, in years, based on `1 year = 8760 hours`. +""" +struct ModelWrapper + model::JuMP.Model +end + +function Base.getproperty(mw::ModelWrapper, property::Symbol) + if property == :model + return getfield(mw, :model) + elseif property == :timespan + return sum(s.weight for s in values(IESopt._iesopt_model(mw.model).snapshots)) + elseif property == :yearspan + return mw.timespan / 8760.0 + end + + @critical "Trying to access undefined property of (wrapped) model" property +end diff --git a/src/utils/utils.jl b/src/utils/utils.jl new file mode 100644 index 0000000..420e1c6 --- /dev/null +++ b/src/utils/utils.jl @@ -0,0 +1,31 @@ +macro critical(msg, args...) + return esc(quote + local message = string($msg) + @error message $(args...) + error(message) + end) +end + +function _try_loading_solver() + _try_import(solver::String) = IESopt.eval(Meta.parse("""try; import $(solver); true; catch; false; end;""")) + active = join([s for s in _ALL_SOLVER_INTERFACES if _try_import(s)], ", ") + + if _is_precompiling() + else + @info "Activating solver interfaces" active + end + + return active +end + +# Immediately try to load solver interfaces. 
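+# The resulting constant is a comma-separated string of all solver packages that could successfully be imported.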
+const ACTIVE_SOLVER_INTERFACES = _try_loading_solver() + +include("general.jl") +include("logging.jl") +include("packing.jl") +include("overview.jl") # makes use of unpacking (therefore included after packing.jl) +include("docs.jl") + +include("utilities/Utilities.jl") +const IESU = Utilities diff --git a/src/validation/addons/addons.jl b/src/validation/addons/addons.jl new file mode 100644 index 0000000..381f3ae --- /dev/null +++ b/src/validation/addons/addons.jl @@ -0,0 +1,18 @@ +function _validate_addon(filename::String) + valid = true + + content = "" + try + # TODO: Load the addon file + catch + return _vassert(false, "Could not load addon file"; filename=filename) + end + + try + # TODO + catch exception + valid &= _vassert(false, "An unexpected exception occurred"; filename=filename, exception=exception) + end + + return valid +end diff --git a/src/validation/core/carrier.jl b/src/validation/core/carrier.jl new file mode 100644 index 0000000..0f62f66 --- /dev/null +++ b/src/validation/core/carrier.jl @@ -0,0 +1,11 @@ +function _validate_carriers(carriers::Dict) + valid = true + + try + # TODO + catch exception + valid &= _vassert(false, "An unexpected exception occured"; location="carriers") + end + + return valid +end diff --git a/src/validation/core/connection.jl b/src/validation/core/connection.jl new file mode 100644 index 0000000..e07fc3f --- /dev/null +++ b/src/validation/core/connection.jl @@ -0,0 +1,11 @@ +function _validate_connection(name::String, properties::Dict) + valid = true + + try + # TODO + catch exception + valid &= _vassert(false, "An unexpected exception occured"; location="Connection", name=name) + end + + return valid +end diff --git a/src/validation/core/core.jl b/src/validation/core/core.jl new file mode 100644 index 0000000..e66c7a7 --- /dev/null +++ b/src/validation/core/core.jl @@ -0,0 +1,34 @@ +include("carrier.jl") +include("connection.jl") +include("decision.jl") +include("node.jl") +include("profile.jl") +include("snapshot.jl") +include("template.jl") +include("unit.jl") + +function _validate_component(name::Any, properties::Dict) + valid = true + + valid &= _vassert(name isa String, "Component name must be a string"; component=name) + + type = get(properties, "type", "") + + if type == "" + valid &= _vassert(false, "Component is missing `type`"; component=name) + elseif type == "Connection" + valid &= _validate_connection(name, properties) + elseif type == "Decision" + valid &= _validate_decision(name, properties) + elseif type == "Node" + valid &= _validate_node(name, properties) + elseif type == "Profile" + valid &= _validate_profile(name, properties) + elseif type == "Unit" + valid &= _validate_unit(name, properties) + else + valid &= _validate_template_component(name, properties) + end + + return valid +end diff --git a/src/validation/core/decision.jl b/src/validation/core/decision.jl new file mode 100644 index 0000000..f7e58b8 --- /dev/null +++ b/src/validation/core/decision.jl @@ -0,0 +1,11 @@ +function _validate_decision(name::String, properties::Dict) + valid = true + + try + # TODO + catch exception + valid &= _vassert(false, "An unexpected exception occured"; location="Decision", name=name) + end + + return valid +end diff --git a/src/validation/core/node.jl b/src/validation/core/node.jl new file mode 100644 index 0000000..5b5c049 --- /dev/null +++ b/src/validation/core/node.jl @@ -0,0 +1,11 @@ +function _validate_node(name::String, properties::Dict) + valid = true + + try + # TODO + catch exception + valid &= _vassert(false, "An 
unexpected exception occured"; location="Node", name=name) + end + + return valid +end diff --git a/src/validation/core/profile.jl b/src/validation/core/profile.jl new file mode 100644 index 0000000..31e81e7 --- /dev/null +++ b/src/validation/core/profile.jl @@ -0,0 +1,11 @@ +function _validate_profile(name::String, properties::Dict) + valid = true + + try + # TODO + catch exception + valid &= _vassert(false, "An unexpected exception occured"; location="Profile", name=name) + end + + return valid +end diff --git a/src/validation/core/snapshot.jl b/src/validation/core/snapshot.jl new file mode 100644 index 0000000..4dda758 --- /dev/null +++ b/src/validation/core/snapshot.jl @@ -0,0 +1,11 @@ +function _validate_snapshots(carriers::Dict) + valid = true + + try + # TODO + catch exception + valid &= _vassert(false, "An unexpected exception occured"; location="snapshots") + end + + return valid +end diff --git a/src/validation/core/template.jl b/src/validation/core/template.jl new file mode 100644 index 0000000..d83b658 --- /dev/null +++ b/src/validation/core/template.jl @@ -0,0 +1,11 @@ +function _validate_template_component(name::String, properties::Dict) + valid = true + + try + # TODO + catch exception + valid &= _vassert(false, "An unexpected exception occured"; location=properties["type"], name=name) + end + + return valid +end diff --git a/src/validation/core/unit.jl b/src/validation/core/unit.jl new file mode 100644 index 0000000..5820e9a --- /dev/null +++ b/src/validation/core/unit.jl @@ -0,0 +1,11 @@ +function _validate_unit(name::String, properties::Dict) + valid = true + + try + # TODO + catch exception + valid &= _vassert(false, "An unexpected exception occured"; location="Unit", name=name) + end + + return valid +end diff --git a/src/validation/validation.jl b/src/validation/validation.jl new file mode 100644 index 0000000..2c7a4f5 --- /dev/null +++ b/src/validation/validation.jl @@ -0,0 +1,90 @@ +function _vassert(condition::Bool, message::String; kwargs...)::Bool + condition || (@error "Validation error: $(message)" kwargs...) + return condition +end + +include("core/core.jl") +include("yaml/yaml.jl") +include("addons/addons.jl") + +function validate(toplevel_config_file::String) + toplevel_config_file = abspath(toplevel_config_file) + model_path = dirname(toplevel_config_file) + + # TODO: Catch `high_performance` here manually and skip validation + valid = true + + valid &= _validate_raw_yaml(toplevel_config_file) + + # Try to continue with an "empty" configuration if the top-level config is invalid. + _top_level_config = valid ? YAML.load_file(toplevel_config_file; dicttype=Dict{String, Any}) : Dict{String, Any}() + + files_to_validate = [] + + # Add the specified parameters file (if one exists). + if haskey(_top_level_config, "parameters") && (_top_level_config["parameters"] isa String) + push!(files_to_validate, abspath(model_path, _top_level_config["parameters"])) + end + + # Extract configured paths or defaults. + paths = get(_top_level_config, "paths", Dict{String, Any}()) + folders = [ + replace(get(paths, "files", "files"), '\\' => '/'), + replace(get(paths, "templates", "templates"), '\\' => '/'), + replace(get(paths, "components", "components"), '\\' => '/'), + replace(get(paths, "addons", "addons"), '\\' => '/'), + ] + + # Find all relevant files in the specified folders. 
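+ # Currently only template files (`*.iesopt.template.yaml`) and addon files (`*.jl`) are collected; CSV component files are not validated yet (see the TODO below).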
+ for entry in folders + folder = abspath(model_path, entry) + for (root, _, files) in walkdir(folder) + for file in files + if endswith(file, ".iesopt.template.yaml") || endswith(file, ".jl") + push!(files_to_validate, abspath(root, file)) + end + + # TODO: Detect CSV component files, and implement validation. + end + end + end + + # Validate all found files. + for filename in files_to_validate + if endswith(filename, ".jl") + valid &= _validate_addon(filename) + else + valid &= _validate_raw_yaml(filename) + end + end + + return _vassert(valid, "Encountered error(s) while validating model description"; config=toplevel_config_file) +end + +# TODO: Collection of old comments regarding points to validate, see below. + +# if config.aggregate +# if haskey(config, "weights") || haskey(config, "offset") || haskey(config, "names") +# @error "Snapshot aggregation only supports setting `count`" +# end +# end + +# config/opt: get(config, "multiobjective", nothing) only with "MO/mo" in config + +# if isempty(config) +# @warn "No result section found in model config; consider specifying `results` before running the model" +# end + +# if !haskey(data, "config") +# @error "Missing `config` entry" +# return false +# end + +# parameters = pop!(data, "parameters", Dict()) +# if parameters isa String +# if !endswith(parameters, ".iesopt.param.yaml") +# @error "Wrong file supplied for global parameters, should end in `.iesopt.param.yaml" filename = parameters +# end +# end + +# adding "weight" to snapshots should produce an error => "weights" is the correct key, all "unused" keys should trigger a warning diff --git a/src/validation/yaml/iesopt.jl b/src/validation/yaml/iesopt.jl new file mode 100644 index 0000000..f5e65db --- /dev/null +++ b/src/validation/yaml/iesopt.jl @@ -0,0 +1,82 @@ +function _validate_raw_yaml_iesopt(filename::String) + valid = true + + content = Dict{String, Any}() + try + merge!(content, YAML.load_file(filename; dicttype=Dict{String, Any})) + catch + return _vassert(false, "Could not load YAML file"; filename=filename) + end + + try + # Check that mandatory entries in "config" exist. + valid &= _vassert(haskey(content, "config"), "Top-level config missing mandatory entry"; entry="config") + config = content["config"] + valid &= _vassert( + haskey(config, "optimization"), + "Top-level config missing mandatory entry"; + entry="config/optimization", + ) + valid &= _vassert( + haskey(config["optimization"], "problem_type"), + "Top-level config missing mandatory entry"; + entry="config/optimization/problem_type", + ) + + # Check carriers + valid &= _vassert(haskey(content, "carriers"), "Top-level config missing mandatory entry"; entry="carriers") + carriers = content["carriers"] + + # Check loading of components. + valid &= _vassert( + haskey(content, "components") || haskey(content, "load_components"), + "Top-level config missing mandatory entry"; + entry="at least one of: components, load_components", + ) + components = get(content, "components", Dict{String, Any}()) + load_components = get(content, "load_components", Vector{String}()) + + # Check all components that are directly defined. + for (k, v) in components + valid &= _validate_component(k, v) + end + + # Check multi-objective formulation. 
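+ # A `multiobjective` section and an `MO`/`mo` marker in `problem_type` must be specified together, or not at all.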
+ if haskey(config["optimization"], "multiobjective") + mo = config["optimization"]["multiobjective"] + valid &= _vassert( + occursin("mo", lowercase(config["optimization"]["problem_type"])), + "Specifying `MO` is mandatory for multi-objective models"; + entry="config/optimization/problem_type", + ) + valid &= _vassert( + haskey(mo, "mode"), + "Top-level config missing mandatory entry"; + entry="config/optimization/multiobjective/mode", + ) + valid &= _vassert( + haskey(mo, "terms"), + "Top-level config missing mandatory entry"; + entry="config/optimization/multiobjective/terms", + ) + valid &= _vassert( + haskey(mo, "settings"), + "Top-level config missing mandatory entry"; + entry="config/optimization/multiobjective/settings", + ) + else + valid &= _vassert( + !occursin("mo", lowercase(config["optimization"]["problem_type"])), + "Top-level config missing mandatory entry (since `MO` is given in `problem_type`)"; + entry="config/optimization/multiobjective", + ) + end + + # TODO: Check that only allowed entries exist. + # TODO: Validate other things. + catch exception + valid &= _vassert(false, "An unexpected exception occurred"; filename=filename, exception=exception) + end + + return valid +end diff --git a/src/validation/yaml/iesopt.param.jl b/src/validation/yaml/iesopt.param.jl new file mode 100644 index 0000000..f014769 --- /dev/null +++ b/src/validation/yaml/iesopt.param.jl @@ -0,0 +1,18 @@ +function _validate_raw_yaml_iesopt_param(filename::String) + valid = true + + content = Dict{String, Any}() + try + merge!(content, YAML.load_file(filename; dicttype=Dict{String, Any})) + catch + return _vassert(false, "Could not load YAML file"; filename=filename) + end + + try + # TODO + catch exception + valid &= _vassert(false, "An unexpected exception occurred"; filename=filename, exception=exception) + end + + return valid +end diff --git a/src/validation/yaml/iesopt.template.jl b/src/validation/yaml/iesopt.template.jl new file mode 100644 index 0000000..76b9986 --- /dev/null +++ b/src/validation/yaml/iesopt.template.jl @@ -0,0 +1,43 @@ +function _validate_raw_yaml_iesopt_template(filename::String) + valid = true + + content = Dict{String, Any}() + try + merge!(content, YAML.load_file(filename; dicttype=Dict{String, Any})) + catch + return _vassert(false, "Could not load YAML file"; filename=filename) + end + + try + # TODO: Only allowed: "parameters", "components", "component". + + valid &= _vassert( + haskey(content, "components") || haskey(content, "component"), + "Template requires either `components` or `component`"; + filename=filename, + ) + + valid &= _vassert( + !(haskey(content, "components") && haskey(content, "component")), + "Template requires either `components` or `component`, but not both at the same time"; + filename=filename, + ) + + if haskey(content, "components") + for (k, v) in content["components"] + valid &= _validate_component(k, v) + end + end + + if haskey(content, "component") + template_name = split(splitpath(filename)[end], ".")[1] + valid &= _validate_component("_anonymouscomponent_$(template_name)", content["component"]) + end + + # TODO: Validate other things. 
+ catch exception + valid &= _vassert(false, "An unexpected exception occurred"; filename=filename, exception=exception) + end + + return valid +end diff --git a/src/validation/yaml/yaml.jl b/src/validation/yaml/yaml.jl new file mode 100644 index 0000000..8a40370 --- /dev/null +++ b/src/validation/yaml/yaml.jl @@ -0,0 +1,17 @@ +include("iesopt.jl") +include("iesopt.template.jl") +include("iesopt.param.jl") + +function _validate_raw_yaml(filename::String) + _vassert(isfile(filename), "File does not exist"; filename=filename) || return false + + if endswith(filename, ".iesopt.yaml") + return _validate_raw_yaml_iesopt(filename) + elseif endswith(filename, ".iesopt.template.yaml") + return _validate_raw_yaml_iesopt_template(filename) + elseif endswith(filename, ".iesopt.param.yaml") + return _validate_raw_yaml_iesopt_param(filename) + end + + return _vassert(false, "File extension not recognized"; filename=filename) +end From 5f13fd5588b487604e743f5717b30def27782b4d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20Str=C3=B6mer?= Date: Mon, 3 Jun 2024 17:18:07 +0200 Subject: [PATCH 04/11] test: add initial version of tests, based on the internal commit 216607e3f7e61af5aae4e6fb1faf6470e7a5a385 (branch: open-source), with some slight modifications - including accounting for IESoptLib - and missing some functionality that has not been cleaned up yet. --- test/runtests.jl | 18 +- test/src/aqua.jl | 11 ++ test/src/basic.jl | 53 ++++++ test/src/examples.jl | 163 ++++++++++++++++++ test/src/texify.jl | 41 +++++ .../availability_test_failure.iesopt.yaml | 45 +++++ .../availability_test_success.iesopt.yaml | 45 +++++ test/test_files/carrier_mismatch.iesopt.yaml | 23 +++ .../components/TestComp.iesopt.template.yaml | 4 + .../filesystem/include_components.iesopt.yaml | 16 ++ .../include_components_slash.iesopt.yaml | 16 ++ ...clude_components_slash_windows.iesopt.yaml | 16 ++ .../include_dotslash_components.iesopt.yaml | 16 ++ ...lude_dotslash_components_slash.iesopt.yaml | 16 ++ ...slash_components_slash_windows.iesopt.yaml | 16 ++ ...de_dotslash_components_windows.iesopt.yaml | 16 ++ test/test_files/increased_fuel.iesopt.yaml | 61 +++++++ .../variable_unit_count.iesopt.yaml | 51 ++++++ .../test_files/wrong_problem_type.iesopt.yaml | 10 ++ 19 files changed, 634 insertions(+), 3 deletions(-) create mode 100644 test/src/aqua.jl create mode 100644 test/src/basic.jl create mode 100644 test/src/examples.jl create mode 100644 test/src/texify.jl create mode 100644 test/test_files/availability_test_failure.iesopt.yaml create mode 100644 test/test_files/availability_test_success.iesopt.yaml create mode 100644 test/test_files/carrier_mismatch.iesopt.yaml create mode 100644 test/test_files/filesystem/components/TestComp.iesopt.template.yaml create mode 100644 test/test_files/filesystem/include_components.iesopt.yaml create mode 100644 test/test_files/filesystem/include_components_slash.iesopt.yaml create mode 100644 test/test_files/filesystem/include_components_slash_windows.iesopt.yaml create mode 100644 test/test_files/filesystem/include_dotslash_components.iesopt.yaml create mode 100644 test/test_files/filesystem/include_dotslash_components_slash.iesopt.yaml create mode 100644 test/test_files/filesystem/include_dotslash_components_slash_windows.iesopt.yaml create mode 100644 test/test_files/filesystem/include_dotslash_components_windows.iesopt.yaml create mode 100644 test/test_files/increased_fuel.iesopt.yaml create mode 100644 test/test_files/variable_unit_count.iesopt.yaml create mode 100644 
test/test_files/wrong_problem_type.iesopt.yaml diff --git a/test/runtests.jl b/test/runtests.jl index 2fff2bc..0324c87 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -2,16 +2,28 @@ using IESopt, Suppressor using Test, Aqua, JET import JuMP +const PATH_TEST = IESopt._PATHS[:test] +const PATH_EXAMPLES = IESopt._PATHS[:examples] +const PATH_TESTFILES = normpath(PATH_TEST, "test_files") +const PATH_CURRENT = pwd() + @testset "IESopt.jl" verbose = true begin @testset "Code quality (Aqua.jl)" begin - # Aqua.test_all(IESopt) + include("src/aqua.jl") end @testset "Code linting (JET.jl)" begin - JET.test_package(IESopt; target_defined_modules = true) + # JET.test_package(IESopt; target_defined_modules = true) end @testset "Basic (IESopt.jl)" begin - @test true + include("src/basic.jl") + end + + @testset "Examples (IESopt.jl)" begin + if isnothing(IESopt.Library) + else + include("src/examples.jl") + end end end diff --git a/test/src/aqua.jl b/test/src/aqua.jl new file mode 100644 index 0000000..cccc592 --- /dev/null +++ b/test/src/aqua.jl @@ -0,0 +1,11 @@ +@testset "Method ambiguity" begin + Aqua.test_ambiguities(IESopt) +end + +@testset "Persistent tasks" begin + # Aqua.test_persistent_tasks(IESopt) +end + +@testset "All" verbose = true begin + Aqua.test_all(IESopt; ambiguities=false, persistent_tasks=false) +end diff --git a/test/src/basic.jl b/test/src/basic.jl new file mode 100644 index 0000000..54cac56 --- /dev/null +++ b/test/src/basic.jl @@ -0,0 +1,53 @@ +@testset "Basic models" begin + model = generate!(joinpath(PATH_TESTFILES, "increased_fuel.iesopt.yaml"); verbosity=false) + optimize!(model) + @test JuMP.objective_value(model) ≈ 0 atol = 0.1 + + model = generate!(joinpath(PATH_TESTFILES, "variable_unit_count.iesopt.yaml"); verbosity=false) + optimize!(model) + @test JuMP.objective_value(model) ≈ 71000 atol = 0.1 + + model = generate!(joinpath(PATH_TESTFILES, "availability_test_success.iesopt.yaml"); verbosity=false) + optimize!(model) + @test JuMP.objective_value(model) ≈ 1000 atol = 0.1 + + model = generate!(joinpath(PATH_TESTFILES, "availability_test_failure.iesopt.yaml"); verbosity=false) + @suppress optimize!(model) # `@test_logs` fails because: https://github.com/JuliaLang/julia/issues/48456 + @test JuMP.termination_status(model) == JuMP.MOI.INFEASIBLE +end + +@testset "Filesystem paths" verbose = true begin + for fn in ( + "include_components.iesopt.yaml", + "include_components_slash.iesopt.yaml", + "include_dotslash_components.iesopt.yaml", + "include_dotslash_components_slash.iesopt.yaml", + "include_components_slash_windows.iesopt.yaml", + "include_dotslash_components_windows.iesopt.yaml", + "include_dotslash_components_slash_windows.iesopt.yaml", + ) + @testset "$(split(fn, ".")[1])" begin + # full path + model = JuMP.Model() + IESopt.generate!(model, joinpath(PATH_TESTFILES, "filesystem", fn); verbosity=false) + @test haskey(model.ext[:iesopt].input.noncore[:templates], "TestComp") + + # relative path + cd(PATH_TESTFILES) + model = JuMP.Model() + IESopt.generate!(model, joinpath("filesystem", fn); verbosity=false) + + # filename only + @test haskey(model.ext[:iesopt].input.noncore[:templates], "TestComp") + cd(joinpath(PATH_TESTFILES, "filesystem")) + model = JuMP.Model() + IESopt.generate!(model, fn; verbosity=false) + @test haskey(model.ext[:iesopt].input.noncore[:templates], "TestComp") + cd(PATH_CURRENT) + end + end +end + +# Clean up output files after testing is done. 
+rm(joinpath(PATH_TESTFILES, "out"); force=true, recursive=true) +rm(joinpath(PATH_TESTFILES, "filesystem", "out"); force=true, recursive=true) diff --git a/test/src/examples.jl b/test/src/examples.jl new file mode 100644 index 0000000..8661da6 --- /dev/null +++ b/test/src/examples.jl @@ -0,0 +1,163 @@ +const HiGHS = IESopt.HiGHS + +function _test_example_default_solver(filename::String; obj::Float64, verbosity::Union{Bool, String}=false, kwargs...) + @testset "$(split(filename, ".")[1])" begin + model = @suppress generate!(joinpath(PATH_EXAMPLES, filename); verbosity=verbosity, kwargs...) + @suppress optimize!(model) + @test JuMP.objective_value(model) ≈ obj atol = 0.1 + end +end + +# Run basic tests that check the objective value of the example against a prerecorded value. +_test_example_default_solver("01_basic_single_node.iesopt.yaml"; obj=525.0) +_test_example_default_solver("02_advanced_single_node.iesopt.yaml"; obj=1506.75, verbosity=true) +_test_example_default_solver("03_basic_two_nodes.iesopt.yaml"; obj=1225.0, verbosity="warning") +_test_example_default_solver("05_basic_two_nodes_1y.iesopt.yaml"; obj=667437.8) +_test_example_default_solver("06_recursion_h2.iesopt.yaml"; obj=18790.8) +_test_example_default_solver("07_csv_filestorage.iesopt.yaml"; obj=667437.8) +_test_example_default_solver("08_basic_investment.iesopt.yaml"; obj=2015.6) +_test_example_default_solver("09_csv_only.iesopt.yaml"; obj=667437.8) +_test_example_default_solver("10_basic_load_shedding.iesopt.yaml"; obj=25000 + 1083.9) +_test_example_default_solver("11_basic_unit_commitment.iesopt.yaml"; obj=1570.0) +_test_example_default_solver("12_incremental_efficiency.iesopt.yaml"; obj=3570.0) +_test_example_default_solver("15_varying_efficiency.iesopt.yaml"; obj=2131435.8) +_test_example_default_solver("16_noncore_components.iesopt.yaml"; obj=4372.2) +_test_example_default_solver("17_varying_connection_capacity.iesopt.yaml"; obj=300.0) +_test_example_default_solver("18_addons.iesopt.yaml"; obj=85.0) +_test_example_default_solver("25_global_parameters.iesopt.yaml"; obj=50.0) +_test_example_default_solver("25_global_parameters.iesopt.yaml"; obj=100.0, demand=10) +_test_example_default_solver("26_initial_states.iesopt.yaml"; obj=150.0, store_initial_state=15) +_test_example_default_solver("26_initial_states.iesopt.yaml"; obj=0.0, store_initial_state=50) +_test_example_default_solver("27_piecewise_linear_costs.iesopt.yaml"; obj=450.0) +_test_example_default_solver("29_advanced_unit_commitment.iesopt.yaml"; obj=7000.0) +_test_example_default_solver("30_representative_snapshots.iesopt.yaml"; obj=319100.0) +_test_example_default_solver("44_lossy_connections.iesopt.yaml"; obj=1233.75) + +# Run tests that manually check various outcomes of example models. + +@testset "04_constraint_safety" begin + model = generate!(joinpath(PATH_EXAMPLES, "04_constraint_safety.iesopt.yaml"); verbosity=false) + optimize!(model) + @test JuMP.value(model.ext[:iesopt].model.objectives["total_cost"].expr) ≈ 2975.0 atol = 0.05 + @test sum(JuMP.value.(values(model.ext[:iesopt].aux.constraint_safety_expressions))) ≈ 1 +end + +# NOTE: This example fails because it tries to read two snapshots from a CSV file containing only one row. 
+# model = JuMP.direct_model(HiGHS.Optimizer()) +# generate!(model, joinpath(dir, "19_etdfs.iesopt.yaml"); verbosity=false) +# optimize!(model) +# @test JuMP.objective_value(model) ≈ 95.7 atol = 0.1 +# @test sum(JuMP.value.(values(model.ext[:iesopt].aux.constraint_safety_expressions))) ≈ 0 + +@testset "20_chp" begin + model = generate!(joinpath(PATH_EXAMPLES, "20_chp.iesopt.yaml"); verbosity=false) + optimize!(model) + @test all( + JuMP.value.(component(model, "chp.power").exp.out_electricity) .== + [2.75, 5.50, 7.00, 8.00, 9.00, 10.00, 5.00, 5.00, 9.00], + ) + @test all( + JuMP.value.(component(model, "chp.heat").exp.out_heat) .== + [5.00, 10.00, 10.00, 10.00, 5.00, 0.00, 0.00, 5.00, 5.00], + ) + @test all( + JuMP.value.(component(model, "create_gas").exp.value) .== + [9.375, 18.75, 22.5, 25.0, 25.0, 25.0, 12.5, 15.0, 25.0], + ) +end + +# model = JuMP.direct_model(HiGHS.Optimizer()) +# generate!(model, joinpath(dir, "21_aggregated_snapshots.iesopt.yaml")) +# optimize!(model) +# @test all(JuMP.value.(component(model, "buy").exp.value) .≈ [19.0 / 3.0, 3.0, 2.0]) +# @test sum(JuMP.value.(values(model.ext[:iesopt].aux.constraint_safety_expressions))) ≈ 0 + +@testset "snapshots (22 and 23)" begin + model = generate!(joinpath(PATH_EXAMPLES, "22_snapshot_weights.iesopt.yaml"); verbosity=false) + optimize!(model) + @test all(JuMP.value.(component(model, "buy").exp.value) .≈ [10.0, 6.0, 6.0, 0.0, 7.0, 4.0]) + obj_val_example_22 = JuMP.objective_value(model) + _test_example_default_solver("23_snapshots_from_csv.iesopt.yaml"; obj=obj_val_example_22) +end + +# model = generate!(joinpath(dir, "24_linearized_optimal_powerflow.iesopt.yaml"); verbosity=false) +# optimize!(model) +# @test JuMP.value(model.ext[:iesopt].model.objectives["total_cost"].expr) ≈ 5333.16 atol = 0.05 +# @test JuMP.objective_value(model) ≈ (50 * 10000 + 5333.16) atol = 0.05 +# @test sum(JuMP.value.(values(model.ext[:iesopt].aux.constraint_safety_expressions))) ≈ 50 +# ac_flows = [ +# round(JuMP.value(component(model, conn).exp.pf_flow[1]); digits=3) for +# conn in ["conn12", "conn23", "conn24", "conn34", "conn56", "conn57"] +# ] +# dc_flows = [round(JuMP.value(component(model, conn).var.flow[1]); digits=3) for conn in ["hvdc1", "hvdc2"]] +# @test all(ac_flows .== [133.368, -54.421, 187.789, 242.211, -50.0, 300.0]) +# @test all(dc_flows .== [70.0, 250.0]) + +# todo: activate again, as soon as example is reworked +# model = JuMP.direct_model(HiGHS.Optimizer()) +# generate!(model, joinpath(dir, "28_expressions.iesopt.yaml")) +# optimize!(model) +# @test JuMP.objective_value(model) ≈ 2000.0 +# set_expression_term_value(component(model, "demand_value"), 1, [80 / 3, 100 / 3]) +# optimize!(model) +# @test JuMP.objective_value(model) ≈ 2000.0 + +@testset "31_exclusive_operation" begin + model = generate!(joinpath(PATH_EXAMPLES, "31_exclusive_operation.iesopt.yaml"); verbosity=false) + optimize!(model) + @test JuMP.objective_value(model) ≈ -10.0 + @test JuMP.value.(IESopt.component(model, "buy_id").exp.value) == [1, 0, 1, 0] + @test JuMP.value.(IESopt.component(model, "sell_id").exp.value) == [0, 1, 0, 1] +end + +# Disabled, because Benders needs to modify Decisions (which is currently not possible due to immutability). 
+# @testset "Benders decomposition" begin +# model = generate!(joinpath(PATH_EXAMPLES, "33_benders_investment.iesopt.yaml"); verbosity=false) +# optimize!(model) +# _conventional_obj = JuMP.objective_value(model) +# benders_data = benders(HiGHS.Optimizer, joinpath(PATH_EXAMPLES, "33_benders_investment.iesopt.yaml"); verbosity=false) +# _benders_obj = JuMP.objective_value(benders_data.main) +# @test _conventional_obj ≈ _benders_obj atol = (_conventional_obj * 1e-4) + +# _conventional_obj = 91539936.2678 # too slow for the test +# benders_data = benders(HiGHS.Optimizer, joinpath(PATH_EXAMPLES, "35_fixed_costs.iesopt.yaml"); verbosity=false) +# _benders_obj = JuMP.objective_value(benders_data.main) +# @test _conventional_obj ≈ _benders_obj atol = (_conventional_obj * 1e-4) +# end + +@testset "37_certificates" begin + model = generate!(joinpath(PATH_EXAMPLES, "37_certificates.iesopt.yaml"); verbosity=false) + optimize!(model) + @test JuMP.objective_value(model) ≈ 44376.75 atol = 0.01 + @test sum(IESopt.extract_result(model, "plant_gas", "in:gas"; mode="value")) ≈ 986.15 atol = 0.01 + @test sum(IESopt.extract_result(model, "electrolysis", "in:electricity"; mode="value")) ≈ 758.58 atol = 0.01 +end + +@testset "47_disable_components" begin + model_coupled = + generate!(joinpath(PATH_EXAMPLES, "47_disable_components.iesopt.yaml"); mode="coupled", verbosity=false) + model_individual = + generate!(joinpath(PATH_EXAMPLES, "47_disable_components.iesopt.yaml"); mode="individual", verbosity=false) + model_AT_DE = generate!( + joinpath(PATH_EXAMPLES, "47_disable_components.iesopt.yaml"); + mode="coupled", + enable_CH=false, + verbosity=false, + ) + model_CH = generate!( + joinpath(PATH_EXAMPLES, "47_disable_components.iesopt.yaml"); + enable_DE=false, + enable_AT=false, + verbosity=false, + ) + optimize!(model_coupled) + optimize!(model_individual) + optimize!(model_AT_DE) + optimize!(model_CH) + @test JuMP.objective_value(model_coupled) <= + JuMP.objective_value(model_AT_DE) + JuMP.objective_value(model_CH) <= + JuMP.objective_value(model_individual) +end + +# Clean up output files after testing is done. +rm(joinpath(PATH_EXAMPLES, "out"); force=true, recursive=true) diff --git a/test/src/texify.jl b/test/src/texify.jl new file mode 100644 index 0000000..85b6f8b --- /dev/null +++ b/test/src/texify.jl @@ -0,0 +1,41 @@ +module TestTexify + +using Test, IESopt + +function _test_texify() + @test IESopt._int2timeidx(5) == "{t+5}" + @test IESopt._int2timeidx(-1) == "{t-1}" + @test IESopt._int2timeidx(0) == "t" + + @test IESopt._escape_variable("parent.var[5]", 3) == "\\vb{parent}_{\\vb{var}_{t+2}}" + @test IESopt._escape_variable("parent.var[5]", 3; fixed_t=true) == "\\vb{parent}_{\\vb{var}_5}" + + # This is due to floating point representation + @test IESopt._expr_tostring([("x_1", -1.0), ("x_2", -1.005), ("y", "α")]; digits=2) == + "{-~x_1} - {1.0\\cdot x_2} + {α\\cdot y} " + @test IESopt._expr_tostring([("x_1", 1.0), ("x_2", -10.005), ("y", "α")]; digits=2) == + "{x_1} - {10.01\\cdot x_2} + {α\\cdot y} " + @test IESopt._expr_tostring([("\\beta", -1.4), ("x_2", -1.005), ("y", "\\alpha")]; digits=2) == + "{-~1.4\\cdot \\beta} - {1.0\\cdot x_2} + {\\alpha\\cdot y} " + + return nothing +end + +""" + runtests() + +Runs all tests that are properly defined here (starting with "_test_")), suppressing `stdout` and `stderr`. 
+""" +function runtests() + for name in names(@__MODULE__; all=true) + if startswith("$(name)", "_test_") + @testset "$(name)" begin + getfield(@__MODULE__, name)() + end + end + end +end + +end # TestTexify + +TestTexify.runtests() diff --git a/test/test_files/availability_test_failure.iesopt.yaml b/test/test_files/availability_test_failure.iesopt.yaml new file mode 100644 index 0000000..9a487ee --- /dev/null +++ b/test/test_files/availability_test_failure.iesopt.yaml @@ -0,0 +1,45 @@ +config: + optimization: + problem_type: MILP + snapshots: + count: 4 + solver: + name: highs + +carriers: + electricity: {} + gas: {} + +components: + elec: + type: Node + carrier: electricity + + gas: + type: Node + carrier: gas + + plant_gas: + type: Unit + inputs: {gas: gas} + outputs: {electricity: elec} + conversion: 1 gas -> 1 electricity + conversion_at_min: 1 gas -> 0.5 electricity + capacity: 500 out:electricity + unit_commitment: binary + startup_cost: 1000 + min_conversion: 0.2 + availability: [1000, 250, 50, 1000] + adapt_min_to_availability: false + + demand: + type: Profile + carrier: electricity + node_from: elec + value: [0, 250, 50, 110] + + buy_gas: + type: Profile + carrier: gas + node_to: gas + mode: create diff --git a/test/test_files/availability_test_success.iesopt.yaml b/test/test_files/availability_test_success.iesopt.yaml new file mode 100644 index 0000000..da8652c --- /dev/null +++ b/test/test_files/availability_test_success.iesopt.yaml @@ -0,0 +1,45 @@ +config: + optimization: + problem_type: MILP + snapshots: + count: 4 + solver: + name: highs + +carriers: + electricity: {} + gas: {} + +components: + elec: + type: Node + carrier: electricity + + gas: + type: Node + carrier: gas + + plant_gas: + type: Unit + inputs: {gas: gas} + outputs: {electricity: elec} + conversion: 1 gas -> 1 electricity + conversion_at_min: 1 gas -> 0.5 electricity + capacity: 500 out:electricity + unit_commitment: binary + startup_cost: 1000 + min_conversion: 0.2 + availability: [1000, 250, 50, 1000] + adapt_min_to_availability: true + + demand: + type: Profile + carrier: electricity + node_from: elec + value: [0, 250, 50, 110] + + buy_gas: + type: Profile + carrier: gas + node_to: gas + mode: create diff --git a/test/test_files/carrier_mismatch.iesopt.yaml b/test/test_files/carrier_mismatch.iesopt.yaml new file mode 100644 index 0000000..316aafc --- /dev/null +++ b/test/test_files/carrier_mismatch.iesopt.yaml @@ -0,0 +1,23 @@ +config: + optimization: + problem_type: LP + snapshots: + count: 1 + solver: + name: highs + +carriers: + electricity: {} + gas: {} + +components: + node: + type: Node + carrier: electricity + + plant_gas: + type: Unit + inputs: {gas: node} + outputs: {electricity: node} + conversion: 1 gas -> 1 electricity + capacity: 10 out:electricity diff --git a/test/test_files/filesystem/components/TestComp.iesopt.template.yaml b/test/test_files/filesystem/components/TestComp.iesopt.template.yaml new file mode 100644 index 0000000..4cacc09 --- /dev/null +++ b/test/test_files/filesystem/components/TestComp.iesopt.template.yaml @@ -0,0 +1,4 @@ +components: + node: + type: Node + carrier: c diff --git a/test/test_files/filesystem/include_components.iesopt.yaml b/test/test_files/filesystem/include_components.iesopt.yaml new file mode 100644 index 0000000..54c131b --- /dev/null +++ b/test/test_files/filesystem/include_components.iesopt.yaml @@ -0,0 +1,16 @@ +config: + optimization: + problem_type: LP + snapshots: + count: 1 + solver: + name: highs + paths: + templates: components + +carriers: + c: 
{} + +components: + test: + type: TestComp diff --git a/test/test_files/filesystem/include_components_slash.iesopt.yaml b/test/test_files/filesystem/include_components_slash.iesopt.yaml new file mode 100644 index 0000000..be07c23 --- /dev/null +++ b/test/test_files/filesystem/include_components_slash.iesopt.yaml @@ -0,0 +1,16 @@ +config: + optimization: + problem_type: LP + snapshots: + count: 1 + solver: + name: highs + paths: + templates: components/ + +carriers: + c: {} + +components: + test: + type: TestComp diff --git a/test/test_files/filesystem/include_components_slash_windows.iesopt.yaml b/test/test_files/filesystem/include_components_slash_windows.iesopt.yaml new file mode 100644 index 0000000..f1e948b --- /dev/null +++ b/test/test_files/filesystem/include_components_slash_windows.iesopt.yaml @@ -0,0 +1,16 @@ +config: + optimization: + problem_type: LP + snapshots: + count: 1 + solver: + name: highs + paths: + templates: components\\ + +carriers: + c: {} + +components: + test: + type: TestComp diff --git a/test/test_files/filesystem/include_dotslash_components.iesopt.yaml b/test/test_files/filesystem/include_dotslash_components.iesopt.yaml new file mode 100644 index 0000000..eaf9b32 --- /dev/null +++ b/test/test_files/filesystem/include_dotslash_components.iesopt.yaml @@ -0,0 +1,16 @@ +config: + optimization: + problem_type: LP + snapshots: + count: 1 + solver: + name: highs + paths: + templates: ./components + +carriers: + c: {} + +components: + test: + type: TestComp diff --git a/test/test_files/filesystem/include_dotslash_components_slash.iesopt.yaml b/test/test_files/filesystem/include_dotslash_components_slash.iesopt.yaml new file mode 100644 index 0000000..fca0cd3 --- /dev/null +++ b/test/test_files/filesystem/include_dotslash_components_slash.iesopt.yaml @@ -0,0 +1,16 @@ +config: + optimization: + problem_type: LP + snapshots: + count: 1 + solver: + name: highs + paths: + templates: ./components/ + +carriers: + c: {} + +components: + test: + type: TestComp diff --git a/test/test_files/filesystem/include_dotslash_components_slash_windows.iesopt.yaml b/test/test_files/filesystem/include_dotslash_components_slash_windows.iesopt.yaml new file mode 100644 index 0000000..98ef5f6 --- /dev/null +++ b/test/test_files/filesystem/include_dotslash_components_slash_windows.iesopt.yaml @@ -0,0 +1,16 @@ +config: + optimization: + problem_type: LP + snapshots: + count: 1 + solver: + name: highs + paths: + templates: .\\components\\ + +carriers: + c: {} + +components: + test: + type: TestComp diff --git a/test/test_files/filesystem/include_dotslash_components_windows.iesopt.yaml b/test/test_files/filesystem/include_dotslash_components_windows.iesopt.yaml new file mode 100644 index 0000000..5a83578 --- /dev/null +++ b/test/test_files/filesystem/include_dotslash_components_windows.iesopt.yaml @@ -0,0 +1,16 @@ +config: + optimization: + problem_type: LP + snapshots: + count: 1 + solver: + name: highs + paths: + templates: .\\components + +carriers: + c: {} + +components: + test: + type: TestComp diff --git a/test/test_files/increased_fuel.iesopt.yaml b/test/test_files/increased_fuel.iesopt.yaml new file mode 100644 index 0000000..7b5b129 --- /dev/null +++ b/test/test_files/increased_fuel.iesopt.yaml @@ -0,0 +1,61 @@ +config: + optimization: + problem_type: MILP + snapshots: + count: 4 + solver: + name: highs + +carriers: + electricity: {} + water: {} + hydrogen: {} + heat: {} + +components: + elec: + type: Node + carrier: electricity + + h2: + type: Node + carrier: hydrogen + + water: + type: 
Node + carrier: water + + heat: + type: Node + carrier: heat + has_state: true + state_cyclic: geq + state_ub: 10000 + + electrolyser: + type: Unit + inputs: {electricity: elec, water: water} + outputs: {hydrogen: h2, heat: heat} + conversion: 1 electricity + 2 water -> 2 hydrogen + 1 heat + capacity: 10 out:hydrogen + min_conversion: 0.5 + conversion_at_min: 1 electricity + 1 water -> 1 hydrogen + 2 heat + unit_commitment: binary + + demand: + type: Profile + carrier: hydrogen + node_from: h2 + value: [5, 5, 7.5, 10] + + buy_elec: + type: Profile + carrier: electricity + node_to: elec + mode: create + + buy_water: + type: Profile + carrier: water + node_to: water + mode: create diff --git a/test/test_files/variable_unit_count.iesopt.yaml b/test/test_files/variable_unit_count.iesopt.yaml new file mode 100644 index 0000000..1c88b8e --- /dev/null +++ b/test/test_files/variable_unit_count.iesopt.yaml @@ -0,0 +1,51 @@ +config: + optimization: + problem_type: MILP + snapshots: + count: 4 + solver: + name: highs + +carriers: + electricity: {} + gas: {} + +components: + elec: + type: Node + carrier: electricity + + gas: + type: Node + carrier: gas + + build_gas: + type: Decision + cost: 5000 + ub: 100 + mode: integer + + plant_gas: + type: Unit + inputs: {gas: gas} + outputs: {electricity: elec} + conversion: 1 gas -> 1 electricity + conversion_at_min: 1 gas -> 0.5 electricity + unit_count: build_gas:value + capacity: 50 out:electricity + marginal_cost: 100 per out:electricity + unit_commitment: integer + startup_cost: 1000 + min_conversion: 0.2 + + demand: + type: Profile + carrier: electricity + node_from: elec + value: [0, 250, 50, 110] + + buy_gas: + type: Profile + carrier: gas + node_to: gas + mode: create diff --git a/test/test_files/wrong_problem_type.iesopt.yaml b/test/test_files/wrong_problem_type.iesopt.yaml new file mode 100644 index 0000000..63548c4 --- /dev/null +++ b/test/test_files/wrong_problem_type.iesopt.yaml @@ -0,0 +1,10 @@ +config: + optimization: + problem_type: NLP + snapshots: + count: 1 + solver: + name: highs + +carriers: {} +components: {} From 679e8f6ececd10fe40d79f7996735e5d630e9400 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20Str=C3=B6mer?= Date: Mon, 3 Jun 2024 17:18:51 +0200 Subject: [PATCH 05/11] chore: change IESoptLib to a weakdep; add all internally registered solvers as weakdeps using "self-extension" --- Project.toml | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 5c29086..b0ae87f 100644 --- a/Project.toml +++ b/Project.toml @@ -8,7 +8,6 @@ CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b" DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b" -IESoptLib = "b98f706d-40ec-4ce6-a66c-1c6e71d3cef6" JLD2 = "033835bb-8acc-5ee8-8aae-3f567f8a3819" JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" JuMP = "4076af6c-e467-56ae-b986-b466b2749572" @@ -28,13 +27,36 @@ Tectonic = "9ac5f52a-99c6-489f-af81-462ef484790f" YAML = "ddb6d928-2868-570f-bddf-ab3f9cf99eb6" ZipFile = "a5390f91-8eb1-5f08-bee0-b1d1ffed6cea" +[weakdeps] +CPLEX = "a076750e-1247-5638-91d2-ce28b192dca0" +Cbc = "9961bab8-2fa3-5c5a-9d89-47fab24efd76" +GLPK = "60bf3e95-4087-53dc-ae20-288a0d20c6a6" +Gurobi = "2e9cd046-0924-5485-92f1-d5272153d98b" +IESoptLib = "b98f706d-40ec-4ce6-a66c-1c6e71d3cef6" +Ipopt = "b6b21f68-93f8-5de0-b562-5493be1d77c9" +SCIP = "82193955-e24f-5292-bf16-6f2c5261a85f" + +[extensions] +CPLEX = "CPLEX" +Cbc = "Cbc" +GLPK = 
"GLPK" +Gurobi = "Gurobi" +IESoptLib = "IESoptLib" +Ipopt = "Ipopt" +SCIP = "SCIP" + [compat] ArgCheck = "2.3.0" +CPLEX = "1.0.3" CSV = "0.10" +Cbc = "1.2.0" DataFrames = "1.6" Dates = "<0.0.1,1" +GLPK = "1.2.1" +Gurobi = "1.3.0" HiGHS = "1.9" IESoptLib = "0.1" +Ipopt = "1.6.2" JLD2 = "0.4" JSON = "0.21" JuMP = "1.22" @@ -49,6 +71,7 @@ PrecompileTools = "1.2" Printf = "<0.0.1,1" ProgressMeter = "1.10" RuntimeGeneratedFunctions = "0.5.13" +SCIP = "0.11.14" Suppressor = "0.2" Tectonic = "0.8" YAML = "0.4" From 2524fe502af484c8216e96a97a8ca8ce0ec14a09 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20Str=C3=B6mer?= Date: Mon, 3 Jun 2024 17:25:39 +0200 Subject: [PATCH 06/11] chore: change compat entry for Pkg to fix CI on 1.9 image --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index b0ae87f..ab3a91c 100644 --- a/Project.toml +++ b/Project.toml @@ -66,7 +66,7 @@ LoggingExtras = "1.0" Markdown = "<0.0.1,1" MultiObjectiveAlgorithms = "1.3" OrderedCollections = "1.6" -Pkg = "1.10" +Pkg = "1.9,1.10" PrecompileTools = "1.2" Printf = "<0.0.1,1" ProgressMeter = "1.10" From 00ffd5eb1d09bd5146bea7248a9e95e61562ac56 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20Str=C3=B6mer?= Date: Mon, 3 Jun 2024 18:32:38 +0200 Subject: [PATCH 07/11] docs: remove IESoptLib as direct dependency from docs environment --- docs/Project.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/Project.toml b/docs/Project.toml index f510157..8a83566 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -1,5 +1,4 @@ [deps] Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4" IESopt = "ed3f0a38-8ad9-4cf8-877e-929e8d190fe9" -IESoptLib = "b98f706d-40ec-4ce6-a66c-1c6e71d3cef6" LiveServer = "16fef848-5104-11e9-1b77-fb7a48bbb589" From 77373e2b140f9cb79dc52bd31928703f48669fba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20Str=C3=B6mer?= Date: Mon, 3 Jun 2024 18:33:23 +0200 Subject: [PATCH 08/11] chore: up julia compat requirement, and revoke change in Pkg compat entry, to accomodate RuntimeGeneratedFunctions (which compats on julia = "1.10") --- Project.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Project.toml b/Project.toml index ab3a91c..d65f1fe 100644 --- a/Project.toml +++ b/Project.toml @@ -66,7 +66,7 @@ LoggingExtras = "1.0" Markdown = "<0.0.1,1" MultiObjectiveAlgorithms = "1.3" OrderedCollections = "1.6" -Pkg = "1.9,1.10" +Pkg = "1.10" PrecompileTools = "1.2" Printf = "<0.0.1,1" ProgressMeter = "1.10" @@ -76,4 +76,4 @@ Suppressor = "0.2" Tectonic = "0.8" YAML = "0.4" ZipFile = "0.10" -julia = "1.9" +julia = "1.10" From f2c29c38203cb8adb9042009edc0f1df6d029a8b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20Str=C3=B6mer?= Date: Mon, 3 Jun 2024 18:35:30 +0200 Subject: [PATCH 09/11] chore: remove 1.9 as testing target from CI --- .github/workflows/CI.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index eabf7c7..46cf465 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -24,7 +24,6 @@ jobs: matrix: version: - '1.10' - - '1.9' os: - ubuntu-latest arch: From 362db518d468785aa5ddb4679700c73a0bd737bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20Str=C3=B6mer?= Date: Mon, 3 Jun 2024 18:47:29 +0200 Subject: [PATCH 10/11] docs: build docs for "stable,dev,all-minor" versions, and enable push previews for PRs --- docs/make.jl | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/docs/make.jl b/docs/make.jl index 8c0338d..dd6a84a 
100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -44,6 +44,13 @@ makedocs(; if !is_local_draft deploydocs(; repo="github.com/ait-energy/IESopt.jl", - devbranch="dev", + push_preview=true, # previews for PRs (not from forks) + versions = [ + "stable" => "v^", # "stable" => latest version + "v#.#", # include all minor versions + "dev" => "dev", + # "v#.#.#", # use this to include all released versions + # "v1.1.6", # use this to include a specific version + ] ) end From 6e4f9030f50f94db6874737637d23076f0ae7af1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stefan=20Str=C3=B6mer?= Date: Mon, 3 Jun 2024 18:47:55 +0200 Subject: [PATCH 11/11] docs: add CI workflow to clean up leftover files from push previews --- .github/workflows/DocPreviewCleanup.yml | 35 +++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 .github/workflows/DocPreviewCleanup.yml diff --git a/.github/workflows/DocPreviewCleanup.yml b/.github/workflows/DocPreviewCleanup.yml new file mode 100644 index 0000000..2d3c291 --- /dev/null +++ b/.github/workflows/DocPreviewCleanup.yml @@ -0,0 +1,35 @@ +# See: https://documenter.juliadocs.org/stable/man/hosting/#Cleaning-up-gh-pages +# for more information on why and how to use this workflow. + +name: DocPreviewCleanup + +on: + pull_request: + types: [closed] + +concurrency: + group: doc-preview-cleanup + cancel-in-progress: false + +jobs: + doc-preview-cleanup: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Checkout gh-pages branch + uses: actions/checkout@v4 + with: + ref: gh-pages + - name: Delete preview and history + push changes + run: | + if [ -d "${preview_dir}" ]; then + git config user.name "Documenter.jl" + git config user.email "documenter@juliadocs.github.io" + git rm -rf "${preview_dir}" + git commit -m "delete preview" + git branch gh-pages-new $(echo "delete history" | git commit-tree HEAD^{tree}) + git push --force origin gh-pages-new:gh-pages + fi + env: + preview_dir: previews/PR${{ github.event.number }}
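
A note on the "self-extension" setup introduced in PATCH 05/11: declaring IESoptLib and the solver packages under [weakdeps], each paired with a same-named entry under [extensions], means the corresponding glue module is only compiled and loaded once a user installs and loads that solver package next to IESopt. The extension source files themselves (conventionally located under ext/) are not part of this patch series, so the snippet below is only a hedged usage sketch of how the mechanism behaves from a user's point of view; the Base.get_extension check is an illustrative assumption, not necessarily how IESopt.jl detects available solvers internally.

    # Hypothetical usage sketch (not taken from the patches above).
    # Solvers declared as weakdeps are opt-in: only users who actually need, e.g., Gurobi
    # install it, and loading it is what triggers the matching IESopt extension.
    import Pkg
    Pkg.add("Gurobi")   # installs the optional solver package into the active environment

    using IESopt
    using Gurobi        # loading the weakdep activates the `Gurobi` extension of IESopt

    # Illustrative check that the extension was actually loaded (requires Julia >= 1.9):
    @assert !isnothing(Base.get_extension(IESopt, :Gurobi))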