Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion idaes/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -385,7 +385,7 @@ def _new_idaes_config_block():
"scale_model",
pyomo.common.config.ConfigValue(
domain=Bool,
default=False, # TODO: Change to true once transition complete
default=True,
Copy link
Contributor

@adam-a-a adam-a-a Aug 25, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

high-level: what happens when we do not set scale_model to True by default? At this point, I am a bit mixed up on what's happening with regard to scaling and how the solver will "see" things.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

When scale_model == False, then scaling factors are not applied to the model at the time an .nl file is written for the solver. Constraints transformed through constraint_scaling_transform will have their scaled form passed to the solver. If you pass the nlp_scaling_method=user-scaling option to IPOPT, IPOPT will use the user-provided scaling factors when computing and inverting the barrier hessian instead of its own gradient-scaling method. However, it will not use scaled values of the variables when computing the distance of a variable to its bounds or when computing the constraint residual (the latter fact being the reason that constraint_scaling_transform was created).

Now that scale_model==True, solvers that support the new .nl writer (definitely IPOPT, I'm not sure about other solvers) will only see the scaled model. The Jacobian will be computed using the scaled variables, and IPOPT will apply its gradient-based scaling on top of the user scaling factors (which is a good thing). Scaled values of variables will be used to compute the distance to bounds and scaled values will be used in computing the constraint residual (so we no longer need constraint_scaling_transform).

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

In light of the benefits described in your second paragraph, why would one want to set scale_model == False? Is there a situation when that would be more useful than True?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Prior to Pyomo 6.9.3, there was a bug in how scaling factors were applied in linear named Expressions---namely, they weren't applied in such expressions. Because those Expressions gave incorrect values, most models in the IDAES repo either failed to solve or gave erroneous results. It was fixed in this release, which is why I'm making this change now.

The only situation where you wouldn't want to scale the model is if you suspect a constraint has both a scaling factor set and has been transformed through constraint_scaling_transform. Then using scale_model == True results in double scaling, whereas with scale_model == False the double scaling would be less of a problem. That's why I went through and added a bunch of Exceptions if the user tried double scaling things.

description="Whether to apply model scaling in writer",
),
)
Expand Down
58 changes: 58 additions & 0 deletions idaes/core/scaling/tests/test_scaling_profiler.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@

from io import StringIO
import os
import re
import pytest

import numpy as np
Expand All @@ -30,6 +31,7 @@
MethaneParameterBlock as MethaneCombustionParameterBlock,
)
from idaes.core.util import from_json, StoreSpec
from idaes.core.util.scaling import constraint_scaling_transform
from idaes.core.scaling import set_scaling_factor
from idaes.core.scaling.scaler_profiling import ScalingProfiler

Expand Down Expand Up @@ -207,6 +209,62 @@ def test_solve_perturbed_state(self):
assert res["iterations"] == 0


def demo_model_cst():
    """Build a small demo model that applies old-style constraint scaling
    (constraint_scaling_transform) during construction."""
    model = ConcreteModel()

    model.v1 = Var(initialize=2)
    model.v2 = Var(initialize=4)
    model.v3 = Var(initialize=-6)
    model.v1.fix()

    model.c1 = Constraint(expr=model.v2 == model.v1**2)
    model.c2 = Constraint(expr=0 == model.v1 + model.v2 + model.v3)

    # Legacy scaling transform applied at build time; the profiler tests rely
    # on this to trigger the double-scaling RuntimeError for c2.
    constraint_scaling_transform(model.c2, 0.5)

    return model


def demo_scaling_cst(model):
    """Scaling routine mixing old-style (transform on c1) and new-style
    (suffix factor on v2) scaling; used to exercise double-scaling checks."""
    constraint_scaling_transform(model.c1, 0.5)
    set_scaling_factor(model.v2, 0.5)


@pytest.mark.unit
def test_constraint_scaling_transform():
    """Profiler must raise when old- and new-style scaling are mixed."""

    def expected_message(cname):
        # Shared message template; only the constraint name differs per case.
        return re.escape(
            f"Attempted to set constraint scaling factor for transformed constraint {cname}. "
            "Please use only one of set_scaling_factor and constraint_scaling_transform "
            "per constraint to avoid double scaling."
        )

    # Case 1: old-style scaling was applied inside the build_model method.
    profiler = ScalingProfiler(
        build_model=demo_model_cst,
        user_scaling=demo_scaling,
        perturb_state=demo_pertubration,
    )
    with pytest.raises(RuntimeError, match=expected_message("c2")):
        profiler.profile_scaling_methods()

    # Case 2: old-style scaling is applied inside the user scaling method.
    profiler = ScalingProfiler(
        build_model=demo_model,
        user_scaling=demo_scaling_cst,
        perturb_state=demo_pertubration,
    )
    with pytest.raises(RuntimeError, match=expected_message("c1")):
        profiler.profile_scaling_methods()


# Case study using Gibbs reactor model
# Get solution json from scaling tests
FILENAME = "gibbs_solution.json"
Expand Down
47 changes: 47 additions & 0 deletions idaes/core/scaling/tests/test_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@
unscaled_variables_generator,
unscaled_constraints_generator,
)
from idaes.core.util.scaling import constraint_scaling_transform
import idaes.logger as idaeslog

currdir = this_file_dir()
Expand Down Expand Up @@ -1742,6 +1743,52 @@ def test_set_scaling_factor_overwrite_false(self, caplog):
)
assert m.scaling_factor[m.v] == 10

@pytest.mark.unit
def test_set_scaling_factor_transformed_constraint(self):
m = ConcreteModel()
m.x = Var(initialize=500)
m.c1 = Constraint(expr=m.x <= 1e3)
m.c2 = Constraint(expr=m.x == 1e3)
m.c3 = Constraint(expr=m.x >= 1e3)

def indexed_constraint_rule(b, idx):
return b.x == 1e3

m.c4 = Constraint([1, 2, 3], rule=indexed_constraint_rule)

def match(cname):
return re.escape(
f"Attempted to set constraint scaling factor for transformed constraint {cname}. "
"Please use only one of set_scaling_factor and constraint_scaling_transform "
"per constraint to avoid double scaling."
)

constraint_scaling_transform(m.c1, 1e-3)
constraint_scaling_transform(m.c2, 1e-3)
constraint_scaling_transform(m.c3, 1e-3)
for idx in m.c4:
constraint_scaling_transform(m.c4[idx], 1e-3)

with pytest.raises(RuntimeError, match=match("c1")):
set_scaling_factor(m.c1, 1e-3)
with pytest.raises(RuntimeError, match=match("c2")):
set_scaling_factor(m.c2, 1e-3)
with pytest.raises(RuntimeError, match=match("c3")):
set_scaling_factor(m.c3, 1e-3)
with pytest.raises(
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Do you have a test of the correct way to apply scaling to c4?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm afraid I don't follow. I test applying a scaling factor both to the constraint as a whole as well as the individual indices. What other case do you want to test?

Copy link
Contributor

@bpaul4 bpaul4 Aug 25, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Okay, I see now that the allowed case is also being tested. I only saw the error check on c4 before.

RuntimeError,
match=re.escape(
"Attempted to set constraint scaling factor for indexed constraint c4 "
"with transformed ConstraintData children. Please use only one of "
"set_scaling_factor and constraint_scaling_transform "
"per constraint to avoid double scaling."
),
):
set_scaling_factor(m.c4, 1e-3)
for idx in m.c4:
with pytest.raises(RuntimeError, match=match(f"c4[{idx}]")):
set_scaling_factor(m.c4[idx], 1e-3)

@pytest.fixture
def model_expr(self):
m = ConcreteModel()
Expand Down
25 changes: 22 additions & 3 deletions idaes/core/scaling/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,11 +45,13 @@
from pyomo.core.base.constraint import ConstraintData
from pyomo.core.base.expression import ExpressionData
from pyomo.core.base.param import ParamData
from pyomo.core.base.constraint import ConstraintData

Check warning on line 48 in idaes/core/scaling/util.py

View workflow job for this annotation

GitHub Actions / Pylint

W0404 (reimported)

Reimport 'ConstraintData' (imported line 45)
from pyomo.core import expr as EXPR
from pyomo.common.numeric_types import native_types
from pyomo.core.base.units_container import _PyomoUnit

from idaes.core.util.exceptions import BurntToast
from idaes.core.util.scaling import get_constraint_transform_applied_scaling_factor
import idaes.logger as idaeslog


Expand Down Expand Up @@ -509,14 +511,31 @@
elif math.isnan(scaling_factor):
raise ValueError(f"scaling factor for {component.name} is NaN.")

if isinstance(component, ConstraintData):
xform_factor = get_constraint_transform_applied_scaling_factor(component)
if xform_factor is not None:
raise RuntimeError(
f"Attempted to set constraint scaling factor for transformed constraint {component.name}. "
"Please use only one of set_scaling_factor and constraint_scaling_transform "
"per constraint to avoid double scaling."
)
elif isinstance(component, Constraint):
for condata in component.values():
xform_factor = get_constraint_transform_applied_scaling_factor(condata)
if xform_factor is not None:
raise RuntimeError(
"Attempted to set constraint scaling factor for indexed constraint "
f"{component.name} with transformed ConstraintData children. Please "
"use only one of set_scaling_factor and constraint_scaling_transform "
"per constraint to avoid double scaling."
)

if component.is_indexed():
# What if a scaling factor already exists for the indexed component?
# for idx in component:
# set_scaling_factor(component[idx], scaling_factor=scaling_factor, overwrite=overwrite)
raise TypeError(
f"Component {component.name} is indexed. Set scaling factors for individual indices instead."
)

# Get suffix and assign scaling factor
sfx = get_component_scaling_suffix(component)

if not overwrite and component in sfx:
Expand Down
4 changes: 2 additions & 2 deletions idaes/core/solvers/tests/test_solvers.py
Original file line number Diff line number Diff line change
Expand Up @@ -299,7 +299,7 @@ def test_get_solver_ipopt_v2():
assert solver.options.max_iter == 200

assert solver.config.writer_config.linear_presolve
assert not solver.config.writer_config.scale_model
assert solver.config.writer_config.scale_model


@pytest.mark.skipif(not pyo.SolverFactory("ipopt").available(False), reason="no Ipopt")
Expand All @@ -308,7 +308,7 @@ def test_get_solver_ipopt_v2_w_options():
solver = get_solver(
"ipopt_v2",
options={"tol": 1e-5, "foo": "bar"},
writer_config={"linear_presolve": False},
writer_config={"linear_presolve": False, "scale_model": False},
)

assert isinstance(solver, LegacySolverWrapper)
Expand Down
33 changes: 32 additions & 1 deletion idaes/core/util/scaling.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,19 +41,22 @@
from pyomo.contrib.pynumero.interfaces.pyomo_nlp import PyomoNLP
from pyomo.contrib.pynumero.asl import AmplInterface
from pyomo.common.modeling import unique_component_name
from pyomo.core.base.constraint import ConstraintData
from pyomo.core.base.constraint import ConstraintData, Constraint
from pyomo.common.collections import ComponentMap, ComponentSet
from pyomo.dae import DerivativeVar
from pyomo.dae.flatten import slice_component_along_sets
from pyomo.util.calc_var_value import calculate_variable_from_constraint
from pyomo.core import expr as EXPR
from pyomo.common.numeric_types import native_types
from pyomo.core.base.units_container import _PyomoUnit
from pyomo.core.base.suffix import SuffixFinder

import idaes.logger as idaeslog

_log = idaeslog.getLogger(__name__)

sfFinder = SuffixFinder("scaling_factor")


def __none_left_mult(x, y):
"""PRIVATE FUNCTION, If x is None return None, else return x * y"""
Expand Down Expand Up @@ -230,6 +233,26 @@ def set_scaling_factor(c, v, data_objects=True, overwrite=True):
# doesn't exist. This handles the case where you get a constant 0 and
# need its scale factor to scale the mass balance.
return 1

if isinstance(c, ConstraintData):
xform_factor = get_constraint_transform_applied_scaling_factor(c)
if xform_factor is not None:
raise RuntimeError(
f"Attempted to set constraint scaling factor for transformed constraint {c.name}. "
"Please use only one of set_scaling_factor and constraint_scaling_transform "
"per constraint to avoid double scaling."
)
elif isinstance(c, Constraint):
for condata in c.values():
xform_factor = get_constraint_transform_applied_scaling_factor(condata)
if xform_factor is not None:
raise RuntimeError(
f"Attempted to set constraint scaling factor for indexed constraint {c.name}"
"with transformed ConstraintData children. Please use only one of "
"set_scaling_factor and constraint_scaling_transform "
"per constraint to avoid double scaling."
)

try:
suf = c.parent_block().scaling_factor

Expand Down Expand Up @@ -460,6 +483,14 @@ def constraint_scaling_transform(c, s, overwrite=True):
s = pyo.value(s)
if not isinstance(c, ConstraintData):
raise TypeError(f"{c} is not a constraint or is an indexed constraint")
scaling_factor = sfFinder.find(component_data=c)
if scaling_factor is not None:
raise RuntimeError(
f"Attempted to transform constraint {c.name} with existing scaling factor. "
"Please use only one of set_scaling_factor and constraint_scaling_transform "
"per constraint to avoid double scaling."
)

st = get_constraint_transform_applied_scaling_factor(c, default=None)

if not overwrite and st is not None:
Expand Down
83 changes: 83 additions & 0 deletions idaes/core/util/tests/test_scaling.py
Original file line number Diff line number Diff line change
Expand Up @@ -464,6 +464,53 @@ def c2(b, i):
assert sc.get_scaling_factor(m.z[i]) is None


@pytest.mark.unit
def test_set_scaling_factor_transformed_constraint():
    """set_scaling_factor must reject constraints that were already scaled
    via constraint_scaling_transform (scalar, inequality, and indexed)."""
    m = pyo.ConcreteModel()
    m.x = pyo.Var(initialize=500)
    m.c1 = pyo.Constraint(expr=m.x <= 1e3)
    m.c2 = pyo.Constraint(expr=m.x == 1e3)
    m.c3 = pyo.Constraint(expr=m.x >= 1e3)
    m.c4 = pyo.Constraint([1, 2, 3], rule=lambda b, idx: b.x == 1e3)

    def match(cname):
        return re.escape(
            f"Attempted to set constraint scaling factor for transformed constraint {cname}. "
            "Please use only one of set_scaling_factor and constraint_scaling_transform "
            "per constraint to avoid double scaling."
        )

    # Apply the legacy transform to every constraint first.
    for con in (m.c1, m.c2, m.c3):
        sc.constraint_scaling_transform(con, 1e-3)
    for idx in m.c4:
        sc.constraint_scaling_transform(m.c4[idx], 1e-3)

    # Scalar constraints: each raises with its own name in the message.
    for con in (m.c1, m.c2, m.c3):
        with pytest.raises(RuntimeError, match=match(con.name)):
            sc.set_scaling_factor(con, 1e-3)

    # Indexed parent gets a distinct message.
    # NOTE(review): no space between "c4" and "with" below — this mirrors the
    # current implementation's message string; fix both together if changed.
    with pytest.raises(
        RuntimeError,
        match=re.escape(
            "Attempted to set constraint scaling factor for indexed constraint c4"
            "with transformed ConstraintData children. Please use only one of "
            "set_scaling_factor and constraint_scaling_transform "
            "per constraint to avoid double scaling."
        ),
    ):
        sc.set_scaling_factor(m.c4, 1e-3)
    for idx in m.c4:
        with pytest.raises(RuntimeError, match=match(f"c4[{idx}]")):
            sc.set_scaling_factor(m.c4[idx], 1e-3)


@pytest.mark.unit
def test_set_and_get_scaling_factor():
m = pyo.ConcreteModel()
Expand Down Expand Up @@ -698,6 +745,42 @@ def test_remove_units(self, model):
assert model.constraint_transformed_scaling_factor[model.c2] == 1e-3


@pytest.mark.unit
def test_constraint_scaling_transform_existing_scaling_factor():
    """constraint_scaling_transform must reject constraints that already
    carry a scaling factor set via set_scaling_factor."""
    m = pyo.ConcreteModel()
    m.x = pyo.Var(initialize=500)
    m.c1 = pyo.Constraint(expr=m.x <= 1e3)
    m.c2 = pyo.Constraint(expr=m.x == 1e3)
    m.c3 = pyo.Constraint(expr=m.x >= 1e3)
    m.c4 = pyo.Constraint([1, 2, 3], rule=lambda b, idx: b.x == 1e3)

    def match(cname):
        return re.escape(
            f"Attempted to transform constraint {cname} with existing scaling factor. "
            "Please use only one of set_scaling_factor and constraint_scaling_transform "
            "per constraint to avoid double scaling."
        )

    # Set new-style scaling factors first (the indexed c4 propagates to its
    # ConstraintData children via data_objects=True).
    for con in (m.c1, m.c2, m.c3, m.c4):
        sc.set_scaling_factor(con, 1e-3)

    # Transforming any of them afterwards must raise, naming the constraint.
    for con in (m.c1, m.c2, m.c3):
        with pytest.raises(RuntimeError, match=match(con.name)):
            sc.constraint_scaling_transform(con, 1e-3)
    for idx in m.c4:
        with pytest.raises(RuntimeError, match=match(f"c4[{idx}]")):
            sc.constraint_scaling_transform(m.c4[idx], 1e-3)


class TestScaleSingleConstraint:
@pytest.fixture(scope="class")
def model(self):
Expand Down
Loading
Loading