Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions requirements-dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,5 @@ sphinx==7.1.*
nbsphinx
sphinx_rtd_theme

idaes-pse @ git+https://github.com/IDAES/[email protected]
-e .[testing,notebooks,oli_api]
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@
# primary requirements for unit and property models
# allow X.Y.Z stable release(s) and X.Y+1.dev0 (i.e. the main branch after X.Y.Z)
# disallow X.Y+1.0rc0 (i.e. forcing a manual update to this requirement)
"idaes-pse >=2.9.0,<2.10.0rc0",
"idaes-pse @ git+https://github.com/IDAES/idaes-pse.git@2.10.0rc0",
"pyomo>=6.6.1",
"watertap-solvers",
"pyyaml", # watertap.core.wt_database
Expand Down
6 changes: 3 additions & 3 deletions watertap/core/membrane_channel1d.py
Original file line number Diff line number Diff line change
Expand Up @@ -242,14 +242,14 @@ def initialize(
self.release_state(source_flags, outlvl)

def calculate_scaling_factors(self):
if iscale.get_scaling_factor(self.area) is None:
iscale.set_scaling_factor(self.area, 100)
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should we also display a warning to the user that we are doing this, since no scaling factor was provided?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Do the old scaling tools really need additional warnings to spam the user's logs?

In the new scaling tools, I'm going to raise an exception instead, so they can no longer ignore it.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@dallan-keylogic in the new scaling tools, could you think about perhaps a setting where the user could choose to have exceptions raised or bypass (with warnings if you prefer)? The default would be to raise exceptions, but it'd be nice to have an escape route for devs (for whatever exploratory reasons someone might have).


super().calculate_scaling_factors()

# setting scaling factors for variables

# will not override if the user provides the scaling factor
## default of 1 set by ControlVolume1D
if iscale.get_scaling_factor(self.area) == 1:
iscale.set_scaling_factor(self.area, 100)

if hasattr(self, "pressure_change_total"):
for v in self.pressure_change_total.values():
Expand Down
72 changes: 71 additions & 1 deletion watertap/core/membrane_channel_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
Expression,
Param,
NonNegativeReals,
value,
Var,
units as pyunits,
)
Expand Down Expand Up @@ -996,7 +997,11 @@ def calculate_scaling_factors(self):
if hasattr(self, "velocity"):
for v in self.velocity.values():
if iscale.get_scaling_factor(v) is None:
iscale.set_scaling_factor(v, 1)
sf_A = iscale.get_scaling_factor(self.area, default=1)
sf_F_vol = iscale.get_scaling_factor(
self.properties[t, x].flow_vol_phase["Liq"], default=1
)
iscale.set_scaling_factor(v, sf_F_vol / sf_A, 1)

if hasattr(self, "friction_factor_darcy"):
for v in self.friction_factor_darcy.values():
Expand All @@ -1008,6 +1013,71 @@ def calculate_scaling_factors(self):
if iscale.get_scaling_factor(v) is None:
iscale.set_scaling_factor(v, 1)

# TODO why did this file have zero scaling factors for constraints
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@bknueven can probably give a more informative response, but the line of thinking was that scaling all variables should suffice in most cases without the need to scale constraints. However, we know that this isn't always the case, as we've experienced particularly when dealing with getting "complex" models to converge.

if hasattr(self, "eq_dh"):
sf_dh = iscale.get_scaling_factor(self.dh, default=1)
iscale.constraint_scaling_transform(self.eq_dh, sf_dh)

if hasattr(self, "eq_area"):
sf_A = iscale.get_scaling_factor(self.area, default=1)
iscale.constraint_scaling_transform(self.eq_area, sf_A)

if hasattr(self, "eq_velocity"):
for (t, x), condata in self.eq_velocity.items():
sf_F_vol = iscale.get_scaling_factor(
self.properties[t, x].flow_vol_phase["Liq"], default=1
)
iscale.constraint_scaling_transform(condata, sf_F_vol)

if hasattr(self, "eq_equal_flow_vol_interface"):
for (t, x), condata in self.eq_equal_flow_vol_interface.items():
sf_F_vol = min(
iscale.get_scaling_factor(
self.properties[t, x].flow_vol_phase["Liq"], default=1
),
iscale.get_scaling_factor(
self.properties_interface[t, x].flow_vol_phase["Liq"], default=1
),
)
iscale.constraint_scaling_transform(condata, sf_F_vol)

if hasattr(self, "eq_equal_pressure_interface"):
for (t, x), condata in self.eq_equal_pressure_interface.items():
sf_P = min(
iscale.get_scaling_factor(
self.properties_interface[t, x].pressure, default=1
),
iscale.get_scaling_factor(
self.properties[t, x].pressure, default=1
),
)
iscale.constraint_scaling_transform(condata, sf_P)

if hasattr(self, "eq_K"):
for (t, x, j), condata in self.eq_K.items():
sf_K = iscale.get_scaling_factor(self.K[t, x, j])
# Hopefully h has been calculated before this
sf_h = 1 / value(self.dh)
iscale.constraint_scaling_transform(condata, sf_K * sf_h)

if hasattr(self, "eq_N_Re"):
for (t, x), condata in self.eq_N_Re.items():
sf_Re = iscale.get_scaling_factor(self.N_Re[t, x], default=1)
sf_A = iscale.get_scaling_factor(
self.area, default=1 / value(self.area)
)
sf_visc = iscale.get_scaling_factor(
self.properties[t, x].visc_d_phase["Liq"], default=1
)
iscale.constraint_scaling_transform(condata, sf_Re * sf_A * sf_visc)

if hasattr(self, "eq_N_Sc_comp"):
for (t, x, j), condata in self.eq_N_Sc_comp.items():
sf_visc = iscale.get_scaling_factor(
self.properties[t, x].visc_d_phase["Liq"], default=1
)
iscale.constraint_scaling_transform(condata, sf_visc)


# helper for validating configuration arguments for this CV
def validate_membrane_config_args(unit):
Expand Down
30 changes: 20 additions & 10 deletions watertap/core/zero_order_sido.py
Original file line number Diff line number Diff line change
Expand Up @@ -282,16 +282,17 @@ def initialize_sido(

def calculate_scaling_factors_sido(self):
# Get default scale factors and do calculations from base classes
for t, v in self.water_recovery_equation.items():
iscale.constraint_scaling_transform(
v,
iscale.get_scaling_factor(
self.properties_in[t].flow_mass_comp["H2O"],
default=1,
warning=True,
hint=" for water recovery",
),
)
if hasattr(self, "water_recovery_equation"):
for t, v in self.water_recovery_equation.items():
iscale.constraint_scaling_transform(
v,
iscale.get_scaling_factor(
self.properties_in[t].flow_mass_comp["H2O"],
default=1,
warning=True,
hint=" for water recovery",
),
)

for t, v in self.water_balance.items():
iscale.constraint_scaling_transform(
Expand Down Expand Up @@ -320,6 +321,15 @@ def calculate_scaling_factors_sido(self):
),
) # would just be a duplicate of above

if hasattr(self, "eq_isobaric"):
for (t, port), condata in self.eq_isobaric.items():
obj = getattr(self, port)
sf_P = min(
iscale.get_scaling_factor(self.inlet.pressure[t], default=1),
iscale.get_scaling_factor(obj.pressure[t], default=1),
)
iscale.constraint_scaling_transform(condata, sf_P)


def _get_Q_sido(self, t):
return self.properties_in[t].flow_vol
Original file line number Diff line number Diff line change
Expand Up @@ -1042,7 +1042,9 @@ def initialize_system(m):
seq.set_guesses_for(m.fs.R1.inlet, tear_guesses1)
seq.set_guesses_for(m.fs.asm_adm.inlet, tear_guesses2)

initializer = BlockTriangularizationInitializer()
initializer = BlockTriangularizationInitializer(
calculate_variable_options={"eps": 2e-8}, skip_final_solve=True
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

how is eps applied through calculate_variable_options?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It is the tolerance used to check whether the constraint residual is small enough to consider the constraint "satisfied". When solving the flowsheet, I encountered a problem where it couldn't reduce the residual of some enthalpy constraint (i.e., a constraint that needs a scaling factor <<1) to less than 1e-8. Due to Pyomo/pyomo#3785, it does not take the constraint's scaling factor into account when calculating this residual. Using a constraint_scaling_transform would allow scaling to be taken into account, but we wanted to move away from that in the new scaling tools.

)

def function(unit):
try:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -502,52 +502,69 @@ def scale_system(m, bio_P=False):
ad_scaler = ADScaler()
ad_scaler.scale_model(m.fs.AD)

set_scaling_factor(m.fs.AD.KH_co2, 1e1)
set_scaling_factor(m.fs.AD.KH_ch4, 1e1)
set_scaling_factor(m.fs.AD.KH_h2, 1e2)
set_scaling_factor(m.fs.AD.liquid_phase.heat, 1e1)
for vardata in m.fs.AD.KH_co2.values():
set_scaling_factor(vardata, 1e1)
for vardata in m.fs.AD.KH_ch4.values():
set_scaling_factor(vardata, 1e1)
for vardata in m.fs.AD.KH_h2.values():
set_scaling_factor(vardata, 1e2)
for vardata in m.fs.AD.liquid_phase.heat.values():
set_scaling_factor(vardata, 1e1)
if bio_P:
set_scaling_factor(m.fs.AD.liquid_phase.reactions[0].S_H, 1e1)
for blkdata in m.fs.AD.liquid_phase.reactions.values():
set_scaling_factor(blkdata.S_H, 1e1)
else:
set_scaling_factor(m.fs.AD.liquid_phase.reactions[0].S_H, 1e2)
for blkdata in m.fs.AD.liquid_phase.reactions.values():
set_scaling_factor(blkdata.S_H, 1e2)

cstr_list = [m.fs.R1, m.fs.R2, m.fs.R3, m.fs.R4]
cstr_scaler = CSTRScaler()
for unit in cstr_list:
cstr_scaler.scale_model(unit)

for unit in cstr_list:
set_scaling_factor(unit.hydraulic_retention_time, 1e-3)
for vardata in unit.hydraulic_retention_time.values():
set_scaling_factor(vardata, 1e-3)

aeration_list = [m.fs.R5, m.fs.R6, m.fs.R7]
aeration_scaler = AerationTankScaler()
for unit in aeration_list:
aeration_scaler.scale_model(unit)

for R in aeration_list:
set_scaling_factor(R.KLa, 1e-1)
for vardata in R.KLa.values():
set_scaling_factor(vardata, 1e-1)
if bio_P:
set_scaling_factor(R.hydraulic_retention_time[0], 1e-2)
for vardata in R.hydraulic_retention_time.values():
set_scaling_factor(vardata, 1e-2)

reactor_list = [m.fs.R1, m.fs.R2, m.fs.R3, m.fs.R4, m.fs.R5, m.fs.R6, m.fs.R7]
for unit in reactor_list:
set_scaling_factor(unit.control_volume.reactions[0.0].rate_expression, 1e3)
set_scaling_factor(unit.cstr_performance_eqn, 1e3)
set_scaling_factor(
unit.control_volume.rate_reaction_stoichiometry_constraint, 1e3
)
set_scaling_factor(unit.control_volume.material_balances, 1e3)
for blkdata in unit.control_volume.reactions.values():
for vardata in blkdata.rate_expression.values():
set_scaling_factor(vardata, 1e3)
for condata in unit.cstr_performance_eqn.values():
set_scaling_factor(condata, 1e3)
for (
condata
) in unit.control_volume.rate_reaction_stoichiometry_constraint.values():
set_scaling_factor(condata, 1e3)
for condata in unit.control_volume.material_balances.values():
set_scaling_factor(condata, 1e3)

if bio_P:
set_scaling_factor(
m.fs.R5.control_volume.rate_reaction_generation[0, "Liq", "S_I"], 1e-3
)
constraint_scaling_transform(
m.fs.R5.control_volume.rate_reaction_stoichiometry_constraint[
0, "Liq", "H2O"
],
1e-6,
)
for t in m.fs.time:
set_scaling_factor(
m.fs.R5.control_volume.rate_reaction_generation[t, "Liq", "S_I"], 1e-3
)
# TODO is it intentional that we're setting scaling factors for some constraints
# but doing constraint scaling transforms for others?
Comment on lines +560 to +561
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Not intentional--traditionally we applied constraint scaling transforms.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this appears to contradict @MarcusHolly in #1695

constraint_scaling_transform(
m.fs.R5.control_volume.rate_reaction_stoichiometry_constraint[
t, "Liq", "H2O"
],
1e-6,
)

clarifier_list = [m.fs.CL, m.fs.CL2]
clarifier_scaler = ClarifierScaler()
Expand All @@ -566,7 +583,8 @@ def scale_system(m, bio_P=False):
ad_as_scaler = ADM1ASM2dScaler()
ad_as_scaler.scale_model(m.fs.translator_adm1_asm2d)

set_scaling_factor(m.fs.P1.control_volume.work[0], 1e-2)
for vardata in m.fs.P1.control_volume.work.values():
set_scaling_factor(vardata, 1e-2)

for var in m.fs.component_data_objects(pyo.Var, descend_into=True):
if "flow_vol" in var.name:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -75,17 +75,9 @@ def test_structural_issues(self, system_frame):
@pytest.mark.component
def test_numerical_issues(self, system_frame):
dt = DiagnosticsToolbox(system_frame)
warnings, next_steps = dt._collect_numerical_warnings()
assert len(warnings) == 3
warnings, _ = dt._collect_numerical_warnings()
assert len(warnings) == 1
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nice

assert "WARNING: 3 Variables at or outside bounds (tol=0.0E+00)" in warnings
assert (
"WARNING: 2 Variables with extreme Jacobian values (<1.0E-08 or >1.0E+08)"
in warnings
)
assert (
"WARNING: 2 Constraints with extreme Jacobian values (<1.0E-08 or >1.0E+08)"
in warnings
)

@pytest.mark.component
def test_solve(self, system_frame):
Expand Down Expand Up @@ -201,17 +193,9 @@ def test_structural_issues(self, system_frame):
@pytest.mark.component
def test_numerical_issues(self, system_frame):
dt = DiagnosticsToolbox(system_frame)
warnings, next_steps = dt._collect_numerical_warnings()
assert len(warnings) == 3
warnings, _ = dt._collect_numerical_warnings()
assert len(warnings) == 1
assert "WARNING: 3 Variables at or outside bounds (tol=0.0E+00)" in warnings
assert (
"WARNING: 2 Variables with extreme Jacobian values (<1.0E-08 or >1.0E+08)"
in warnings
)
assert (
"WARNING: 2 Constraints with extreme Jacobian values (<1.0E-08 or >1.0E+08)"
in warnings
)

@pytest.mark.component
def test_solve(self, system_frame):
Expand Down Expand Up @@ -334,13 +318,30 @@ def test_numerical_issues(self, system_frame):

@pytest.mark.solver
@pytest.mark.component
def test_condition_number(self, system_frame):
@linux_platform_only
def test_condition_number_on_linux(self, system_frame):
m = system_frame

# Check condition number to confirm scaling
jac, _ = get_jacobian(m, scaled=False)
assert (jacobian_cond(jac=jac, scaled=False)) == pytest.approx(
3.6954462e15, rel=1e-3
assert jacobian_cond(jac=jac, scaled=False) == pytest.approx(
2.987650e15, rel=1e-2
)

@pytest.mark.solver
@pytest.mark.component
@reference_platform_only
def test_condition_number_on_windows(self, system_frame):
m = system_frame

# Check condition number to confirm scaling
jac, _ = get_jacobian(m, scaled=False)
cond = jacobian_cond(jac=jac, scaled=False)
assert (
# Python 3.9-3.11
cond == pytest.approx(4.3021828e15, rel=1e-2)
# Python 3.12
or cond == pytest.approx(2.987651e15, rel=1e-2)
)


Expand Down Expand Up @@ -377,7 +378,7 @@ def test_condition_number_on_linux(self, system_frame):
jac, _ = get_jacobian(sm, scaled=False)

assert (jacobian_cond(jac=jac, scaled=False)) == pytest.approx(
8.89313720973467e14, rel=1e-3
7.42017e14, rel=1e-3
)

@pytest.mark.solver
Expand All @@ -391,5 +392,5 @@ def test_condition_number_on_windows(self, system_frame):
jac, _ = get_jacobian(sm, scaled=False)

assert (jacobian_cond(jac=jac, scaled=False)) == pytest.approx(
8.82538591e14, rel=1e-2
7.43640e14, rel=1e-2
)
Loading
Loading