
Commit e9e34b4

Fix hybrid bigm formulation for linear trees (#164)

emma58 and jalving authored
The changes in #163 included changes to the hybrid bigm formulation for linear trees that, while mathematically equivalent, made for a larger formulation in terms of the number of constraints. This PR corrects that: it still uses `gdp.bound_pretransformation` to generate the constraints bounding the feature values for each leaf, but it manually transforms the constraints setting the output value to each leaf's linear function, equivalent to @bammari's original implementation. In addition, it adds a test checking that the resulting formulation has the expected size.

**Legal Acknowledgement**\
By contributing to this software project, I agree my contributions are submitted under the BSD license. I represent I am authorized to make the contributions and grant the license. If my employer has rights to intellectual property that includes these contributions, I represent that I have received permission to make contributions and grant the required license on behalf of that employer.

---------

Co-authored-by: Emma Johnson <[email protected]>
Co-authored-by: jalving <[email protected]>

1 parent 321a2e2 commit e9e34b4
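
For context, here is a minimal sketch of the code path this commit fixes: training a small `linear-tree` model and embedding it on an `OmltBlock` through `LinearTreeHybridBigMFormulation`. The synthetic data, the `LinearTreeDefinition` keyword arguments, and the objective are illustrative assumptions, not taken from this PR; only the `OmltBlock()` / `build_formulation()` usage mirrors the updated test.

```python
# Hedged usage sketch of the hybrid big-M linear-tree formulation.
# Assumptions: the LinearTreeDefinition bounds keyword and the synthetic data.
import numpy as np
import pyomo.environ as pe
from lineartree import LinearTreeRegressor
from sklearn.linear_model import LinearRegression

from omlt import OmltBlock
from omlt.linear_tree import LinearTreeDefinition, LinearTreeHybridBigMFormulation

# Train a small model tree on synthetic 1-D data.
X = np.linspace(-2.0, 2.0, 50).reshape(-1, 1)
y = np.sin(X).ravel()
regr = LinearTreeRegressor(base_estimator=LinearRegression()).fit(X, y)

# Wrap the trained tree for OMLT (bounds keyword assumed; check the OMLT docs).
lt_def = LinearTreeDefinition(regr, unscaled_input_bounds={0: (-2.0, 2.0)})
formulation = LinearTreeHybridBigMFormulation(lt_def)

# Embed the formulation on an OmltBlock, as the updated test does.
m = pe.ConcreteModel()
m.lt = OmltBlock()
m.lt.build_formulation(formulation)
m.obj = pe.Objective(expr=m.lt.outputs[0])
```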

File tree

8 files changed: +250 −201 lines changed


docs/notebooks/neuralnet/graph_neural_network_formulation.ipynb

Lines changed: 171 additions & 176 deletions
Large diffs are not rendered by default.

pyproject.toml

Lines changed: 2 additions & 2 deletions
```diff
@@ -74,12 +74,12 @@ extend-exclude = ["src/omlt/_version.py"]
 [tool.ruff.lint]
 select = ["ALL"]
 ignore = [
-    "ANN101",
     "ANN401",
     "COM812",
     "ISC001",
     "SLF001",
     "ARG001",
+    "PLC0206",
     "N803",
     "N806",
     # Remove these after issue https://github.com/cog-imperial/OMLT/issues/153 is fixed.
@@ -96,7 +96,7 @@ ignore = [
     "ANN002",
     "ANN201",
     "ANN202",
-    "ANN204",
+    "ANN204"
 ]
 
 [tool.ruff.lint.pydocstyle]
```

src/omlt/__init__.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -13,7 +13,7 @@
 from omlt.scaling import OffsetScaling
 
 __all__ = [
-    "OmltBlock",
     "OffsetScaling",
+    "OmltBlock",
     "__version__",
 ]
```

src/omlt/io/__init__.py

Lines changed: 3 additions & 3 deletions
```diff
@@ -17,11 +17,11 @@
 
 __all__ = [
     "keras_available",
+    "load_keras_sequential",
+    "load_onnx_neural_network",
+    "load_onnx_neural_network_with_bounds",
     "onnx_available",
     "torch_available",
     "torch_geometric_available",
-    "load_onnx_neural_network",
-    "load_onnx_neural_network_with_bounds",
     "write_onnx_model_with_bounds",
-    "load_keras_sequential",
 ]
```

src/omlt/linear_tree/lt_formulation.py

Lines changed: 51 additions & 13 deletions
```diff
@@ -106,6 +106,7 @@ def _build_formulation(self):
             output_vars=self.block.scaled_outputs,
             transformation=self.transformation,
             epsilon=self.epsilon,
+            include_leaf_equalities=True,
         )
 
 
@@ -170,23 +171,51 @@ def _build_formulation(self):
         This method is called by the OmltBlock to build the corresponding
         mathematical formulation on the Pyomo block.
         """
+        block = self.block
+        leaves = self.model_definition.leaves
+
         _setup_scaled_inputs_outputs(
-            self.block,
+            block,
             self.model_definition.scaling_object,
             self.model_definition.scaled_input_bounds,
         )
 
+        input_vars = self.block.scaled_inputs
+
         _add_gdp_formulation_to_block(
-            block=self.block,
+            block=block,
             model_definition=self.model_definition,
-            input_vars=self.block.scaled_inputs,
+            input_vars=input_vars,
             output_vars=self.block.scaled_outputs,
             transformation="custom",
             epsilon=self.epsilon,
+            include_leaf_equalities=False,
         )
 
-        pe.TransformationFactory("gdp.bound_pretransformation").apply_to(self.block)
-        pe.TransformationFactory("gdp.binary_multiplication").apply_to(self.block)
+        pe.TransformationFactory("gdp.bound_pretransformation").apply_to(block)
+        # It doesn't really matter what transformation we call next, so we just
+        # use bigm--all it's going to do is create the exactly-one constraints
+        # and mark all the disjunctive parts of the model as transformed.
+        pe.TransformationFactory("gdp.bigm").apply_to(block)
+
+        # We now create the \sum((a_l^Tx + b_l)*y_l for l in leaves) = d constraints
+        # manually.
+        features = np.arange(0, self.model_definition.n_inputs)
+
+        @block.Constraint(list(leaves.keys()))
+        def linear_constraint(mdl, tree):
+            leaf_ids = list(leaves[tree].keys())
+            return block.intermediate_output[tree] == sum(
+                (
+                    sum(
+                        leaves[tree][leaf]["slope"][feat] * input_vars[feat]
+                        for feat in features
+                    )
+                    + leaves[tree][leaf]["intercept"]
+                )
+                * block.disjunct[tree, leaf].binary_indicator_var
+                for leaf in leaf_ids
+            )
 
 
 def _build_output_bounds(model_def, input_bounds):
@@ -232,7 +261,13 @@ def _build_output_bounds(model_def, input_bounds):
 
 
 def _add_gdp_formulation_to_block(  # noqa: PLR0913
-    block, model_definition, input_vars, output_vars, transformation, epsilon
+    block,
+    model_definition,
+    input_vars,
+    output_vars,
+    transformation,
+    epsilon,
+    include_leaf_equalities,
 ):
     """This function adds the GDP representation to the OmltBlock using Pyomo.GDP.
 
@@ -245,7 +280,9 @@ def _add_gdp_formulation_to_block(  # noqa: PLR0913
         epsilon: Tolerance to use in enforcing that choosing the right
             branch of a linear tree node can only happen if the feature
            is strictly greater than the branch value.
-
+        include_leaf_equalities: boolean to indicate if the formulation
+            should include the equalities setting the leaf values or not.
+            (default: True)
     """
     leaves = model_definition.leaves
     input_bounds = model_definition.scaled_input_bounds
@@ -283,12 +320,13 @@ def ub_rule(dsj, feat):
 
         dsj.ub_constraint = pe.Constraint(features, rule=ub_rule)
 
-        slope = leaves[tree][leaf]["slope"]
-        intercept = leaves[tree][leaf]["intercept"]
-        dsj.linear_exp = pe.Constraint(
-            expr=sum(slope[k] * input_vars[k] for k in features) + intercept
-            == block.intermediate_output[tree]
-        )
+        if include_leaf_equalities:
+            slope = leaves[tree][leaf]["slope"]
+            intercept = leaves[tree][leaf]["intercept"]
+            dsj.linear_exp = pe.Constraint(
+                expr=sum(slope[k] * input_vars[k] for k in features) + intercept
+                == block.intermediate_output[tree]
+            )
 
     block.disjunct = Disjunct(t_l, rule=disjuncts_rule)
 
```
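In the notation of the code comment above, the constraint that `linear_constraint` builds for each tree fixes the intermediate output d to the selected leaf's linear prediction, with y_l the leaf disjunct's binary indicator variable; the exactly-one constraint over the y_l comes from `gdp.bigm`, and the per-leaf feature bounds come from `gdp.bound_pretransformation`. As a sketch:

```latex
% One such constraint per tree; y_l = block.disjunct[tree, l].binary_indicator_var
d = \sum_{\ell \in \text{leaves}} \left( a_\ell^{\top} x + b_\ell \right) y_\ell ,
\qquad
\sum_{\ell \in \text{leaves}} y_\ell = 1 ,
\qquad
y_\ell \in \{0, 1\}.
```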

src/omlt/neuralnet/__init__.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -32,9 +32,9 @@
 )
 
 __all__ = [
-    "NetworkDefinition",
     "FullSpaceNNFormulation",
     "FullSpaceSmoothNNFormulation",
+    "NetworkDefinition",
     "ReducedSpaceNNFormulation",
     "ReducedSpaceSmoothNNFormulation",
     "ReluBigMFormulation",
```

src/omlt/neuralnet/activations/__init__.py

Lines changed: 4 additions & 4 deletions
```diff
@@ -30,16 +30,16 @@
 NON_INCREASING_ACTIVATIONS: list[Any] = []
 
 __all__ = [
-    "linear_activation_constraint",
-    "linear_activation_function",
+    "ACTIVATION_FUNCTION_MAP",
+    "NON_INCREASING_ACTIVATIONS",
     "ComplementarityReLUActivation",
     "bigm_relu_activation_constraint",
+    "linear_activation_constraint",
+    "linear_activation_function",
     "sigmoid_activation_constraint",
     "sigmoid_activation_function",
     "softplus_activation_constraint",
     "softplus_activation_function",
     "tanh_activation_constraint",
     "tanh_activation_function",
-    "ACTIVATION_FUNCTION_MAP",
-    "NON_INCREASING_ACTIVATIONS",
 ]
```

tests/linear_tree/test_lt_formulation.py

Lines changed: 17 additions & 1 deletion
```diff
@@ -1,6 +1,8 @@
 import numpy as np
 import pyomo.environ as pe
 import pytest
+from pyomo.common.collections import ComponentSet
+from pyomo.core.expr import identify_variables
 
 from omlt.dependencies import lineartree_available
 
@@ -245,7 +247,7 @@ def test_nonzero_epsilon():
     solution = (pe.value(model_good.x), pe.value(model_good.y))
     y_pred = regr_small.predict(np.array(solution[0]).reshape(1, -1))
     # With epsilon, the model matches the tree prediction
-    assert y_pred[0] == pytest.approx(solution[1])
+    assert y_pred[0] == pytest.approx(solution[1], abs=1e-4)
 
 
 @pytest.mark.skipif(
@@ -657,6 +659,20 @@ def test_hybrid_bigm_formulation_multi_var():
     model1.lt = OmltBlock()
     model1.lt.build_formulation(formulation1_lt)
 
+    num_constraints = 0
+    var_set = ComponentSet()
+    for cons in model1.lt.component_data_objects(pe.Constraint, active=True):
+        num_constraints += 1
+        for v in identify_variables(cons.expr):
+            var_set.add(v)
+
+    num_leaves = len(ltmodel_small.leaves[0])
+    # binary for each leaf + two inputs and an output + 5 scaled input/output vars
+    assert len(var_set) == num_leaves + 3 + 4
+    # 2 bounds constraints for each input, the xor, the output constraint, and
+    # four scaling constraints from OMLT
+    assert num_constraints == 2 * 2 + 1 + 1 + 4
+
     @model1.Constraint()
     def connect_input1(mdl):
         return mdl.x0 == mdl.lt.inputs[0]
```
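As a quick check on the new size assertion, the expected constraint count simply restates the comments in the test:

```text
2 bound constraints per input x 2 inputs = 4
exactly-one (xor) constraint             = 1
output constraint (linear_constraint)    = 1
OMLT scaling constraints                 = 4
total                                    = 2 * 2 + 1 + 1 + 4 = 10
```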

0 commit comments
