@@ -95,6 +95,57 @@ def test_classify_using_hessian(self, simulator, dataset):
9595 logger .extend_log (x_search = [x0 ], x_model = [x0 ], cost = [problem (x0 )])
9696 result = pybop .OptimisationResult (optim = optim , logger = logger , time = 1.0 )
9797
message, info = pybop.classify_using_hessian(result)

# classify_using_hessian returns (message, info): info is a dict of
# diagnostic arrays describing local curvature at the located optimum.
assert isinstance(info, dict)
expected_keys = (
    "hessian_fd",
    "eigenvalues",
    "eigenvectors",
    "x",
    "dx",
    "names",
    "best_cost",
    "span0",
    "span1",
    "param0",
    "param1",
    "Z",
)
for key in expected_keys:
    assert key in info

# Shapes for this two-parameter problem.
assert info["hessian_fd"].shape == (2, 2)
assert info["eigenvalues"].shape == (2,)
assert info["eigenvectors"].shape == (2, 2)
assert info["x"].shape == (2,)
assert info["Z"].ndim == 2
assert info["Z"].shape[0] == info["Z"].shape[1]  # grid is square

# The finite-difference Hessian should be finite and (numerically) symmetric.
H = info["hessian_fd"]
assert np.isfinite(H).all()
assert np.allclose(H, H.T, atol=1e-8)

# Eigenvalues should be sorted ascending.
evals = info["eigenvalues"]
assert evals[0] <= evals[1]

# Eigenvectors should be finite with non-zero norm.
evecs = info["eigenvectors"]
assert np.isfinite(evecs).all()
for col in range(evecs.shape[1]):
    assert np.linalg.norm(evecs[:, col]) > 0.0

# The cost-surface grid should contain at least some finite values.
Z = info["Z"]
assert np.isfinite(Z).any()

# The grid axes should bracket the located optimum.
p0 = info["param0"]
p1 = info["param1"]
assert p0.min() < x[0] < p0.max()
assert p1.min() < x[1] < p1.max()
148+
98149 if np .all (x == np .asarray ([0.05 , 0.05 ])):
99150 message , _ = pybop .classify_using_hessian (result )
100151 assert message == "The optimiser has located a minimum."
@@ -187,41 +238,4 @@ def test_insensitive_classify_using_hessian(self, model, parameter_values):
187238 assert message == (
188239 "Classification cannot proceed due to infinite cost value(s)."
189240 " The result is near the upper bound of R0_a [Ohm]."
190- )
191-
def test_return_info_keys_and_shapes(self, simulator, dataset):
    """Check classify_using_hessian's info dict exposes the documented keys and shapes."""
    # Build a minimal optimisation result at a clipped starting point.
    cost = pybop.RootMeanSquaredError(dataset)
    problem = pybop.Problem(simulator, cost)
    x = np.asarray([0.05, 0.05])
    bounds = problem.parameters.get_bounds()
    x0 = np.clip(x, bounds["lower"], bounds["upper"])
    optim = pybop.XNES(problem)
    logger = pybop.Logger(minimising=problem.minimising)
    logger.iteration = 1
    logger.extend_log(x_search=[x0], x_model=[x0], cost=[problem(x0)])
    result = pybop.OptimisationResult(optim=optim, logger=logger, time=1.0)

    _, info = pybop.classify_using_hessian(result)

    # The info payload must be a dict carrying every documented key.
    assert isinstance(info, dict)
    required = (
        "hessian_fd",
        "eigenvalues",
        "eigenvectors",
        "x",
        "dx",
        "names",
        "best_cost",
        "span0",
        "span1",
        "param0",
        "param1",
        "Z",
    )
    for name in required:
        assert name in info

    # Array shapes for a two-parameter problem, plus a square cost grid.
    assert info["hessian_fd"].shape == (2, 2)
    assert info["eigenvalues"].shape == (2,)
    assert info["eigenvectors"].shape == (2, 2)
    assert info["x"].shape == (2,)
    assert info["Z"].ndim == 2
    assert info["Z"].shape[0] == info["Z"].shape[1]  # grid is square
241+ )
0 commit comments