Skip to content

Commit acbf0e6

Browse files
committed
Fix convergence of trickier problems
The optimizer no longer assumes the first step is a good one, and uses the same damping lambda for the second iteration. This solves a potential case where it is hard to recover from a wrong first step.
1 parent 2cef9fc commit acbf0e6

File tree

2 files changed

+28
-32
lines changed

2 files changed

+28
-32
lines changed

include/tinyopt/optimizers/optimizer.h

Lines changed: 27 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -84,6 +84,9 @@ class Optimizer {
8484
"❌ Error: Parameters dimensions cannot be 0 or Dynamic at "
8585
"execution time");
8686
return StopReason::kSkipped;
87+
} else if (dims < 0) {
88+
TINYOPT_LOG("❌ Error: Parameters dimensions is negative: {} ", dims);
89+
return StopReason::kSkipped;
8790
}
8891

8992
// Resize the solver if needed TODO move?
@@ -450,6 +453,28 @@ class Optimizer {
450453
out.deltas2.emplace_back(dx_norm2);
451454
out.successes.emplace_back(is_good_step);
452455

456+
// Update output struct
457+
if (is_good_step || iter == 0) { /* GOOD Step */
458+
// Note: we guess it's a good step in the first iteration
459+
if (iter > 0) solver_.GoodStep(options_.use_step_quality_approx ? rel_derr : 0.0f);
460+
out.num_consec_failures = 0;
461+
out.final_cost = cost;
462+
out.final_rerr_dec = rel_derr;
463+
} else { /* BAD Step */
464+
solver_.BadStep();
465+
out.num_failures++;
466+
out.num_consec_failures++;
467+
if (options_.max_consec_failures > 0 &&
468+
out.num_consec_failures >= options_.max_consec_failures) {
469+
out.stop_reason = StopReason::kMaxConsecNoDecr;
470+
return status;
471+
}
472+
if (options_.max_total_failures > 0 && out.num_failures >= options_.max_total_failures) {
473+
out.stop_reason = StopReason::kMaxNoDecr;
474+
return status;
475+
}
476+
}
477+
453478
// Log
454479
if (options_.log.enable) {
455480
std::ostringstream oss;
@@ -478,7 +503,8 @@ class Optimizer {
478503

479504
// Print error/cost
480505
oss << TINYOPT_FORMAT_NS::format("{}:{:.4e} n:{} d{}:{:+.2e} r{}:{:+.1e} ", options_.log.e,
481-
err, nerr, options_.log.e, derr, options_.log.e, rel_derr);
506+
err, nerr, options_.log.e, iter == 0 ? 0.0f : derr,
507+
options_.log.e, rel_derr);
482508

483509
// Print step info
484510
oss << TINYOPT_FORMAT_NS::format("|δx|:{:.2e} ", sqrt(dx_norm2));
@@ -505,28 +531,6 @@ class Optimizer {
505531
TINYOPT_LOG("{}", oss.str());
506532
}
507533

508-
// Update output struct
509-
if (is_good_step) { /* GOOD Step */
510-
// Note: we guess it's a good step in the first iteration
511-
solver_.GoodStep(options_.use_step_quality_approx ? rel_derr : 0.0f);
512-
out.num_consec_failures = 0;
513-
out.final_cost = cost;
514-
out.final_rerr_dec = rel_derr;
515-
} else { /* BAD Step */
516-
solver_.BadStep();
517-
out.num_failures++;
518-
out.num_consec_failures++;
519-
if (options_.max_consec_failures > 0 &&
520-
out.num_consec_failures >= options_.max_consec_failures) {
521-
out.stop_reason = StopReason::kMaxConsecNoDecr;
522-
return status;
523-
}
524-
if (options_.max_total_failures > 0 && out.num_failures >= options_.max_total_failures) {
525-
out.stop_reason = StopReason::kMaxNoDecr;
526-
return status;
527-
}
528-
}
529-
530534
// Detect if we need to stop
531535
if (solver_failed)
532536
out.stop_reason = StopReason::kSolverFailed;

include/tinyopt/solvers/lm.h

Lines changed: 1 addition & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,6 @@ class SolverLM : public tinyopt::solvers::SolverGN<Hessian_t> {
7676
lambda_ = options_.damping_init;
7777
prev_lambda_ = 0;
7878
bad_factor_ = options_.bad_factor;
79-
steps_count_ = 0;
8079
rebuild_linear_system_ = true;
8180
}
8281

@@ -163,20 +162,14 @@ class SolverLM : public tinyopt::solvers::SolverGN<Hessian_t> {
163162
prev_lambda_ = lambda_;
164163
lambda_ = std::clamp<Scalar>(lambda_ * s, options_.damping_range[0], options_.damping_range[1]);
165164
bad_factor_ = options_.bad_factor;
166-
if (steps_count_ < 3) steps_count_++;
167165
}
168166

169167
/// Damping strategy for a bad step: increase the damping factor \lambda
170168
void BadStep(Scalar /*quality*/ = 0.0f) override {
171169
Scalar s = bad_factor_; // Scale to apply on damping lambda
172-
173-
// Check whether the very first step was actually wrong and revert the scale applied to lambda
174-
if (steps_count_ == 1) s /= options_.good_factor;
175-
176170
prev_lambda_ = lambda_;
177171
lambda_ = std::clamp<Scalar>(lambda_ * s, options_.damping_range[0], options_.damping_range[1]);
178172
bad_factor_ *= options_.bad_factor;
179-
if (steps_count_ < 3) steps_count_++;
180173
}
181174

182175
/// Damping strategy for a failure to solve the linear system, decrease the damping factor \lambda
@@ -224,9 +217,8 @@ class SolverLM : public tinyopt::solvers::SolverGN<Hessian_t> {
224217
protected:
225218
const Options options_;
226219
Scalar lambda_ = 1e-4f; ///< Initial damping factor (\lambda)
227-
Scalar prev_lambda_ = 0; ///< Previous damping factor (0 at start)
220+
Scalar prev_lambda_ = 0.0f; ///< Previous damping factor (0 at start)
228221
Scalar bad_factor_ = 2.0f; ///< Current damping scaling factor for bad steps
229-
int steps_count_ = 0; ///< Count all steps until 2nd one, used to
230222
bool rebuild_linear_system_ = true; ///< Whether the linear system (H and gradient) have to be
231223
///< rebuilt or a simple evaluation can do it.
232224
};

0 commit comments

Comments
 (0)