Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
50 changes: 27 additions & 23 deletions include/tinyopt/optimizers/optimizer.h
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,9 @@ class Optimizer {
"❌ Error: Parameters dimensions cannot be 0 or Dynamic at "
"execution time");
return StopReason::kSkipped;
} else if (dims < 0) {
TINYOPT_LOG("❌ Error: Parameters dimensions is negative: {} ", dims);
return StopReason::kSkipped;
}

// Resize the solver if needed TODO move?
Expand Down Expand Up @@ -450,6 +453,28 @@ class Optimizer {
out.deltas2.emplace_back(dx_norm2);
out.successes.emplace_back(is_good_step);

// Update output struct
if (is_good_step || iter == 0) { /* GOOD Step */
// Note: we guess it's a good step in the first iteration
if (iter > 0) solver_.GoodStep(options_.use_step_quality_approx ? rel_derr : 0.0f);
out.num_consec_failures = 0;
out.final_cost = cost;
out.final_rerr_dec = rel_derr;
} else { /* BAD Step */
solver_.BadStep();
out.num_failures++;
out.num_consec_failures++;
if (options_.max_consec_failures > 0 &&
out.num_consec_failures >= options_.max_consec_failures) {
out.stop_reason = StopReason::kMaxConsecNoDecr;
return status;
}
if (options_.max_total_failures > 0 && out.num_failures >= options_.max_total_failures) {
out.stop_reason = StopReason::kMaxNoDecr;
return status;
}
}

// Log
if (options_.log.enable) {
std::ostringstream oss;
Expand Down Expand Up @@ -478,7 +503,8 @@ class Optimizer {

// Print error/cost
oss << TINYOPT_FORMAT_NS::format("{}:{:.4e} n:{} d{}:{:+.2e} r{}:{:+.1e} ", options_.log.e,
err, nerr, options_.log.e, derr, options_.log.e, rel_derr);
err, nerr, options_.log.e, iter == 0 ? 0.0f : derr,
options_.log.e, rel_derr);

// Print step info
oss << TINYOPT_FORMAT_NS::format("|δx|:{:.2e} ", sqrt(dx_norm2));
Expand All @@ -505,28 +531,6 @@ class Optimizer {
TINYOPT_LOG("{}", oss.str());
}

// Update output struct
if (is_good_step) { /* GOOD Step */
// Note: we guess it's a good step in the first iteration
solver_.GoodStep(options_.use_step_quality_approx ? rel_derr : 0.0f);
out.num_consec_failures = 0;
out.final_cost = cost;
out.final_rerr_dec = rel_derr;
} else { /* BAD Step */
solver_.BadStep();
out.num_failures++;
out.num_consec_failures++;
if (options_.max_consec_failures > 0 &&
out.num_consec_failures >= options_.max_consec_failures) {
out.stop_reason = StopReason::kMaxConsecNoDecr;
return status;
}
if (options_.max_total_failures > 0 && out.num_failures >= options_.max_total_failures) {
out.stop_reason = StopReason::kMaxNoDecr;
return status;
}
}

// Detect if we need to stop
if (solver_failed)
out.stop_reason = StopReason::kSolverFailed;
Expand Down
12 changes: 2 additions & 10 deletions include/tinyopt/solvers/lm.h
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,6 @@ class SolverLM : public tinyopt::solvers::SolverGN<Hessian_t> {
lambda_ = options_.damping_init;
prev_lambda_ = 0;
bad_factor_ = options_.bad_factor;
steps_count_ = 0;
rebuild_linear_system_ = true;
}

Expand Down Expand Up @@ -133,7 +132,7 @@ class SolverLM : public tinyopt::solvers::SolverGN<Hessian_t> {
}
}

// Damping
// Damping the diagonal: d' = d + lambda*d
if (lambda_ > 0.0) {
const double s =
rebuild_linear_system_ ? 1.0 + lambda_ : (1.0 + lambda_) / (1.0 + prev_lambda_);
Expand Down Expand Up @@ -163,20 +162,14 @@ class SolverLM : public tinyopt::solvers::SolverGN<Hessian_t> {
prev_lambda_ = lambda_;
lambda_ = std::clamp<Scalar>(lambda_ * s, options_.damping_range[0], options_.damping_range[1]);
bad_factor_ = options_.bad_factor;
if (steps_count_ < 3) steps_count_++;
}

/// Damping strategy for a bad step: increase the damping factor \lambda
void BadStep(Scalar /*quality*/ = 0.0f) override {
Scalar s = bad_factor_; // Scale to apply on damping lambda

// Check whether the very first step was actually wrong and revert the scale applied to lambda
if (steps_count_ == 1) s /= options_.good_factor;

prev_lambda_ = lambda_;
lambda_ = std::clamp<Scalar>(lambda_ * s, options_.damping_range[0], options_.damping_range[1]);
bad_factor_ *= options_.bad_factor;
if (steps_count_ < 3) steps_count_++;
}

/// Damping strategy for a failure to solve the linear system, decrease the damping factor \lambda
Expand Down Expand Up @@ -224,9 +217,8 @@ class SolverLM : public tinyopt::solvers::SolverGN<Hessian_t> {
protected:
const Options options_;
Scalar lambda_ = 1e-4f; ///< Initial damping factor (\lambda)
Scalar prev_lambda_ = 0; ///< Previous damping factor (0 at start)
Scalar prev_lambda_ = 0.0f; ///< Previous damping factor (0 at start)
Scalar bad_factor_ = 2.0f; ///< Current damping scaling factor for bad steps
int steps_count_ = 0; ///< Count all steps until 2nd one, used to
bool rebuild_linear_system_ = true; ///< Whether the linear system (H and gradient) have to be
///< rebuilt or a simple evaluation can do it.
};
Expand Down
Loading