Skip to content

Commit d235c7e

Browse files
authored
improved log output for NLP scaling (#709)
* improved log output for NLP ObjGrad scaling * changed option description
1 parent 4d0d953 commit d235c7e

File tree

3 files changed

+43
-10
lines changed

3 files changed

+43
-10
lines changed

src/Optimization/hiopNlpFormulation.cpp

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -671,6 +671,7 @@ bool hiopNlpFormulation::apply_scaling(hiopVector& c, hiopVector& d, hiopVector&
671671
{
672672
// check if we need to do scaling
673673
if("none" == options->GetString("scaling_type")) {
674+
log->printf(hovScalars, "NLP scaling is disabled.\n");
674675
return false;
675676
}
676677

@@ -686,8 +687,13 @@ bool hiopNlpFormulation::apply_scaling(hiopVector& c, hiopVector& d, hiopVector&
686687
con_grad_target = max_con_grad;
687688
}
688689

689-
if(gradf.infnorm() < obj_grad_target && Jac_c.max_abs_value() < con_grad_target &&
690-
Jac_d.max_abs_value() < con_grad_target) {
690+
const auto gmax = gradf.infnorm();
691+
const auto Jcmax = Jac_c.max_abs_value();
692+
const auto Jdmax = Jac_d.max_abs_value();
693+
if(gmax < obj_grad_target && Jcmax < con_grad_target && Jdmax < con_grad_target) {
694+
log->printf(hovScalars, "No NLP scaling is performed:\n");
695+
log->printf(hovScalars, "\tgrad target %12.5e NLP inf grad %12.5e\n", obj_grad_target, gmax);
696+
log->printf(hovScalars, "\tJac target %12.5e NLP inf/max Jc %12.5e Jd %12.5e\n", con_grad_target, Jcmax, Jdmax);
691697
return false;
692698
}
693699

src/Optimization/hiopNlpTransforms.cpp

Lines changed: 33 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -448,21 +448,35 @@ hiopNLPObjGradScaling::hiopNLPObjGradScaling(hiopNlpFormulation* nlp,
448448
const double max_obj_grad = nlp_->options->GetNumeric("scaling_max_obj_grad");
449449
const double max_con_grad = nlp_->options->GetNumeric("scaling_max_con_grad");
450450

451+
std::stringstream ss_obj;
452+
451453
const double gradf_infnorm = gradf.infnorm();
452454
scale_factor_obj = 1.;
453455
if(max_obj_grad == 0.) {
454456
if(gradf_infnorm > max_grad) {
455457
scale_factor_obj = max_grad / gradf.infnorm();
458+
ss_obj << "NLPObjGradScaling: NLP objective scaling due to option scaling_max_grad="
459+
<< max_grad << " ";
456460
}
457461
} else {
458462
if(gradf_infnorm > 0.) {
463+
ss_obj << "NLPObjGradScaling: NLP objective scaling due to option scaling_max_obj_grad="
464+
<< max_obj_grad << " ";
459465
scale_factor_obj = max_obj_grad / gradf.infnorm();
460466
}
461467
}
462468
if(min_grad > 0.0 && scale_factor_obj < min_grad) {
463469
scale_factor_obj = min_grad;
470+
ss_obj << "NLPObjGradScaling: NLP objective scaling overwritten by option scaling_min_grad="
471+
<< min_grad << std::endl;
464472
}
465-
473+
if(ss_obj.str().size()>0) {
474+
nlp->log->printf(hovScalars, "%s: scale factor obj %12.5e.\n", ss_obj.str().c_str(), scale_factor_obj);
475+
} else {
476+
nlp->log->printf(hovScalars,
477+
"NLPObjGradScaling: No NLP objective scaling performed due to combination of options.\n");
478+
}
479+
466480
scale_factor_c = c.new_copy();
467481
scale_factor_d = d.new_copy();
468482
scale_factor_cd = LinearAlgebraFactory::create_vector(nlp_->options->GetString("mem_space"), n_eq + n_ineq);
@@ -478,14 +492,16 @@ hiopNLPObjGradScaling::hiopNLPObjGradScaling(hiopNlpFormulation* nlp,
478492
scale_factor_d->invert();
479493

480494
scale_factor_cd->copy_from_two_vec_w_pattern(*scale_factor_c, cons_eq_mapping, *scale_factor_d, cons_ineq_mapping);
481-
495+
std::stringstream ss_cons;
482496
Jac_c.row_max_abs_value(*scale_factor_c);
483497
Jac_d.row_max_abs_value(*scale_factor_d);
484498
if(max_con_grad == 0.) {
485499
if(scale_factor_c->infnorm() > max_grad) {
486500
scale_factor_c->scale(1. / max_grad);
487501
scale_factor_c->component_max(1.0);
488502
scale_factor_c->invert();
503+
ss_cons << "NLPObjGradScaling: NLP constraints (c) scaling due to option scaling_max_grad="
504+
<< max_grad << std::endl;
489505
} else {
490506
scale_factor_c->setToConstant(1.0);
491507
}
@@ -494,24 +510,36 @@ hiopNLPObjGradScaling::hiopNLPObjGradScaling(hiopNlpFormulation* nlp,
494510
scale_factor_d->scale(1. / max_grad);
495511
scale_factor_d->component_max(1.0);
496512
scale_factor_d->invert();
513+
ss_cons << "NLPObjGradScaling: NLP constraints (d) scaling due to option scaling_max_grad="
514+
<< max_grad << std::endl;
497515
} else {
498516
scale_factor_d->setToConstant(1.0);
499517
}
500518
} else {
501519
scale_factor_c->setToConstant(max_con_grad / scale_factor_c->infnorm());
502520
scale_factor_d->setToConstant(max_con_grad / scale_factor_d->infnorm());
521+
ss_cons << "NLPObjGradScaling: NLP constraints (c&d) scaling due to option scaling_max_con_grad="
522+
<< max_con_grad << std::endl;
503523
}
504524
if(min_grad > 0.0) {
505525
scale_factor_c->component_max(min_grad);
506526
scale_factor_d->component_max(min_grad);
527+
ss_cons << "NLPObjGradScaling: scaling for some constraints may have changed "
528+
<< "due to option scaling_min_grad=" << min_grad;
529+
}
530+
if(ss_cons.str().size()>0) {
531+
nlp->log->printf(hovScalars, "%s\n", ss_cons.str().c_str());
532+
} else {
533+
nlp->log->printf(hovScalars,
534+
"NLPObjGradScaling: No NLP constraints scaling performed due to combination of options.\n");
507535
}
508536
}
509537

510538
hiopNLPObjGradScaling::~hiopNLPObjGradScaling()
511539
{
512-
if(scale_factor_c) delete scale_factor_c;
513-
if(scale_factor_d) delete scale_factor_d;
514-
if(scale_factor_cd) delete scale_factor_cd;
540+
delete scale_factor_c;
541+
delete scale_factor_d;
542+
delete scale_factor_cd;
515543
}
516544

517545
} // namespace hiop

src/Utils/hiopOptions.cpp

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -800,9 +800,8 @@ void hiopOptionsNLP::register_options()
800800
1e-8,
801801
0.0,
802802
1e+20,
803-
"a positive value for this option will be used as a lower bound for (and will overwrite) "
804-
"the scaling factors computed as instructed by options scaling_max_grad, scaling_max_obj_grad and "
805-
"scaling_max_con_grad.");
803+
"Any scaling factors (computed due to any of the scaling_max_grad, scaling_max_obj_grad, "
804+
"and scaling_max_con_grad options) smaller than this option will be set to it.");
806805
}
807806

808807
// outer iterative refinement

0 commit comments

Comments
 (0)