Update the search state when the softness changes

Also, tighten the convergence requirements to account for how the
softness parameter affects the loss function.

commit bc17d71f4a
parent a203f6bc1b
Author: Aaron Fenyes
Date:   2025-09-19 11:14:38 -07:00
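
A minimal sketch of the tightened test, assuming the total loss is composed as the hard loss plus `softness` times the soft loss (that composition, the helper function, and the `GRAD_TOL` value below are illustrative assumptions, not the project's actual definitions):

    // Sketch only: `softness` scales the soft-constraint part of the
    // loss, so it also scales that part of the gradient. Multiplying
    // the gradient threshold by `softness` tightens the convergence
    // test in step with the softening, as the message above describes.
    fn is_converged(loss_hard: f64, grad_size: f64, tol: f64, softness: f64) -> bool {
        const GRAD_TOL: f64 = 1e-9; // placeholder value, for illustration only
        loss_hard < tol && grad_size < softness * GRAD_TOL
    }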


@@ -476,7 +476,7 @@ pub fn realize_gram(
         history.config.push(state.config.clone());
         history.scaled_loss.push(state.loss_hard / scale_adjustment);
         grad_size = neg_grad_stacked.norm_squared();
-        if state.loss_hard < tol && grad_size < GRAD_TOL { break; }
+        if state.loss_hard < tol && grad_size < softness * GRAD_TOL { break; }
 
         // compute the Newton step
         /* TO DO */
@@ -515,13 +515,14 @@ pub fn realize_gram(
         // if we're near a minimum of the total loss, but the hard loss still
         // isn't tolerably low, make the soft constraints softer
         const SOFTNESS_BACKOFF_THRESHOLD: f64 = 1e-6;
-        const SOFTNESS_BACKOFF: f64 = 0.5;
-        if state.loss_hard >= tol && grad_size < SOFTNESS_BACKOFF_THRESHOLD {
+        const SOFTNESS_BACKOFF: f64 = 0.9;
+        if state.loss_hard >= tol && grad_size < softness * SOFTNESS_BACKOFF_THRESHOLD {
             softness *= SOFTNESS_BACKOFF;
+            state = SearchState::from_config(gram, soft, softness, state.config);
             console_log!("Softness decreased to {softness}");
         }
     }
-    let result = if state.loss_hard < tol && grad_size < GRAD_TOL {
+    let result = if state.loss_hard < tol && grad_size < softness * GRAD_TOL {
         // express the uniform basis in the standard basis
         const UNIFORM_DIM: usize = 4;
         let total_dim_unif = UNIFORM_DIM * assembly_dim;
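
For scale, a hypothetical illustration (not part of the codebase) of how the new backoff factor changes the softening schedule:

    // Illustration only: number of backoff steps needed to shrink an
    // initial softness of 1.0 down to a given target.
    fn backoff_steps(target: f64, backoff: f64) -> u32 {
        (target.ln() / backoff.ln()).ceil() as u32
    }

    fn main() {
        assert_eq!(backoff_steps(1e-3, 0.9), 66); // new factor: gradual
        assert_eq!(backoff_steps(1e-3, 0.5), 10); // old factor: aggressive
    }

The gentler factor trades more iterations for smaller jumps in the loss landscape, and rebuilding the search state on each decrease presumably matters because `softness` parameterizes the loss that `SearchState` carries.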