From bc17d71f4aea3e422db9b3f05f4f2b2fc39198b4 Mon Sep 17 00:00:00 2001
From: Aaron Fenyes
Date: Fri, 19 Sep 2025 11:14:38 -0700
Subject: [PATCH] Update the search state when the softness changes

Also, tighten the convergence requirements to account for how the
softness parameter affects the loss function.
---
 app-proto/src/engine.rs | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/app-proto/src/engine.rs b/app-proto/src/engine.rs
index e51fcbc..491a2f9 100644
--- a/app-proto/src/engine.rs
+++ b/app-proto/src/engine.rs
@@ -476,7 +476,7 @@ pub fn realize_gram(
         history.config.push(state.config.clone());
         history.scaled_loss.push(state.loss_hard / scale_adjustment);
         grad_size = neg_grad_stacked.norm_squared();
-        if state.loss_hard < tol && grad_size < GRAD_TOL { break; }
+        if state.loss_hard < tol && grad_size < softness * GRAD_TOL { break; }

         // compute the Newton step
         /* TO DO */
@@ -515,13 +515,14 @@ pub fn realize_gram(
         // if we're near a minimum of the total loss, but the hard loss still
         // isn't tolerably low, make the soft constraints softer
         const SOFTNESS_BACKOFF_THRESHOLD: f64 = 1e-6;
-        const SOFTNESS_BACKOFF: f64 = 0.5;
-        if state.loss_hard >= tol && grad_size < SOFTNESS_BACKOFF_THRESHOLD {
+        const SOFTNESS_BACKOFF: f64 = 0.9;
+        if state.loss_hard >= tol && grad_size < softness * SOFTNESS_BACKOFF_THRESHOLD {
             softness *= SOFTNESS_BACKOFF;
+            state = SearchState::from_config(gram, soft, softness, state.config);
             console_log!("Softness decreased to {softness}");
         }
     }
-    let result = if state.loss_hard < tol && grad_size < GRAD_TOL {
+    let result = if state.loss_hard < tol && grad_size < softness * GRAD_TOL {
         // express the uniform basis in the standard basis
         const UNIFORM_DIM: usize = 4;
         let total_dim_unif = UNIFORM_DIM * assembly_dim;
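
A note on the scaled tolerance. The body of `realize_gram` is not shown in
full here, so the following is a sketch built on the assumption that the
total loss has the form loss_total = loss_hard + softness * loss_soft.
Under that assumption, the residual gradient near a hard-constraint
minimum comes mostly from the soft term and shrinks with `softness`, so
scaling GRAD_TOL by `softness` keeps the convergence test from loosening
as the soft constraints are softened; if `softness` starts at 1 and only
decreases, the scaled test is strictly tighter, matching the commit
message. A minimal Rust sketch of the test, with free-standing parameters
standing in for the real search state:

    /// Sketch of the convergence test from the patch: the hard loss must
    /// be tolerably small, and the gradient-size tolerance is scaled by
    /// the current softness so that softening the soft constraints does
    /// not loosen the requirement.
    fn converged(loss_hard: f64, grad_size: f64, softness: f64, tol: f64, grad_tol: f64) -> bool {
        loss_hard < tol && grad_size < softness * grad_tol
    }

    fn main() {
        // Example: the same gradient size passes at softness 1.0 but
        // fails after the softness has backed off to 0.1, illustrating
        // the tightened test.
        let (tol, grad_tol) = (1e-12, 1e-8);
        assert!(converged(1e-13, 5e-9, 1.0, tol, grad_tol));
        assert!(!converged(1e-13, 5e-9, 0.1, tol, grad_tol));
    }

Rebuilding the state with SearchState::from_config whenever the softness
decreases keeps the cached loss and gradient consistent with the new loss
function, which is the fix named in the subject line; the gentler backoff
factor (0.9 instead of 0.5) then makes each rebuild a smaller
perturbation of the search.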