Change loss function to match gradient

Aaron Fenyes 2024-07-09 14:00:24 -07:00
parent 023759a267
commit 77bc124170


@@ -104,7 +104,7 @@ function realize_gram(
     # do gradient descent
     Δ_proj = proj_diff(gram, L'*Q*L)
-    loss = norm(Δ_proj)
+    loss = dot(Δ_proj, Δ_proj)
     for step in 1:max_descent_steps
         # stop if the loss is tolerably low
         if loss < tol
@@ -128,7 +128,7 @@ function realize_gram(
             history.stepsize[end] = stepsize
             L = L_last + stepsize * neg_grad
             Δ_proj = proj_diff(gram, L'*Q*L)
-            loss = norm(Δ_proj)
+            loss = dot(Δ_proj, Δ_proj)
             improvement = loss_last - loss
             if improvement >= target_improvement * stepsize * slope
                 history.backoff_steps[end] = backoff_steps
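
Why the change matters: the sufficient-decrease test above, improvement >= target_improvement * stepsize * slope, presumably derives slope from neg_grad, the gradient of the squared Frobenius norm. dot(Δ_proj, Δ_proj) is exactly that squared norm, while norm(Δ_proj) is its square root, whose gradient differs by a factor of 1/(2*norm(Δ_proj)), so the old loss was being compared against a slope computed for a different function. A minimal Julia sketch of the idea (not from this repository: Q, gram, f, and grad_f are hypothetical stand-ins, and the proj_diff projection is omitted):

using LinearAlgebra

# Hypothetical stand-ins for the quantities in realize_gram;
# the proj_diff projection is omitted for simplicity.
Q = Diagonal([1.0, 1.0, -1.0])   # an indefinite symmetric form
gram = [4.0 1.0; 1.0 4.0]        # a symmetric target Gram matrix
L = randn(3, 2)

# squared-norm loss: f(L) = ⟨Δ, Δ⟩ with Δ = gram - L'QL
f(L) = (Δ = gram - L'*Q*L; dot(Δ, Δ))

# its gradient: ∇f(L) = -4*Q*L*Δ (using symmetry of gram and Q),
# so a descent step would follow neg_grad = 4*Q*L*Δ
grad_f(L) = (Δ = gram - L'*Q*L; -4*Q*L*Δ)

# a central finite difference along a random direction V confirms that
# dot(Δ, Δ), not norm(Δ), is the loss this gradient belongs to
ε = 1e-6
V = randn(3, 2)
fd = (f(L + ε*V) - f(L - ε*V)) / (2ε)
@assert isapprox(fd, dot(grad_f(L), V); rtol = 1e-4)

Rerunning the same check with f(L) defined as norm(gram - L'*Q*L) would fail, since that loss only matches the gradient above after rescaling by 1/(2*norm(Δ)).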