From ad1de860bb4adcd91565e73cbda85ac3b4331678 Mon Sep 17 00:00:00 2001
From: Enzo Busseti
Date: Tue, 9 Jul 2024 19:03:30 +0400
Subject: [PATCH] forgot to update test

---
 project_euromir/tests/test_loss.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/project_euromir/tests/test_loss.py b/project_euromir/tests/test_loss.py
index d67af84..4a20ddc 100644
--- a/project_euromir/tests/test_loss.py
+++ b/project_euromir/tests/test_loss.py
@@ -103,7 +103,7 @@ def _hessian_from_dresidual(cls, xy):
         dres = cls._dresidual_linop(xy)
         return sp.sparse.linalg.LinearOperator(
             shape=(cls.n+cls.m, cls.n+cls.m),
-            matvec=lambda dxy: dres.T @ (dres @ (dxy * 2.)))
+            matvec=lambda dxy: dres.T @ (dres @ dxy))
 
     def test_gradient(self):
         """Test that the gradient is numerically accurate."""
@@ -129,7 +129,7 @@ def test_loss_residual(self):
             xy = np.random.randn(self.n+self.m)
             self.assertTrue(
                 np.isclose(self._loss(xy),
-                    np.linalg.norm(self._residual(xy))**2))
+                    np.linalg.norm(self._residual(xy))**2/2.))
 
     def test_dr_drt(self):
         """Test that DR and DR^T are consistent."""
@@ -146,7 +146,7 @@ def test_dresidual_gradient(self):
             np.random.seed(seed)
             xy = np.random.randn(self.n+self.m)
             grad = self._grad(xy)
-            newgrad = 2 * (self._dresidual_linop(xy).T @ self._residual(xy))
+            newgrad = (self._dresidual_linop(xy).T @ self._residual(xy))
             self.assertTrue(np.allclose(grad, newgrad))
 
     def test_dresidual_hessian(self):
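
All three hunks encode the same convention change: the loss is now f(xy) = ||R(xy)||^2 / 2 rather than ||R(xy)||^2, so the gradient becomes DR^T R(xy) and the Gauss-Newton Hessian matvec becomes DR^T (DR dxy), each dropping the former factor of 2. Below is a minimal sketch of that convention on a toy affine residual R(xy) = A xy - b; the names A, b and the finite-difference check are illustrative assumptions, not code from project_euromir.

# Minimal sketch of the ||R||^2 / 2 loss convention the updated tests assume.
# The affine residual R(xy) = A @ xy - b and the names A, b are illustrative
# assumptions; they are not part of project_euromir.
import numpy as np

rng = np.random.default_rng(0)
m, n = 5, 3
A = rng.standard_normal((m, n))   # plays the role of DR (constant here)
b = rng.standard_normal(m)
xy = rng.standard_normal(n)

residual = A @ xy - b
loss = np.linalg.norm(residual)**2 / 2.   # as in test_loss_residual

# Gradient DR^T R, no factor of 2 (as in test_dresidual_gradient).
grad = A.T @ residual

# Gauss-Newton Hessian matvec DR^T (DR dxy), no factor of 2
# (as in _hessian_from_dresidual).
def hess_matvec(dxy):
    return A.T @ (A @ dxy)

# Finite-difference check: under the 1/2 convention the gradient above
# matches the directional derivatives of the loss without any rescaling.
eps = 1e-7
for i in range(n):
    e = np.zeros(n)
    e[i] = eps
    fd = (np.linalg.norm(A @ (xy + e) - b)**2 / 2. - loss) / eps
    assert abs(fd - grad[i]) < 1e-4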