diff --git a/yaglm/opt/constraint/convex.py b/yaglm/opt/constraint/convex.py
index 905734a..9b23f87 100644
--- a/yaglm/opt/constraint/convex.py
+++ b/yaglm/opt/constraint/convex.py
@@ -1,5 +1,6 @@
 import numpy as np
-
+from numpy.linalg import norm
+from sklearn.isotonic import isotonic_regression
 from yaglm.opt.base import Func
 
 
@@ -25,6 +26,28 @@ def _prox(self, x, step=1):
 
     def is_proximable(self):
         return True
+class LinearEquality(Constraint):
+    # Projection onto the affine set {x : Ax = b}.
+    # credited to PyUNLocBoX
+    # https://github.com/epfl-lts2/pyunlocbox/
+    def __init__(self, A, b):
+        self.A = A
+        self.b = b
+        self._pinvA = None  # will be set on first request to .pinvA
+
+    def _prox(self, x, step=1):
+        residue = self.A@x - self.b
+        sol = x - self.pinvA @ residue
+        return sol
+
+    @property
+    def pinvA(self):
+        # lazily computed (and cached) pseudo-inverse of A
+        if self._pinvA is None:
+            self._pinvA = np.linalg.pinv(self.A)
+        return self._pinvA
+
+    @property
+    def is_proximable(self):
+        return True
 
 
 class Simplex(Constraint):
@@ -58,9 +81,7 @@ def is_proximable(self):
 # See https://gist.github.com/mblondel/6f3b7aaad90606b98f71
 # for more algorithms.
 def project_simplex(v, z=1):
-    if np.sum(v) <= z:
-        return v
-
+    # z is what the entries need to add up to, e.g. z=1 for probability simplex
     n_features = v.shape[0]
     u = np.sort(v)[::-1]
     cssv = np.cumsum(u) - z
@@ -74,3 +95,41 @@
 def project_l1_ball(v, z=1):
-    return np.sign(v) * project_simplex(np.abs(v), z)
+    # A point already inside the l1 ball is its own projection;
+    # project_simplex alone would wrongly push it out onto the l1 sphere.
+    if np.sum(np.abs(v)) <= z:
+        return v
+    return np.sign(v) * project_simplex(np.abs(v), z)
 
+
+class L2Ball(Constraint):
+    # Projection onto the l2 ball of radius `mult`.
+    def __init__(self, mult=1):
+        if mult <= 0:
+            raise ValueError("mult must be positive")
+        self.mult = mult
+
+    def _prox(self, x, step=1):
+        # rescale x onto the ball only when its norm exceeds the radius
+        return x / np.max([norm(x)/self.mult, 1])
+
+    @property
+    def is_proximable(self):
+        return True
+
+class Isotonic(Constraint):
+    """Constraint for x1 <= ... <= xn or
+    x1 >= ... >= xn """
+    # TODO: allow for general isotonic regression
+    # where the order relations are a simple directed
+    # graph. For an algorithm see Nemeth and Nemeth, "How to project onto an
+    # isotone projection cone", Linear Algebra Appl. 2010
+    def __init__(self, increasing=True):
+        self.increasing = increasing
+
+    def _prox(self, x, step=1):
+        # computes the projection of x onto the monotone cone
+        # using the PAVA algorithm
+        return isotonic_regression(x, increasing=self.increasing)
+
+    @property
+    def is_proximable(self):
+        return True
diff --git a/yaglm/opt/constraint/tests.py b/yaglm/opt/constraint/tests.py
new file mode 100644
index 0000000..a3099bc
--- /dev/null
+++ b/yaglm/opt/constraint/tests.py
@@ -0,0 +1,67 @@
+import numpy as np
+from unittest import TestCase
+from yaglm.opt.constraint import convex
+
+class TestProjectionsOnConstraints(TestCase):
+    def setUp(self):
+        pass
+
+    def assert_arrays_close(self, test_array, ref_array):
+        "Custom assertion for arrays being almost equal"
+        np.testing.assert_allclose(test_array, ref_array)
+
+    def test_Positive(self):
+        cons = convex.Positive()
+        v = np.array([-1, 0, 2, 3, -2])
+        self.assert_arrays_close(cons.prox(v), [0, 0, 2, 3, 0])
+        self.assertEqual(cons.prox(-2), 0)
+
+    def test_LinearEquality(self):
+        A = np.identity(2)
+        b = np.array([1,1])
+        cons = convex.LinearEquality(A, b)
+        proj = cons.prox(b)  # the proj of b should just be b
+        self.assert_arrays_close(proj, b)
+
+    def test_L2Ball(self):
+        cons1 = convex.L2Ball(1)
+        self.assert_arrays_close(cons1.prox([0,0,0]), [0,0,0])
+        self.assert_arrays_close(cons1.prox([1,0,0]), [1,0,0])
+        self.assert_arrays_close(cons1.prox([0.5,0,0]), [0.5,0,0])
+        self.assert_arrays_close(cons1.prox([1,1,1]), np.array([1,1,1])/np.sqrt(3))
+        self.assert_arrays_close(cons1.prox([1,-1,1]), np.array([1,-1,1])/np.sqrt(3))
+
+        cons4 = convex.L2Ball(4)
+        self.assert_arrays_close(cons4.prox([0,0,0]), [0,0,0])
+        self.assert_arrays_close(cons4.prox([1,0,0]), [1,0,0])
+        self.assert_arrays_close(cons4.prox([0.5,0,0]), [0.5,0,0])
+        self.assert_arrays_close(cons4.prox([-4,3,0]), np.array([-4,3,0])/(5/4))
+
+    def test_project_l1_ball(self):
+        # interior points are fixed points of the projection
+        v = np.array([0.5, 0.0, 0.0])
+        self.assert_arrays_close(convex.project_l1_ball(v, z=1), v)
+        # exterior points land on the l1 sphere
+        w = convex.project_l1_ball(np.array([2.0, -2.0]), z=1)
+        self.assertAlmostEqual(np.abs(w).sum(), 1.0)
+
+    def test_Isotonic(self):
+        cons = convex.Isotonic(increasing=True)
+        for v in [
+            np.arange(5),
+            np.array([-1, 0, 2, 3, -2]),
+            np.array([-1, 3, 0, 3, 2])
+        ]:
+            result = cons.prox(v)
+            lags = result[1:] - result[:-1]
+            self.assertTrue((lags >= 0).all())
+
+        cons = convex.Isotonic(increasing=False)
+        for v in [
+            np.arange(5),
+            np.array([-1, 0, 2, 3, -2]),
+            np.array([-1, 3, 0, 3, 2])
+        ]:
+            result = cons.prox(v)
+            lags = result[1:] - result[:-1]
+            self.assertTrue((lags <= 0).all())