@@ -14,6 +14,8 @@ class Nonlinear(ProxOperator):
1414 - ``fun``: a method evaluating the generic function :math:`f`
1515 - ``grad``: a method evaluating the gradient of the generic function
1616 :math:`f`
17+ - ``fungrad``: a method evaluating both the generic function :math:`f`
18+ and its gradient
1719 - ``optimize``: a method that solves the optimization problem associated
1820 with the proximal operator of :math:`f`. Note that the
1921 ``gradprox`` method must be used (instead of ``grad``) as this will
@@ -58,6 +60,12 @@ def _funprox(self, x, tau):
5860 def _gradprox (self , x , tau ):
5961 return self .grad (x ) + 1. / tau * (x - self .y )
6062
63+ def _fungradprox (self , x , tau ):
64+ f , g = self .fungrad (x )
65+ f = f + 1. / (2 * tau ) * ((x - self .y ) ** 2 ).sum ()
66+ g = g + 1. / tau * (x - self .y )
67+ return f , g
68+
def fun(self, x):
    """Evaluate the generic function ``f`` at ``x``.

    Subclasses must override this method.

    Raises
    ------
    NotImplementedError
        Always, until a subclass provides an implementation.
    """
    # Note the trailing space before the closing quote: the adjacent
    # string literals are concatenated, so each piece must end with one
    # (the original read "implemented.Refer" with no separator).
    raise NotImplementedError('The method fun has not been implemented. '
                              'Refer to the documentation for details on '
                              'how to subclass this operator.')
def grad(self, x):
    """Evaluate the gradient of the generic function ``f`` at ``x``.

    Subclasses must override this method.

    Raises
    ------
    NotImplementedError
        Always, until a subclass provides an implementation.
    """
    # Trailing space before the closing quote keeps the concatenated
    # message readable (original produced "implemented.Refer").
    raise NotImplementedError('The method grad has not been implemented. '
                              'Refer to the documentation for details on '
                              'how to subclass this operator.')
def fungrad(self, x):
    """Evaluate both the generic function ``f`` and its gradient at ``x``.

    Subclasses must override this method.

    Raises
    ------
    NotImplementedError
        Always, until a subclass provides an implementation.
    """
    # Fix copy-paste bug: the message previously named ``grad`` instead of
    # ``fungrad``; also add the missing space between the concatenated
    # string pieces ("implemented.Refer").
    raise NotImplementedError('The method fungrad has not been implemented. '
                              'Refer to the documentation for details on '
                              'how to subclass this operator.')
6981 def optimize (self ):
7082 raise NotImplementedError ('The method optimize has not been implemented.'
7183 'Refer to the documentation for details on '
0 commit comments