Skip to content

Commit f3ac4d8

Browse files
authored
Adding L1 Decay Regularizer (#5173)
1 parent 9ecebb2 commit f3ac4d8

File tree

2 files changed

+77
-1
lines changed

2 files changed

+77
-1
lines changed

python/paddle/v2/framework/regularizer.py

Lines changed: 43 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
import paddle.v2.framework.framework as framework
22

3-
__all__ = ['append_regularization_ops', 'L2DecayRegularizer']
3+
__all__ = [
4+
'append_regularization_ops', 'L2DecayRegularizer', 'L1DecayRegularizer'
5+
]
46

57

68
def append_regularization_ops(parameters_and_grads):
@@ -97,3 +99,43 @@ def __call__(self, param, block):
9799
attrs={"scale": self._regularization_coeff})
98100

99101
return decay
102+
103+
104+
class L1DecayRegularizer(WeightDecayRegularizer):
    """L1 (Lasso) weight decay regularization.

    Adds a decay term of ``reg_coeff * sign(parameter)`` — the scaled
    (sub)gradient of the L1 norm of the parameter.
    """

    def __init__(self, regularization_coeff=0.0):
        assert regularization_coeff is not None
        super(L1DecayRegularizer, self).__init__()
        self._regularization_coeff = regularization_coeff

    def __call__(self, param, block):
        """Append ops computing the L1 weight-decay term for ``param``.

        L1WeightDecay = reg_coeff * sign(parameter)

        Args:
            param: parameter variable for which regularization is applied
            block: block in which variable is to be created

        Returns:
            new variable holding the weight-decay term
        """
        assert isinstance(param, framework.Parameter)
        assert isinstance(block, framework.Block)

        # Single output variable; both ops below write into it in turn.
        decay_var = block.create_var(
            dtype="float32", shape=param.shape, lod_level=param.lod_level)

        # decay_var = sign(param)
        block.append_op(
            type='sign', inputs={"X": param}, outputs={"Out": decay_var})

        # decay_var = reg_coeff * decay_var
        block.append_op(
            type='scale',
            inputs={"X": decay_var},
            outputs={"Out": decay_var},
            attrs={"scale": self._regularization_coeff})

        return decay_var

python/paddle/v2/framework/tests/test_regularizer.py

Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -39,5 +39,39 @@ def test_l2decay_regularizer(self):
3939
self.assertEqual(block.ops[-2].type, 'scale')
4040

4141

42+
class TestL1DecayRegularizer(unittest.TestCase):
    """Verifies that L1DecayRegularizer appends sign + scale ops (plus the
    elementwise_add that merges the decay term into the gradient)."""

    # NOTE: the method was named ``test_l2decay_regularizer`` — a copy-paste
    # from the L2 test class — although it exercises L1DecayRegularizer.
    # Renamed so the test name matches what it tests; discovery still works
    # because the ``test_`` prefix is preserved.
    def test_l1decay_regularizer(self):
        program = framework.Program()
        block = program.global_block()
        # Parameter carrying an L1 regularizer with coefficient 0.5.
        mul_x = block.create_parameter(
            dtype="float32",
            shape=[5, 10],
            lod_level=0,
            name="mul.x",
            regularizer=regularizer.L1DecayRegularizer(0.5))
        self.assertIsNotNone(mul_x.regularizer)
        self.assertIsInstance(mul_x.regularizer,
                              regularizer.L1DecayRegularizer)
        mul_y = block.create_var(
            dtype="float32", shape=[10, 8], lod_level=0, name="mul.y")
        mul_out = block.create_var(
            dtype="float32", shape=[5, 8], lod_level=0, name="mul.out")
        block.append_op(
            type="mul",
            inputs={"X": mul_x,
                    "Y": mul_y},
            outputs={"Out": mul_out},
            attrs={"x_num_col_dims": 1})
        params_grads = append_backward_ops(mul_out)
        self.assertEqual(len(params_grads), 1)
        count_ops = len(block.ops)
        params_grads = optimizer.append_regularization_ops(params_grads)
        self.assertEqual(len(params_grads), 1)
        # L1 decay must append exactly three ops, in this order:
        # sign -> scale -> elementwise_add.
        self.assertEqual(len(block.ops), count_ops + 3)
        self.assertEqual(block.ops[-1].type, 'elementwise_add')
        self.assertEqual(block.ops[-2].type, 'scale')
        self.assertEqual(block.ops[-3].type, 'sign')
75+
4276
if __name__ == '__main__':
4377
unittest.main()

0 commit comments

Comments
 (0)