 # ==============================================================================
 """Math operations."""

-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import tensorflow.compat.v1 as tf
+import tensorflow as tf


 __all__ = [
 ...
 ]


-@tf.RegisterGradient("IdentityFirstOfTwoInputs")
-def _identity_first_of_two_inputs_grad(op, grad):
-  """Gradient for `lower_bound` or `upper_bound` if `gradient == 'identity'`.
-
-  Args:
-    op: The op for which to calculate a gradient.
-    grad: Gradient with respect to the output of the op.
-
-  Returns:
-    Gradient with respect to the inputs of the op.
-  """
-  del op  # unused
-  return [grad, None]
-
-
-@tf.RegisterGradient("UpperBound")
-def _upper_bound_grad(op, grad):
-  """Gradient for `upper_bound` if `gradient == 'identity_if_towards'`.
-
-  Args:
-    op: The op for which to calculate a gradient.
-    grad: Gradient with respect to the output of the op.
-
-  Returns:
-    Gradient with respect to the inputs of the op.
-  """
-  inputs, bound = op.inputs
-  pass_through_if = tf.logical_or(inputs <= bound, grad > 0)
-  return [tf.cast(pass_through_if, grad.dtype) * grad, None]
-
-
-@tf.RegisterGradient("LowerBound")
-def _lower_bound_grad(op, grad):
-  """Gradient for `lower_bound` if `gradient == 'identity_if_towards'`.
-
-  Args:
-    op: The op for which to calculate a gradient.
-    grad: Gradient with respect to the output of the op.
-
-  Returns:
-    Gradient with respect to the inputs of the op.
-  """
-  inputs, bound = op.inputs
-  pass_through_if = tf.logical_or(inputs >= bound, grad < 0)
-  return [tf.cast(pass_through_if, grad.dtype) * grad, None]
-
-
-def upper_bound(inputs, bound, gradient="identity_if_towards", name=None):
+def upper_bound(inputs, bound, gradient="identity_if_towards",
+                name="upper_bound"):
   """Same as `tf.minimum`, but with helpful gradient for `inputs > bound`.

   This function behaves just like `tf.minimum`, but the behavior of the gradient
@@ -110,27 +60,37 @@ def upper_bound(inputs, bound, gradient="identity_if_towards", name=None):
   Raises:
     ValueError: for invalid value of `gradient`.
   """
-  try:
-    gradient = {
-        "identity_if_towards": "UpperBound",
-        "identity": "IdentityFirstOfTwoInputs",
-        "disconnected": None,
-    }[gradient]
-  except KeyError:
-    raise ValueError("Invalid value for `gradient`: '{}'.".format(gradient))
-
-  with tf.name_scope(name, "UpperBound", [inputs, bound]) as scope:
+  with tf.name_scope(name) as scope:
     inputs = tf.convert_to_tensor(inputs, name="inputs")
-    bound = tf.convert_to_tensor(
-        bound, name="bound", dtype=inputs.dtype)
-    if gradient:
-      with tf.get_default_graph().gradient_override_map({"Minimum": gradient}):
-        return tf.minimum(inputs, bound, name=scope)
-    else:
-      return tf.minimum(inputs, bound, name=scope)
+    bound = tf.convert_to_tensor(bound, name="bound", dtype=inputs.dtype)
+
+    def identity_if_towards_grad(grad):
+      """Gradient if gradient == 'identity_if_towards'."""
+      pass_through_if = tf.logical_or(inputs <= bound, grad > 0)
+      return (tf.cast(pass_through_if, grad.dtype) * grad, None)
+
+    def disconnected_grad(grad):
+      """Gradient if gradient == 'disconnected'."""
+      return (tf.cast(inputs <= bound, grad.dtype) * grad, None)
+
+    try:
+      gradient = {
+          "identity_if_towards": identity_if_towards_grad,
+          "identity": lambda grad: (grad, None),
+          "disconnected": disconnected_grad,
+      }[gradient]
+    except KeyError:
+      raise ValueError("Invalid value for `gradient`: '{}'.".format(gradient))
+
+    @tf.custom_gradient
+    def _upper_bound(inputs, bound):
+      return tf.minimum(inputs, bound, name=scope), gradient
+
+    return _upper_bound(inputs, bound)


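For orientation (this is not part of the diff), here is a minimal usage sketch of the new `upper_bound`, assuming the module is importable as `tensorflow_compression.python.ops.math_ops`; it shows how the `gradient` argument changes what flows back through elements that were clipped:

import tensorflow as tf
# Hypothetical import path, for illustration only; adjust to where this
# module actually lives in your checkout.
from tensorflow_compression.python.ops.math_ops import upper_bound

x = tf.constant([1.0, 3.0])   # the second element lies above the bound of 2.0
with tf.GradientTape() as tape:
  tape.watch(x)
  y = upper_bound(x, 2.0, gradient="identity_if_towards")
  loss = -tf.reduce_sum(y)    # upstream gradient d(loss)/dy == -1 everywhere
print(tape.gradient(loss, x))
# "identity_if_towards" blocks the gradient where x > bound and the descent
# step would push x even further past the bound, so this prints [-1., 0.].
# gradient="identity" would give [-1., -1.]; gradient="disconnected" also
# gives [-1., 0.] here, but unlike "identity_if_towards" it stays zero even
# when the descent step points back towards the feasible region.
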
-def lower_bound(inputs, bound, gradient="identity_if_towards", name=None):
+def lower_bound(inputs, bound, gradient="identity_if_towards",
+                name="lower_bound"):
   """Same as `tf.maximum`, but with helpful gradient for `inputs < bound`.

   This function behaves just like `tf.maximum`, but the behavior of the gradient
@@ -165,24 +125,33 @@ def lower_bound(inputs, bound, gradient="identity_if_towards", name=None):
   Raises:
     ValueError: for invalid value of `gradient`.
   """
-  try:
-    gradient = {
-        "identity_if_towards": "LowerBound",
-        "identity": "IdentityFirstOfTwoInputs",
-        "disconnected": None,
-    }[gradient]
-  except KeyError:
-    raise ValueError("Invalid value for `gradient`: '{}'.".format(gradient))
-
-  with tf.name_scope(name, "LowerBound", [inputs, bound]) as scope:
+  with tf.name_scope(name) as scope:
     inputs = tf.convert_to_tensor(inputs, name="inputs")
-    bound = tf.convert_to_tensor(
-        bound, name="bound", dtype=inputs.dtype)
-    if gradient:
-      with tf.get_default_graph().gradient_override_map({"Maximum": gradient}):
-        return tf.maximum(inputs, bound, name=scope)
-    else:
-      return tf.maximum(inputs, bound, name=scope)
+    bound = tf.convert_to_tensor(bound, name="bound", dtype=inputs.dtype)
+
+    def identity_if_towards_grad(grad):
+      """Gradient if gradient == 'identity_if_towards'."""
+      pass_through_if = tf.logical_or(inputs >= bound, grad < 0)
+      return (tf.cast(pass_through_if, grad.dtype) * grad, None)
+
+    def disconnected_grad(grad):
+      """Gradient if gradient == 'disconnected'."""
+      return (tf.cast(inputs >= bound, grad.dtype) * grad, None)
+
+    try:
+      gradient = {
+          "identity_if_towards": identity_if_towards_grad,
+          "identity": lambda grad: (grad, None),
+          "disconnected": disconnected_grad,
+      }[gradient]
+    except KeyError:
+      raise ValueError("Invalid value for `gradient`: '{}'.".format(gradient))
+
+    @tf.custom_gradient
+    def _lower_bound(inputs, bound):
+      return tf.maximum(inputs, bound, name=scope), gradient
+
+    return _lower_bound(inputs, bound)


 def perturb_and_apply(f, x, *args, u=None, x_plus_u=None, expected_grads=True):
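A note on the pattern used throughout this change: the old code attached gradients globally via `tf.RegisterGradient` and rerouted the `Minimum`/`Maximum` ops through `gradient_override_map`, a graph-only mechanism that is not exposed in the TF2 API surface, whereas `tf.custom_gradient` binds the gradient function locally to the wrapped computation and works eagerly as well as in graphs. A minimal, self-contained sketch of that pattern (the names here are illustrative only, not taken from this diff):

import tensorflow as tf

@tf.custom_gradient
def clipped_relu(x, cap):
  """Illustrative only: return the forward result plus a gradient closure."""
  y = tf.minimum(tf.nn.relu(x), cap)

  def grad(dy):
    # Pass the upstream gradient through only where the forward op was not
    # saturated; return None for the second argument, as the diff does for
    # `bound`.
    pass_through = tf.logical_and(x > 0, x < cap)
    return tf.cast(pass_through, dy.dtype) * dy, None

  return y, grad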