Commit 48088de

Replace more_terms by normalization
1 parent e34b656 commit 48088de

File tree

1 file changed: +6 -6 lines changed

tensorflow_probability/python/math/special.py

Lines changed: 6 additions & 6 deletions
@@ -249,14 +249,14 @@ def _betainc_der_continued_fraction(a, b, x, dtype, where):
   cf, cf_grad_a, cf_grad_b = _betainc_modified_lentz_method(
       a, b, x, dtype, where)
 
-  more_terms = tf.math.exp(
+  normalization = tf.math.exp(
       tf.math.xlogy(a, x) + tf.math.xlog1py(b, -x) -
       tf.math.log(a) - lbeta(a, b))
 
   digamma_apb = tf.math.digamma(a + b)
-  grad_a = more_terms * (cf_grad_a + cf * (tf.math.log(x) -
+  grad_a = normalization * (cf_grad_a + cf * (tf.math.log(x) -
       tf.math.reciprocal(a) + digamma_apb - tf.math.digamma(a)))
-  grad_b = more_terms * (cf_grad_b + cf * (tf.math.log1p(-x) +
+  grad_b = normalization * (cf_grad_b + cf * (tf.math.log1p(-x) +
       digamma_apb - tf.math.digamma(b)))
 
   # If we are taking advantage of the symmetry relation, then we have to
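Aside (not part of the commit): the renamed factor is the prefactor of the continued-fraction representation that this function appears to use, I_x(a, b) = x^a (1-x)^b / (a B(a, b)) * CF(a, b, x), so the grad_a and grad_b lines are the product rule applied to that factorization. A sketch of the algebra in LaTeX, under that assumed factorization (CF and its parameter derivatives come from _betainc_modified_lentz_method):

% Sketch only: assumes I_x(a,b) = x^a (1-x)^b / (a B(a,b)) * CF(a,b,x).
% The underbraced prefactor is what the diff renames from more_terms to normalization.
\frac{\partial I_x(a,b)}{\partial a}
    = \underbrace{\frac{x^a (1-x)^b}{a\,B(a,b)}}_{\texttt{normalization}}
      \left( \frac{\partial \mathrm{CF}}{\partial a}
             + \mathrm{CF}\,\Big( \ln x - \tfrac{1}{a} + \psi(a+b) - \psi(a) \Big) \right)

This follows because \partial_a \ln\big[ x^a (1-x)^b / (a B(a,b)) \big] = \ln x - 1/a + \psi(a+b) - \psi(a), where \psi is the digamma function and lbeta(a, b) = \ln B(a, b). The grad_b line is the same pattern with \ln(1-x) and \psi(b), and with no reciprocal term because b does not enter the 1/a factor.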
@@ -344,13 +344,13 @@ def power_series_evaluation(should_stop, values, gradients):
     _, _, series_sum = values
     _, series_grad_a, series_grad_b = gradients
 
-    more_terms = tf.math.exp(
+    normalization = tf.math.exp(
         tf.math.xlogy(safe_a, safe_x) - lbeta(safe_a, safe_b))
 
     digamma_apb = tf.math.digamma(safe_a + safe_b)
-    grad_a = more_terms * (series_grad_a + series_sum * (
+    grad_a = normalization * (series_grad_a + series_sum * (
         digamma_apb - tf.math.digamma(safe_a) + tf.math.log(safe_x)))
-    grad_b = more_terms * (series_grad_b + series_sum * (
+    grad_b = normalization * (series_grad_b + series_sum * (
         digamma_apb - tf.math.digamma(safe_b)))
 
     # If we are taking advantage of the symmetry relation, then we have to
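Aside (not part of the commit): in the power-series branch the renamed factor is exp(a * log(x) - lbeta(a, b)) = x^a / B(a, b), and the grad_a and grad_b lines are again the product rule against the series sum, using d/da log(x^a / B(a, b)) = log(x) + digamma(a+b) - digamma(a) and d/db log(x^a / B(a, b)) = digamma(a+b) - digamma(b). Below is a minimal sketch of a finite-difference check that exercises these analytic parameter gradients through the public entry point; it assumes a working tensorflow / tensorflow_probability install and that tfp.math.betainc routes through this file's derivative code.

import tensorflow as tf
import tensorflow_probability as tfp

a = tf.constant(2.5, dtype=tf.float64)
b = tf.constant(1.5, dtype=tf.float64)
x = tf.constant(0.3, dtype=tf.float64)

# Analytic gradients w.r.t. the parameters (the quantities special.py computes).
with tf.GradientTape() as tape:
  tape.watch([a, b])
  y = tfp.math.betainc(a, b, x)
grad_a, grad_b = tape.gradient(y, [a, b])

# Central finite differences as an independent reference.
eps = tf.constant(1e-6, dtype=tf.float64)
fd_a = (tfp.math.betainc(a + eps, b, x) - tfp.math.betainc(a - eps, b, x)) / (2. * eps)
fd_b = (tfp.math.betainc(a, b + eps, x) - tfp.math.betainc(a, b - eps, x)) / (2. * eps)

print(grad_a.numpy(), fd_a.numpy())  # expect agreement to roughly 1e-6
print(grad_b.numpy(), fd_b.numpy())

The same check can be pointed at parameter regimes that select either the continued-fraction or the power-series branch, since the branch choice is internal to special.py.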

0 commit comments