Skip to content

Commit ddb0285

Browse files
committed
change full gradient back to TF
1 parent 0fc5f62 commit ddb0285

File tree

1 file changed

+6
-3
lines changed

1 file changed

+6
-3
lines changed

batchglm/train/tf/nb_glm/estimator.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -396,9 +396,12 @@ def __init__(
396396
name="full_data_trainers_b_only"
397397
)
398398
with tf.name_scope("full_gradient"):
399-
# full_gradient = full_data_trainers.gradient[0][0]
400-
# full_gradient = tf.reduce_sum(tf.abs(full_gradient), axis=0)
401-
full_gradient = tf.reduce_sum(full_data_model.neg_jac, axis=0)
399+
# use same gradient as the optimizers
400+
full_gradient = full_data_trainers.gradient[0][0]
401+
full_gradient = tf.reduce_sum(tf.abs(full_gradient), axis=0)
402+
403+
# # the analytic Jacobian
404+
# full_gradient = tf.reduce_sum(full_data_model.neg_jac, axis=0)
402405
# full_gradient = tf.add_n(
403406
# [tf.reduce_sum(tf.abs(grad), axis=0) for (grad, var) in full_data_trainers.gradient])
404407

0 commit comments

Comments (0)