Skip to content

Commit 6396bc5

Browse files
arjung and tensorflow-copybara
authored and committed
Add graph and adversarial loss values scalars to the summary.
This will allow visualization of the graph and adversarial losses in TensorBoard. PiperOrigin-RevId: 321660205
1 parent 0d50227 commit 6396bc5

File tree

2 files changed

+16
-4
lines changed

2 files changed

+16
-4
lines changed

neural_structured_learning/estimator/adversarial_regularization.py

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -121,12 +121,18 @@ def adv_model_fn(features, labels, mode, params=None, config=None):
121121

122122
# Runs the base model again to compute loss on adv_neighbor.
123123
adv_spec = base_fn(adv_neighbor, labels)
124+
scaled_adversarial_loss = adv_config.multiplier * adv_spec.loss
125+
tf.compat.v1.summary.scalar('loss/scaled_adversarial_loss',
126+
scaled_adversarial_loss)
124127

125-
final_loss = original_spec.loss + adv_config.multiplier * adv_spec.loss
128+
supervised_loss = original_spec.loss
129+
tf.compat.v1.summary.scalar('loss/supervised_loss', supervised_loss)
130+
131+
final_loss = supervised_loss + scaled_adversarial_loss
126132

127133
if not optimizer_fn:
128134
# Default to the Adagrad optimizer, the same as canned DNNEstimator.
129-
optimizer = tf.train.AdagradOptimizer(learning_rate=0.05)
135+
optimizer = tf.compat.v1.train.AdagradOptimizer(learning_rate=0.05)
130136
else:
131137
optimizer = optimizer_fn()
132138

neural_structured_learning/estimator/graph_regularization.py

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -144,11 +144,17 @@ def graph_reg_model_fn(features, labels, mode, params=None, config=None):
144144
nbr_embeddings,
145145
weights=nbr_weights,
146146
distance_config=graph_reg_config.distance_config)
147-
total_loss = base_spec.loss + graph_reg_config.multiplier * graph_loss
147+
scaled_graph_loss = graph_reg_config.multiplier * graph_loss
148+
tf.compat.v1.summary.scalar('loss/scaled_graph_loss', scaled_graph_loss)
149+
150+
supervised_loss = base_spec.loss
151+
tf.compat.v1.summary.scalar('loss/supervised_loss', supervised_loss)
152+
153+
total_loss = supervised_loss + scaled_graph_loss
148154

149155
if not optimizer_fn:
150156
# Default to Adagrad optimizer, the same as the canned DNNEstimator.
151-
optimizer = tf.train.AdagradOptimizer(learning_rate=0.05)
157+
optimizer = tf.compat.v1.train.AdagradOptimizer(learning_rate=0.05)
152158
else:
153159
optimizer = optimizer_fn()
154160
train_op = optimizer.minimize(

0 commit comments

Comments
 (0)