This repository was archived by the owner on Jan 21, 2025. It is now read-only.

Commit 759b788

Author: Mesh TensorFlow Team

Change the interface to decode to a generator.

PiperOrigin-RevId: 354495680

1 parent 135f62f commit 759b788

File tree

1 file changed: +5 -7 lines changed

mesh_tensorflow/transformer/utils.py

Lines changed: 5 additions & 7 deletions
@@ -1099,8 +1099,8 @@ def decode(estimator,
       targets_vocabulary) tuple
     checkpoint_path: an optional string
 
-  Returns:
-    list of decoded strings
+  Yields:
+    decoded strings
   """
   result_iter = estimator.predict(
       input_fn, checkpoint_path=checkpoint_path)
@@ -1110,18 +1110,16 @@ def _maybe_detokenize(value, vocab):
       return value
     return vocab.decode([int(x) for x in value])
 
-  decodes = []
   for i, result in enumerate(result_iter):
     input_string = _maybe_detokenize(
         result["inputs"], inputs_vocabulary(vocabulary))
     output_string = _maybe_detokenize(
        result["outputs"], targets_vocabulary(vocabulary))
-    decodes.append(output_string)
+    yield output_string
    if i & (i - 1) == 0:
      # LOG every power of 2.
      tf.logging.info("decoded {}: {}".format(i, input_string))
      tf.logging.info(" -> {}".format(output_string))
-  return decodes
 
 
 @gin.configurable
@@ -1220,8 +1218,8 @@ def input_fn(params):
     return dataset
 
   checkpoint_step = get_step_from_checkpoint_path(checkpoint_path)
-  decodes = decode(
-      estimator, input_fn, vocabulary, checkpoint_path=checkpoint_path)
+  decodes = list(decode(
+      estimator, input_fn, vocabulary, checkpoint_path=checkpoint_path))
   # Remove any padded examples
   dataset_size = len(inputs) * repeats
   decodes = decodes[:dataset_size]
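
For illustration only, here is a minimal, self-contained sketch (not code from this repository) of the refactor pattern the commit applies: a function that accumulated results into a list and returned it becomes a generator that yields each decoded string as it is produced, and a caller that still needs the full list wraps the call in list(...), as the third hunk above does. The function names and toy data below are hypothetical stand-ins.

# Hypothetical sketch of the list -> generator interface change.

def decode_as_list(result_iter):
  # Old interface shape: accumulate everything, then return one list.
  decodes = []
  for result in result_iter:
    decodes.append(result.upper())  # stand-in for detokenization
  return decodes


def decode_as_generator(result_iter):
  # New interface shape: yield each decoded string as soon as it is ready.
  for result in result_iter:
    yield result.upper()  # stand-in for detokenization


if __name__ == "__main__":
  fake_results = ["hello", "world"]  # stand-in for estimator.predict(...) output
  # Callers that still need a list wrap the generator in list(...),
  # mirroring the change at the call site in this diff.
  assert decode_as_list(fake_results) == list(decode_as_generator(fake_results))
  # Callers that can consume outputs one at a time no longer need to hold
  # every decoded string in memory at once.
  for s in decode_as_generator(fake_results):
    print(s)

The practical effect is that large decode runs can stream outputs instead of materializing them all, while existing list-based call sites only need the list(...) wrapper shown above.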
