We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 8212265 · commit aa328ed (Copy full SHA for aa328ed)
modelopt/torch/speculative/plugins/megatron_eagle.py
@@ -1404,6 +1404,8 @@ def pseudo_speculative_generate(
1404
)
1405
if self.config.sequence_parallel:
1406
gathered_embedding = gather_from_sequence_parallel_region(eagle_inputs["embedding"])
1407
+ else:
1408
+ gathered_embedding = eagle_inputs["embedding"]
1409
if self.eagle_config.parallel_draft_step > 1:
1410
# Replace dummy hidden_states with embedding for mask tokens
1411
padded_hidden_states[
0 commit comments