1 parent 2b52759 commit 8a07376
modelopt/torch/speculative/plugins/megatron_eagle.py
@@ -90,7 +90,7 @@ def dict_to_config(
         fp16=fp16,
         bf16=bf16,
         params_dtype=getattr(torch, architecture_config["torch_dtype"]),
-        pipeline_dtype=None,
+        pipeline_dtype=getattr(torch, architecture_config["torch_dtype"]),
         num_layers=architecture_config.get("num_hidden_layers"),
         hidden_size=architecture_config.get("hidden_size"),
         ffn_hidden_size=architecture_config.get("intermediate_size"),
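For context, the change sets `pipeline_dtype` from the model's configured `torch_dtype` instead of leaving it as `None`, matching how `params_dtype` is already derived. A minimal sketch of the dtype lookup, assuming `architecture_config["torch_dtype"]` holds a dtype name string such as "bfloat16" (the config dict below is hypothetical, for illustration only):

```python
import torch

# Hypothetical config dict standing in for the architecture_config
# passed to dict_to_config.
architecture_config = {"torch_dtype": "bfloat16"}

# getattr resolves the string name to the torch dtype object, so
# params_dtype and pipeline_dtype end up as the same dtype.
pipeline_dtype = getattr(torch, architecture_config["torch_dtype"])
print(pipeline_dtype)  # torch.bfloat16
```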