1 parent 8d79007 commit f15d53e
vllm/model_executor/models/mixtral.py
@@ -337,7 +337,7 @@ def forward(
        return hidden_states

    def load_weights(self, weights: Iterable[tuple[str,
-                                                  torch.Tensor]]) -> Set[str]:
+                                                  torch.Tensor]]) -> set[str]:
        if not self.split_qkv:
            stacked_params_mapping = [
                # (param_name, shard_name, shard_id)
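The change switches the `load_weights` return annotation from `typing.Set[str]` to the built-in `set[str]` generic (PEP 585, usable in annotations on Python 3.9+), so no `typing.Set` import is needed. A minimal sketch of the pattern, using a hypothetical toy class rather than the actual vLLM Mixtral implementation:

```python
from collections.abc import Iterable

import torch


class ToyModel:
    """Hypothetical stand-in for a model class; not the vLLM Mixtral code."""

    def load_weights(self, weights: Iterable[tuple[str,
                                                   torch.Tensor]]) -> set[str]:
        # Built-in ``set[str]`` (PEP 585) replaces ``typing.Set[str]``.
        loaded: set[str] = set()
        for name, _tensor in weights:
            loaded.add(name)
        return loaded


# Usage: feed (name, tensor) pairs and collect which parameter names were loaded.
names = ToyModel().load_weights([("w1", torch.zeros(2)), ("w2", torch.ones(2))])
print(names)  # {'w1', 'w2'}
```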