
Commit cb3f30c

Upgrade transformers version to 4.36.0 (#2046)
1 parent f3e024b commit cb3f30c

3 files changed: 5 additions, 5 deletions


requirements-rocm.txt

Lines changed: 1 addition & 1 deletion
@@ -10,7 +10,7 @@ numpy
 tokenizers>=0.15.0
 huggingface_hub<0.18,>=0.16.4
 einops # Required for phi-1_5
-transformers >= 4.34.0 # Required for Mistral.
+transformers >= 4.36.0 # Required for Mixtral.
 fastapi
 uvicorn[standard]
 pydantic == 1.10.13 # Required for OpenAI server.

requirements.txt

Lines changed: 1 addition & 1 deletion
@@ -7,7 +7,7 @@ sentencepiece # Required for LLaMA tokenizer.
 numpy
 einops # Required for phi-1_5
 torch >= 2.1.1
-transformers >= 4.34.0 # Required for Mistral.
+transformers >= 4.36.0 # Required for Mixtral.
 xformers >= 0.0.23 # Required for CUDA 12.1.
 fastapi
 uvicorn[standard]
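
Both requirements files raise the floor from transformers 4.34.0 (needed for Mistral) to 4.36.0, the first release that ships MixtralConfig. Below is a minimal runtime check, not part of this commit, that mirrors the new pin; it uses only the standard library, packaging (already a transformers dependency), and transformers itself.

# Sketch only: confirm the installed transformers satisfies the new ">= 4.36.0" pin
# before importing the Mixtral config class that this commit starts relying on.
from importlib.metadata import version

from packaging.version import Version  # packaging ships as a transformers dependency

installed = Version(version("transformers"))
if installed < Version("4.36.0"):
    raise RuntimeError(
        f"transformers {installed} found; >= 4.36.0 is required for Mixtral.")

from transformers import MixtralConfig  # only importable from 4.36.0 onward

print(f"transformers {installed} provides {MixtralConfig.__name__}")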

vllm/model_executor/models/mixtral.py

Lines changed: 3 additions & 3 deletions
@@ -29,7 +29,7 @@
 import torch.nn.functional as F

 from torch import nn
-from transformers import MistralConfig
+from transformers import MixtralConfig

 try:
     import megablocks.ops as ops
@@ -395,7 +395,7 @@ class MixtralDecoderLayer(nn.Module):

     def __init__(
         self,
-        config: MistralConfig,
+        config: MixtralConfig,
     ) -> None:
         super().__init__()
         self.hidden_size = config.hidden_size
@@ -443,7 +443,7 @@ class MixtralForCausalLM(nn.Module):

     def __init__(
         self,
-        config: MistralConfig,
+        config: MixtralConfig,
         linear_method: Optional[LinearMethodBase] = None,
     ) -> None:
         super().__init__()
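
In the model file the commit only swaps the imported config class and the two constructor annotations from MistralConfig to MixtralConfig; the surrounding context shows the decoder layer reading config.hidden_size. A rough sketch of constructing that config object for a small test model follows, assuming the standard MixtralConfig fields from transformers 4.36.0; the sizes are illustrative placeholders, not values from this diff.

# Sketch only: the config class the updated annotations expect. Field names come
# from transformers.MixtralConfig; the tiny sizes here are placeholders.
from transformers import MixtralConfig

config = MixtralConfig(
    hidden_size=1024,           # read as config.hidden_size in MixtralDecoderLayer above
    intermediate_size=3584,
    num_hidden_layers=4,
    num_attention_heads=16,
    num_key_value_heads=4,
    num_local_experts=8,        # MoE fields that MistralConfig does not define
    num_experts_per_tok=2,
)

print(config.model_type)                     # "mixtral"
print(config.num_local_experts, config.num_experts_per_tok)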
