
Commit ea1fcb0

Seed transformers models in tests (#466)
Signed-off-by: Keval Morabia <[email protected]>
1 parent 37c4974 commit ea1fcb0

File tree: 1 file changed (+8 −0 lines)


tests/_test_utils/torch_model/transformers_models.py

Lines changed: 8 additions & 0 deletions
@@ -18,6 +18,7 @@
 
 import pytest
 import torch
+from _test_utils.torch_misc import set_seed
 from packaging.version import Version
 
 transformers = pytest.importorskip("transformers")
@@ -40,8 +41,11 @@
 
 import modelopt.torch.opt as mto
 
+SEED = 1234
+
 
 def get_tiny_qwen3(**config_kwargs) -> "Qwen3ForCausalLM":
+    set_seed(SEED)
     if Version(transformers.__version__) < Version("4.51"):
         pytest.skip("Qwen3ForCausalLM is not supported in transformers < 4.51")
 
@@ -61,6 +65,7 @@ def get_tiny_qwen3(**config_kwargs) -> "Qwen3ForCausalLM":
 
 
 def get_tiny_llama(**config_kwargs) -> LlamaForCausalLM:
+    set_seed(SEED)
     kwargs = {
         "hidden_size": 32,
         "intermediate_size": 32,
@@ -77,6 +82,7 @@ def get_tiny_llama(**config_kwargs) -> LlamaForCausalLM:
 
 
 def get_tiny_t5(**config_kwargs) -> T5ForConditionalGeneration:
+    set_seed(SEED)
     kwargs = {
         "vocab_size": 32,
         "d_model": 32,
@@ -95,6 +101,7 @@ def get_tiny_t5(**config_kwargs) -> T5ForConditionalGeneration:
 
 
 def get_tiny_gpt_oss(**config_kwargs) -> "GptOssForCausalLM":
+    set_seed(SEED)
     if Version(transformers.__version__) < Version("4.55"):
         pytest.skip("GptOssForCausalLM is not supported in transformers < 4.55")
 
@@ -145,6 +152,7 @@ def create_tiny_t5_dir(tmp_path: Path | str, with_tokenizer: bool = False, **con
 
 
 def create_tiny_bert_dir(tmp_path: Path) -> Path:
+    set_seed(SEED)
     model = BertForQuestionAnswering(
         BertConfig(
             vocab_size=64,
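The imported set_seed helper lives in _test_utils/torch_misc.py and is not part of this diff. A minimal sketch of what such a helper typically does, assuming it seeds the Python, NumPy, and PyTorch RNGs (the actual implementation in the repository may differ):

# Hypothetical sketch of the set_seed helper assumed by this diff;
# the real _test_utils.torch_misc.set_seed may behave differently.
import random

import numpy as np
import torch


def set_seed(seed: int) -> None:
    """Seed all common RNGs so tiny test models initialize deterministically."""
    random.seed(seed)                      # Python's built-in RNG
    np.random.seed(seed)                   # NumPy RNG
    torch.manual_seed(seed)                # PyTorch CPU RNG
    if torch.cuda.is_available():
        torch.cuda.manual_seed_all(seed)   # PyTorch CUDA RNGs on all devices

With each get_tiny_* / create_tiny_* factory calling set_seed(SEED) before constructing the model, repeated calls such as get_tiny_llama() should produce identically initialized weights, which keeps re-runs and cross-process comparisons in the tests stable.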
