
Commit 044e765

modify aistudio download adr (#2654)
1 parent f0f695a commit 044e765

20 files changed: +82 -91 lines
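The change is uniform across the touched files: every test fixture previously downloaded from the test_paddleformers organization on AI Studio is now downloaded from Paddleformers. A minimal before/after sketch of the pattern follows; the import path is an assumption based on the package name, since the diff shows only the from_pretrained calls.

# Sketch of the rename this commit applies everywhere; the import path
# below is an assumption (the diff shows only the from_pretrained calls).
from paddleformers.transformers import AutoModelForCausalLM, AutoTokenizer

# Before: fixtures lived under the "test_paddleformers" org, e.g.
#   tokenizer = AutoTokenizer.from_pretrained("test_paddleformers/tiny-random-llama")

# After: the same fixtures resolve under the "Paddleformers" org.
tokenizer = AutoTokenizer.from_pretrained("Paddleformers/tiny-random-llama")
model = AutoModelForCausalLM.from_pretrained("Paddleformers/tiny-random-llama")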

tests/generation/test_streamers.py

Lines changed: 6 additions & 6 deletions
@@ -37,8 +37,8 @@ def get_inputs(self, model):
         }

     def test_text_streamer_matches_non_streaming(self):
-        tokenizer = AutoTokenizer.from_pretrained("test_paddleformers/tiny-random-llama")
-        model = AutoModelForCausalLM.from_pretrained("test_paddleformers/tiny-random-llama")
+        tokenizer = AutoTokenizer.from_pretrained("Paddleformers/tiny-random-llama")
+        model = AutoModelForCausalLM.from_pretrained("Paddleformers/tiny-random-llama")
         model.config.eos_token_id = -1

         input_kwargs = self.get_inputs(model)
@@ -54,8 +54,8 @@ def test_text_streamer_matches_non_streaming(self):
         self.assertEqual(streamer_text, greedy_text)

     def test_iterator_streamer_matches_non_streaming(self):
-        tokenizer = AutoTokenizer.from_pretrained("test_paddleformers/tiny-random-llama")
-        model = AutoModelForCausalLM.from_pretrained("test_paddleformers/tiny-random-llama")
+        tokenizer = AutoTokenizer.from_pretrained("Paddleformers/tiny-random-llama")
+        model = AutoModelForCausalLM.from_pretrained("Paddleformers/tiny-random-llama")
         model.config.eos_token_id = -1

         input_kwargs = self.get_inputs(model)
@@ -95,8 +95,8 @@ def test_text_streamer_decode_kwargs(self):
         self.assertEqual(streamer_text_tokenized.input_ids.shape, [1, 1])

     def test_iterator_streamer_timeout(self):
-        tokenizer = AutoTokenizer.from_pretrained("test_paddleformers/tiny-random-llama")
-        model = AutoModelForCausalLM.from_pretrained("test_paddleformers/tiny-random-llama")
+        tokenizer = AutoTokenizer.from_pretrained("Paddleformers/tiny-random-llama")
+        model = AutoModelForCausalLM.from_pretrained("Paddleformers/tiny-random-llama")
         model.config.eos_token_id = -1

         input_kwargs = self.get_inputs(model)

tests/generation/test_synced_gpus.py

Lines changed: 2 additions & 2 deletions
@@ -35,8 +35,8 @@ def test_synced_gpus_greedy(self):


 if __name__ == "__main__":
-    tokenizer = AutoTokenizer.from_pretrained("test_paddleformers/tiny-random-llama")
-    model = AutoModelForCausalLM.from_pretrained("test_paddleformers/tiny-random-llama")
+    tokenizer = AutoTokenizer.from_pretrained("Paddleformers/tiny-random-llama")
+    model = AutoModelForCausalLM.from_pretrained("Paddleformers/tiny-random-llama")
     model.config.eos_token_id = -1
     world_size = paddle.distributed.get_world_size()

tests/mergekit/test_merge_model.py

Lines changed: 2 additions & 2 deletions
@@ -26,7 +26,7 @@ class TestMergeModel(unittest.TestCase):
     @parameterized.expand([("slerp",), ("della",), ("dare_linear",), ("ties",)])
     def test_merge_model_np(self, merge_method):
         with TemporaryDirectory() as tempdir:
-            model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert", dtype="bfloat16")
+            model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert", dtype="bfloat16")
             pd_path = os.path.join(tempdir, "pd_model")
             model.save_pretrained(pd_path)
             safe_path = os.path.join(tempdir, "safe_model")
@@ -70,7 +70,7 @@ def test_merge_model_np(self, merge_method):
     @parameterized.expand([("slerp",), ("della",), ("dare_linear",), ("ties",)])
     def test_merge_model_pd(self, merge_method):
         with TemporaryDirectory() as tempdir:
-            model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert", dtype="bfloat16")
+            model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert", dtype="bfloat16")
             pd_path = os.path.join(tempdir, "pd_model")
             model.save_pretrained(pd_path)
             safe_path = os.path.join(tempdir, "safe_model")
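The merge tests wrap the renamed checkpoint in a save-then-reload round trip. A minimal sketch of that flow, using only the calls visible in the hunks above (the import path is again an assumption):

import os
from tempfile import TemporaryDirectory

from paddleformers.transformers import AutoModel  # import path assumed

with TemporaryDirectory() as tempdir:
    # Pull the tiny fixture from the renamed "Paddleformers" org.
    model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert", dtype="bfloat16")
    pd_path = os.path.join(tempdir, "pd_model")
    model.save_pretrained(pd_path)  # write a local copy in Paddle format
    reloaded = AutoModel.from_pretrained(pd_path)  # reload from the local path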

tests/peft/test_lokr.py

Lines changed: 5 additions & 5 deletions
@@ -34,7 +34,7 @@
     "decompose_both": False,
 }
 DEFAULT_MODEL_TEST_CONFIG = {
-    "base_model_name_or_path": "test_paddleformers/tiny-random-bert",
+    "base_model_name_or_path": "Paddleformers/tiny-random-bert",
     "target_modules": [".*q_proj*.", ".*v_proj*."],
     "lokr_alpha": 8,
     "lokr_dim": 8,
@@ -155,13 +155,13 @@ class TestLoKrModel(unittest.TestCase):
     def test_tp_raise_exception(self):
         with self.assertRaises(NotImplementedError):
             lokr_config = LoKrConfig(**DEFAULT_MODEL_TEST_CONFIG, tensor_parallel_degree=2)
-            model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert")
+            model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert")
             lokr_model = LoKrModel(model, lokr_config)
             lokr_model.eval()

     def test_lokr_model_restore(self):
         lokr_config = LoKrConfig(**DEFAULT_MODEL_TEST_CONFIG)
-        model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert")
+        model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert")
         input_ids = paddle.to_tensor(np.random.randint(100, 200, [1, 20]))
         model.eval()
         original_results_1 = model(input_ids)
@@ -177,7 +177,7 @@ def test_lokr_model_restore(self):
     def test_lokr_model_constructor(self):
         lokr_config = LoKrConfig(**DEFAULT_MODEL_TEST_CONFIG)
         model = AutoModel.from_pretrained(
-            "test_paddleformers/tiny-random-bert",
+            "Paddleformers/tiny-random-bert",
             hidden_dropout_prob=0,
             attention_probs_dropout_prob=0,
         )
@@ -194,7 +194,7 @@ def test_lokr_model_save_load(self):
         with TemporaryDirectory() as tempdir:
             input_ids = paddle.to_tensor(np.random.randint(100, 200, [1, 20]))
             lokr_config = LoKrConfig(**DEFAULT_MODEL_TEST_CONFIG)
-            model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert")
+            model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert")
             lokr_model = LoKrModel(model, lokr_config)
             lokr_model.eval()
             original_results = lokr_model(input_ids)

tests/peft/test_lora.py

Lines changed: 4 additions & 4 deletions
@@ -89,7 +89,7 @@ def test_lora_model_restore(self):
             enable_lora_list=[None, [True, False]],
             head_dim=2,
         )
-        model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert")
+        model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert")
         input_ids = paddle.to_tensor(np.random.randint(100, 200, [1, 20]))
         model.eval()
         original_results_1 = model(input_ids)
@@ -114,7 +114,7 @@ def test_lora_model_constructor(self, bias):
         )
         # turn off plm dropout for to test train vs test
         model = AutoModel.from_pretrained(
-            "test_paddleformers/tiny-random-bert",
+            "Paddleformers/tiny-random-bert",
             hidden_dropout_prob=0,
             attention_probs_dropout_prob=0,
         )
@@ -150,7 +150,7 @@ def test_lora_model_save_load(self):
             r=4,
             lora_alpha=8,
         )
-        model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert")
+        model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert")
         lora_model = LoRAModel(model, lora_config)
         lora_model.eval()
         original_results = lora_model(input_ids)
@@ -173,7 +173,7 @@ def test_lora_module_raise_exception(self):
             lora_alpha=8,
             enable_lora_list=None,
         )
-        model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert")
+        model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert")
         with self.assertRaises(ValueError):
             LoRAModel(model, lora_config)
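The PEFT tests above all share the same wiring: build a LoRAConfig, load the tiny base model, and wrap it in LoRAModel. A condensed sketch of that pattern, with assumed import locations (only the calls themselves appear in the diff):

# Assumed import locations; the diff shows only the calls.
from paddleformers.peft import LoRAConfig, LoRAModel
from paddleformers.transformers import AutoModel

lora_config = LoRAConfig(target_modules=[".*q_proj.*", ".*v_proj.*"], r=4, lora_alpha=8)
model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert")
lora_model = LoRAModel(model, lora_config)
lora_model.mark_only_lora_as_trainable()  # freeze all weights except the LoRA adapters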

tests/peft/test_lorapro.py

Lines changed: 5 additions & 5 deletions
@@ -100,7 +100,7 @@ def test_lorapro_model_restore(self):
             head_dim=2,
             lorapro=True,
         )
-        model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert")
+        model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert")
        input_ids = paddle.to_tensor(np.random.randint(100, 200, [1, 20]))
        model.eval()
        original_results_1 = model(input_ids)
@@ -126,7 +126,7 @@ def test_lorapro_model_constructor(self, bias):
         )
         # turn off plm dropout for to test train vs test
         model = AutoModel.from_pretrained(
-            "test_paddleformers/tiny-random-bert",
+            "Paddleformers/tiny-random-bert",
             hidden_dropout_prob=0,
             attention_probs_dropout_prob=0,
         )
@@ -158,7 +158,7 @@ def test_lorapro_model_save_load(self):
         with TemporaryDirectory() as tempdir:
             input_ids = paddle.to_tensor(np.random.randint(100, 200, [1, 20]))
             lorapro_config = LoRAConfig(target_modules=[".*q_proj.*", ".*v_proj.*"], r=4, lora_alpha=8, lorapro=True)
-            model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert")
+            model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert")
             lorapro_model = LoRAModel(model, lorapro_config)
             lorapro_model.eval()
             original_results = lorapro_model(input_ids)
@@ -186,7 +186,7 @@ def test_lorapro_modes(self, x_mode):
             lorapro=True,
         )

-        model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert")
+        model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert")
         lorapro_model = LoRAModel(model, lorapro_config)
         lorapro_model.mark_only_lora_as_trainable()

@@ -218,7 +218,7 @@ def test_lorapro_module_raise_exception(self):
         lorapro_config = LoRAConfig(
             target_modules=[".*norm1.*"], r=4, lora_alpha=8, enable_lora_list=None, lorapro=True
         )
-        model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert")
+        model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert")
         with self.assertRaises(ValueError):
             LoRAModel(model, lorapro_config)

tests/peft/test_mora.py

Lines changed: 4 additions & 4 deletions
@@ -101,7 +101,7 @@ def test_mora_model_restore(self):
             head_dim=2,
             use_mora=True,
         )
-        model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert")
+        model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert")
         input_ids = paddle.to_tensor(np.random.randint(100, 200, [1, 20]))
         model.eval()
         original_results_1 = model(input_ids)
@@ -127,7 +127,7 @@ def test_mora_model_constructor(self, bias):
         )
         # turn off plm dropout for to test train vs test
         model = AutoModel.from_pretrained(
-            "test_paddleformers/tiny-random-bert",
+            "Paddleformers/tiny-random-bert",
             hidden_dropout_prob=0,
             attention_probs_dropout_prob=0,
         )
@@ -159,7 +159,7 @@ def test_mora_model_save_load(self):
         with TemporaryDirectory() as tempdir:
             input_ids = paddle.to_tensor(np.random.randint(100, 200, [1, 20]))
             mora_config = LoRAConfig(target_modules=[".*q_proj.*", ".*v_proj.*"], r=4, lora_alpha=8, use_mora=True)
-            model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert")
+            model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert")
             mora_model = LoRAModel(model, mora_config)
             mora_model.eval()
             original_results = mora_model(input_ids)
@@ -177,7 +177,7 @@ def test_mora_model_save_load(self):

     def test_lora_module_raise_exception(self):
         mora_config = LoRAConfig(target_modules=[".*norm1.*"], r=4, lora_alpha=8, enable_lora_list=None, use_mora=True)
-        model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert")
+        model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert")
         with self.assertRaises(ValueError):
             LoRAModel(model, mora_config)

tests/peft/test_mos_lora.py

Lines changed: 5 additions & 5 deletions
@@ -104,7 +104,7 @@ def test_lora_model_restore(self):
             head_dim=2,
             lora_use_mixer=True,
         )
-        model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert")
+        model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert")
         input_ids = paddle.to_tensor(np.random.randint(100, 200, [1, 20]))
         model.eval()
         original_results_1 = model(input_ids)
@@ -127,7 +127,7 @@ def test_parallel_support(self):
             lora_use_mixer=True,
             tensor_parallel_degree=2,
         )
-        model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert")
+        model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert")
         model.eval()
         with self.assertRaises(NotImplementedError):
             LoRAModel(model, lora_config)
@@ -145,7 +145,7 @@ def test_lora_model_constructor(self, bias):
         )
         # turn off plm dropout for to test train vs test
         model = AutoModel.from_pretrained(
-            "test_paddleformers/tiny-random-bert",
+            "Paddleformers/tiny-random-bert",
             hidden_dropout_prob=0,
             attention_probs_dropout_prob=0,
         )
@@ -179,7 +179,7 @@ def test_lora_model_save_load(self):
             lora_config = LoRAConfig(
                 target_modules=[".*q_proj.*", ".*v_proj.*"], r=4, lora_alpha=8, lora_use_mixer=True
             )
-            model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert")
+            model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert")
             lora_model = LoRAModel(model, lora_config)
             lora_model.eval()
             original_results = lora_model(input_ids)
@@ -199,7 +199,7 @@ def test_lora_module_raise_exception(self):
         lora_config = LoRAConfig(
             target_modules=[".*norm1.*"], r=4, lora_alpha=8, enable_lora_list=None, lora_use_mixer=True
         )
-        model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert")
+        model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert")
         with self.assertRaises(ValueError):
             LoRAModel(model, lora_config)

tests/peft/test_quant_lora.py

Lines changed: 1 addition & 1 deletion
@@ -102,7 +102,7 @@ def setUpClass(cls):
             r=4,
             lora_alpha=8,
         )
-        cls.model = AutoModel.from_pretrained("test_paddleformers/tiny-random-bert")
+        cls.model = AutoModel.from_pretrained("Paddleformers/tiny-random-bert")
         cls.lora_model = LoRAModel(cls.model, lora_config)
         cls.lora_model.mark_only_lora_as_trainable()
         # lora_B parameter is initialized to 0, therefore AB = 0 and W + AB = W
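The trailing comment in this hunk explains why the test can compare the wrapped model against the base model directly: lora_B starts at zero, so the adapter contributes nothing before training. A small NumPy illustration of that identity (illustrative only, not PaddleFormers code):

import numpy as np

rng = np.random.default_rng(0)
d, r = 8, 4
W = rng.normal(size=(d, d))   # frozen base weight
A = rng.normal(size=(r, d))   # lora_A: random init
B = np.zeros((d, r))          # lora_B: zero init, as the comment notes

x = rng.normal(size=(1, d))
# With B == 0, the low-rank update BA is zero, so W + BA == W.
assert np.allclose(x @ W.T, x @ (W + B @ A).T)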

tests/peft/test_reft.py

Lines changed: 4 additions & 4 deletions
@@ -37,7 +37,7 @@

 class TestReftDataCollator(unittest.TestCase):
     def test_call(self):
-        model_name = "test_paddleformers/tiny-random-llama"
+        model_name = "Paddleformers/tiny-random-llama"
         tokenizer = AutoTokenizer.from_pretrained(
             model_name,
             model_max_length=512,
@@ -79,7 +79,7 @@ def test_set_seed(self):
         set_seed(66)

     def test_count_param(self):
-        model = AutoModelForCausalLM.from_pretrained("test_paddleformers/tiny-random-llama")
+        model = AutoModelForCausalLM.from_pretrained("Paddleformers/tiny-random-llama")
         count_parameters(model)

@@ -191,7 +191,7 @@ def test_load_state_dict(self):

 class TestReftModel(unittest.TestCase):
     def test_get_reft_model(self):
-        model = AutoModelForCausalLM.from_pretrained("test_paddleformers/tiny-random-llama")
+        model = AutoModelForCausalLM.from_pretrained("Paddleformers/tiny-random-llama")
         layers = [0]
         representations = [
             {
@@ -216,7 +216,7 @@ def test_get_reft_model(self):
         self.assertTrue(type(reft_model), ReFTModel)

     def test_reft_model_forward(self):
-        model = AutoModelForCausalLM.from_pretrained("test_paddleformers/tiny-random-llama")
+        model = AutoModelForCausalLM.from_pretrained("Paddleformers/tiny-random-llama")

         layers = [0]
         representations = [
