Skip to content

Commit b16613e

Browse files
committed
Replaced some model configs for quicker CI tests for LLMs
Signed-off-by: Dhiraj Kumar Sah <dhirajku@qti.qualcomm.com>
1 parent f6f3731 commit b16613e

File tree

1 file changed

+5
-31
lines changed

1 file changed

+5
-31
lines changed

tests/configs/causal_model_configs.json

Lines changed: 5 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -274,19 +274,6 @@
274274
]
275275
}
276276
},
277-
{
278-
"model_name": "hpcai-tech/grok-1",
279-
"model_type": null,
280-
"additional_params": {
281-
"max_position_embeddings": 128,
282-
"num_hidden_layers": 1,
283-
"num_attention_heads": 2,
284-
"hidden_size": 64,
285-
"intermediate_size": 256,
286-
"vocab_size": 131072,
287-
"num_key_value_heads": 1
288-
}
289-
},
290277
{
291278
"model_name": "neuralmagic/Llama-3.2-3B-Instruct-FP8",
292279
"model_type": "llama",
@@ -374,20 +361,7 @@
374361
}
375362
},
376363
{
377-
"model_name": "unsloth/gemma-2b",
378-
"model_type": "gemma",
379-
"additional_params": {
380-
"max_position_embeddings": 128,
381-
"num_hidden_layers": 1,
382-
"num_attention_heads": 2,
383-
"hidden_size": 64,
384-
"intermediate_size": 256,
385-
"vocab_size": 256000,
386-
"num_key_value_heads": 1
387-
}
388-
},
389-
{
390-
"model_name": "unsloth/gemma-2-2b",
364+
"model_name": "hf-internal-testing/tiny-random-Gemma2ForCausalLM",
391365
"model_type": "gemma2",
392366
"additional_params": {
393367
"max_position_embeddings": 128,
@@ -400,7 +374,7 @@
400374
}
401375
},
402376
{
403-
"model_name": "ibm-granite/granite-20b-code-base",
377+
"model_name": "hf-internal-testing/tiny-random-GPTBigCodeForCausalLM",
404378
"model_type": "gpt_bigcode",
405379
"additional_params": {
406380
"max_position_embeddings": 128,
@@ -430,7 +404,7 @@
430404
}
431405
},
432406
{
433-
"model_name": "mistralai/Mixtral-8x7B-Instruct-v0.1",
407+
"model_name": "hf-internal-testing/tiny-random-MixtralForCausalLM",
434408
"model_type": "mixtral",
435409
"additional_params": {
436410
"max_position_embeddings": 128,
@@ -443,7 +417,7 @@
443417
}
444418
},
445419
{
446-
"model_name": "meta-llama/Llama-3.2-1B",
420+
"model_name": "hf-internal-testing/tiny-random-LlamaForCausalLM",
447421
"model_type": "llama",
448422
"additional_params": {
449423
"max_position_embeddings": 128,
@@ -476,7 +450,7 @@
476450
}
477451
},
478452
{
479-
"model_name": "ibm-granite/granite-3.1-2b-instruct",
453+
"model_name": "hf-internal-testing/tiny-random-GraniteForCausalLM",
480454
"model_type": "granite",
481455
"additional_params": {
482456
"max_position_embeddings": 128,

0 commit comments

Comments (0)