
Commit 95594d9

fix http
1 parent e078385 commit 95594d9

4 files changed (+14, -6 lines)

_unittests/ut_torch_models/test_hghub_model.py (1 addition, 0 deletions)

@@ -199,6 +199,7 @@ def _diff(c1, c2):
         for mid in load_models_testing():
             with self.subTest(mid=mid):
                 if mid in {
+                    "hf-internal-testing/tiny-random-BeitForImageClassification",
                     "hf-internal-testing/tiny-random-MaskFormerForInstanceSegmentation",
                     "hf-internal-testing/tiny-random-MoonshineForConditionalGeneration",
                     "fxmarty/pix2struct-tiny-random",

onnx_diagnostic/torch_models/hghub/hub_api.py (6 additions, 3 deletions)

@@ -1,4 +1,5 @@
 import functools
+import os
 from typing import Any, Dict, List, Optional, Union
 import transformers
 from huggingface_hub import HfApi, model_info
@@ -33,25 +34,27 @@ def get_cached_configuration(name: str) -> Optional[transformers.PretrainedConfig]:
     assert cached, "no cached configuration, which is weird"
     if name in cached:
         return cached[name]()
+    if os.environ.get("NOHTTP", ""):
+        raise AssertionError(f"Unable to find {name!r} in {sorted(cached)}")
     return None


 def get_pretrained_config(
-    model_id: str, trust_remote_code: bool = True, use_cached: bool = True
+    model_id: str, trust_remote_code: bool = True, use_preinstalled: bool = True
 ) -> Any:
     """
     Returns the config for a model_id.

     :param model_id: model id
     :param trust_remote_code: trust_remote_code,
         see :meth:`transformers.AutoConfig.from_pretrained`
-    :param used_cached: if cached, uses this version to avoid
+    :param use_preinstalled: if use_preinstalled, uses this version to avoid
         accessing the network, if available, it is returned by
         :func:`get_cached_configuration`, the cached list is mostly for
         unit tests
     :return: a configuration
     """
-    if use_cached:
+    if use_preinstalled:
         conf = get_cached_configuration(model_id)
         if conf is not None:
             return conf
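A short usage sketch of the changed API, assuming the module path shown in this diff. The NOHTTP behaviour is taken from the hunk above; that get_pretrained_config falls back to the Hugging Face Hub when no preinstalled configuration matches is an assumption:

    import os
    from onnx_diagnostic.torch_models.hghub.hub_api import get_pretrained_config

    # With NOHTTP set, asking for a model id that is not in the preinstalled
    # list now raises an AssertionError instead of silently returning None and
    # triggering a network call further down.
    os.environ["NOHTTP"] = "1"

    # "openai/whisper-tiny" is preinstalled (see hub_data_cached_configs.py below),
    # so this resolves locally; use_preinstalled replaces the former use_cached.
    config = get_pretrained_config("openai/whisper-tiny", use_preinstalled=True)
    print(type(config))  # expected: transformers.WhisperConfig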

onnx_diagnostic/torch_models/hghub/hub_data_cached_configs.py (2 additions, 2 deletions)

@@ -3259,8 +3259,8 @@ def _ccached_hf_m4_tiny_random_idefics():
     )


-def _ccached_openai_whisper_itny():
-    "openai/whipser-tiny"
+def _ccached_openai_whisper_tiny():
+    "openai/whisper-tiny"
     return transformers.WhisperConfig(
         **{
             "_name_or_path": "openai/whisper-tiny",

onnx_diagnostic/torch_models/hghub/model_inputs.py (5 additions, 1 deletion)

@@ -326,6 +326,7 @@ def get_untrained_model_with_inputs(
     verbose: int = 0,
     dynamic_rope: Optional[bool] = None,
     same_as_pretrained: bool = False,
+    use_preinstalled: bool = True,
 ) -> Dict[str, Any]:
     """
     Gets a non initialized model similar to the original model
@@ -342,6 +343,7 @@ def get_untrained_model_with_inputs(
     :param dynamic_rope: use dynamic rope (see :class:`transformers.LlamaConfig`)
     :param same_as_pretrained: if True, do not change the default values
         to get a smaller model
+    :param use_preinstalled: use preinstalled configurations
     :return: dictionary with a model, inputs, dynamic shapes, and the configuration

     Example:
@@ -363,8 +365,10 @@ def get_untrained_model_with_inputs(
     """
     if verbose:
         print(f"[get_untrained_model_with_inputs] model_id={model_id!r}")
+        if use_preinstalled:
+            print(f"[get_untrained_model_with_inputs] use preinstalled {model_id!r}")
     if config is None:
-        config = get_pretrained_config(model_id)
+        config = get_pretrained_config(model_id, use_preinstalled=use_preinstalled)
     archs = config.architectures # type: ignore
     assert archs is not None and len(archs) == 1, (
         f"Unable to determine the architecture for model {model_id!r}, "
