Skip to content

Commit 1f018c6

Browse files
committed
Added found GGUF filepaths to the warning.
1 parent fcaeda7 commit 1f018c6

File tree

1 file changed

+9
-8
lines changed

1 file changed

+9
-8
lines changed

src/hf_mem/cli.py

Lines changed: 9 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -125,23 +125,24 @@ async def run(
125125
url = f"https://huggingface.co/api/models/{model_id}/tree/{revision}?recursive=true"
126126
files = await get_json_file(client=client, url=url, headers=headers)
127127
file_paths = [f["path"] for f in files if f.get("path") and f.get("type") == "file"]
128+
128129

130+
# NOTE: GGUF support only applies if:
131+
# 1. The `--gguf-file` flag is set.
132+
# 2. No Safetensors files are found and at least one gguf file is found
133+
gguf_paths = [f for f in file_paths if str(f).endswith(".gguf")]
129134
has_safetensors = any(f in ["model.safetensors", "model.safetensors.index.json", "model_index.json"] for f in file_paths)
130-
has_gguf = any([f for f in file_paths if str(f).endswith(".gguf")])
131-
gguf = gguf_file is not None or (has_gguf and not has_safetensors)
135+
gguf = gguf_file is not None or (gguf_paths and not has_safetensors)
132136

133-
if not gguf and (has_safetensors and has_gguf):
137+
if not gguf and (has_safetensors and gguf_paths):
134138
warnings.warn(
135-
f"Both Safetensors and GGUF files have been found for {model_id} @ {revision}, if you want to estimate any of the GGUF file sizes, please use the `--gguf-file` flag with the path to the specific GGUF file. Estimation will continue for Safetensors files."
139+
f"Both Safetensors and GGUF files have been found for {model_id} @ {revision}, if you want to estimate any of the GGUF file sizes, please use the `--gguf-file` flag with the path to the specific GGUF file. GGUF files found: {gguf_paths}."
136140
)
137-
# NOTE: GGUF support only applies if:
138-
# 1. The `--gguf-file` flag is set.
139-
# 2. No Safetensors files are found and at least one gguf file is found
141+
140142
if gguf:
141143
if kv_cache_dtype not in GGUFDtype.__members__ and kv_cache_dtype != "auto":
142144
raise RuntimeError(f"--kv-cache-dtype={kv_cache_dtype} not recognized for GGUF files. Valid options: {list(GGUFDtype.__members__.keys())} or `auto`.")
143145

144-
gguf_paths = [f for f in file_paths if str(f).endswith(".gguf")]
145146
if not gguf_paths:
146147
raise RuntimeError(f"No GGUF files found for {model_id} @ {revision}.")
147148

0 commit comments

Comments (0)