@@ -496,10 +496,13 @@ def validate_inputs(hf_model, ep_path_pairs):
     Returns:
         bool: True if all inputs are valid, False otherwise.
     """
-    # Check HF model path (only if provided)
-    if hf_model and not os.path.exists(hf_model):
-        print(f"[ERROR] Hugging Face model path does not exist: {hf_model}")
-        return False
+    # Check HF model path (only if provided and it looks like a local path)
+    # If it doesn't exist locally, assume it's a HF model name to be downloaded
+    if hf_model and os.path.exists(hf_model):
+        # Verify it's a valid directory
+        if not os.path.isdir(hf_model):
+            print(f"[ERROR] Hugging Face model path is not a directory: {hf_model}")
+            return False
 
     # Check execution providers and paths
     for ep, path in ep_path_pairs:
@@ -525,6 +528,10 @@ def main():
         python compute_kl_divergence.py --hf_model "F:\\shared\\Llama-3.1-8B-Instruct"
             --ep cuda --path "G:\\models\\cuda_model" --output "hf_vs_cuda.json"
 
+        # Compare HF vs CUDA model (download from Hugging Face)
+        python compute_kl_divergence.py --hf_model "meta-llama/Llama-3.1-8B-Instruct"
+            --ep cuda --path "G:\\models\\cuda_model" --output "hf_vs_cuda.json"
+
         # Compare HF vs CUDA vs DirectML models
         python compute_kl_divergence.py --hf_model "F:\\shared\\Llama-3.1-8B-Instruct"
             --ep cuda --path "G:\\models\\cuda_model"
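For context on the relaxed check, here is a minimal sketch (not the script's actual loader) of how an `hf_model` argument that passes the new validation could be consumed. It assumes the value is eventually handed to `transformers.AutoModelForCausalLM.from_pretrained`, which accepts either a local directory or a Hub model ID and downloads the weights in the latter case; the helper name and example IDs/paths below are hypothetical.

```python
# Sketch only: illustrates why validate_inputs() now rejects only paths that
# exist locally but are not directories. A string with no local match is
# treated as a Hugging Face Hub model ID to be downloaded.
import os
from transformers import AutoModelForCausalLM, AutoTokenizer

def load_hf_model(hf_model: str):
    if os.path.exists(hf_model) and not os.path.isdir(hf_model):
        raise ValueError(f"Hugging Face model path is not a directory: {hf_model}")
    # from_pretrained resolves both local directories and Hub model IDs.
    tokenizer = AutoTokenizer.from_pretrained(hf_model)
    model = AutoModelForCausalLM.from_pretrained(hf_model)
    return model, tokenizer

# Hypothetical usage:
#   load_hf_model("meta-llama/Llama-3.1-8B-Instruct")   # downloaded from the Hub
#   load_hf_model(r"F:\shared\Llama-3.1-8B-Instruct")   # loaded from local disk
```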