We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
2 parents 6137606 + bde0030 commit fc895fd (Copy full SHA for fc895fd)
auto_fp8/modeling.py
@@ -71,10 +71,10 @@ def skip(*args, **kwargs):
71
torch.cuda.empty_cache()
72
73
# Important defaults
74
- if not hasattr(model_init_kwargs, "torch_dtype"):
+ if "torch_dtype" not in model_init_kwargs:
75
model_init_kwargs["torch_dtype"] = "auto"
76
77
- if not hasattr(model_init_kwargs, "device_map"):
+ if "device_map" not in model_init_kwargs:
78
model_init_kwargs["device_map"] = "auto"
79
80
merged_kwargs = {**model_init_kwargs, **cached_file_kwargs}
0 commit comments