We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 6840f2c · commit 34a2c8f (Copy full SHA for 34a2c8f)
neural_compressor/torch/algorithms/layer_wise/utils.py
@@ -355,7 +355,7 @@ def clean_module_weight(module):
355
else:
356
param_cls = type(submodule._parameters[n])
357
kwargs = submodule._parameters[n].__dict__
358
- if is_hpu_available:
+ if is_hpu_available():
359
from habana_frameworks.torch.core import weight_sharing
360
361
if param_cls == weight_sharing.HabanaParameterWrapper:
0 commit comments