We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 561f144 commit e7aed34 (Copy full SHA for e7aed34)
tests/models/test_save_aiu.py
@@ -85,11 +85,11 @@ def test_large_outlier_bert(
85
# Loaded model w/ recomputed SAWB should have widened channel quantization stdev
86
for k, v in state_dict.items():
87
if k.endswith(".weight") and any(n in k for n in bert_linear_names):
88
- perCh_stdev_model = layer2stdev.get(k)
89
- perCh_stdev_loaded = v.to(torch.float32).std(dim=stddev_dim)
+ stddev_model = layer2stdev.get(k)
+ stddev_loaded = v.to(torch.float32).std(dim=stddev_dim)
90
91
# SAWB stddev should be at least as good as Qmax stddev w/ outlier
92
- assert torch.all(perCh_stdev_loaded >= perCh_stdev_model)
+ assert torch.all(stddev_loaded >= stddev_model)
93
94
95
def test_clip_vals_zero_bert(
0 commit comments