@@ -240,27 +240,36 @@ def quantizer_asymmetric(request):
 
 
 # Create random clip vals for Per Channel; must be accompanied by the same tensor
-clip_low_perCh = []
-clip_high_perCh = []
-for tensor_size in tensor_sizes:
-    clip_low_row = -torch.rand(tensor_size) - 2.5  # [-3.5, -2.5]
-    clip_high_row = torch.rand(tensor_size) + 2.5  # [2.5, 3.5]
-    clip_low_perCh.append(clip_low_row)
-    clip_high_perCh.append(clip_high_row)
+qschemes_asymmetric_perCh_params = []
+for qunit in ["perCh"]:
+    for symmetric in [False]:
+        for Ngrp in [False]:
+            for single_sided in [False]:
+                # needs to be disabled for some special cases
+                for qlevel_lowering in [True]:
+                    for axis in [0]:
+                        qschemes_asymmetric_perCh_params.append(
+                            Qscheme(
+                                unit=qunit,
+                                symmetric=symmetric,
+                                single_sided=single_sided,
+                                qlevel_lowering=qlevel_lowering,
+                                Nch=1,  # temp value
+                                axis=axis,
+                            )
+                        )
 
 quantizer_asymmetric_perCh_params = []
 for num_bits in torch.tensor([8, 4]):
-    for clip_low in clip_low_perCh:
-        for clip_high in clip_high_perCh:
-            for scheme in qschemes_asymmetric_params:
-                quantizer_asymmetric_params.append(
-                    {
-                        "num_bits": num_bits,
-                        "clip_low": clip_low,
-                        "clip_high": clip_high,
-                        "scheme": scheme,
-                    }
-                )
+    for scheme in qschemes_asymmetric_perCh_params:
+        quantizer_asymmetric_perCh_params.append(
+            {
+                "num_bits": num_bits,
+                # "clip_low": -clip_high,
+                # "clip_high": clip_high,
+                "scheme": scheme,
+            }
+        )
 
 
 @pytest.fixture(scope="session", params=quantizer_asymmetric_perCh_params)
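For context, a minimal sketch of the pattern this diff relies on: a session-scoped, parametrized pytest fixture hands each entry of a params list (here, dicts pairing `num_bits` with a `Qscheme`) to tests via `request.param`, so every test that requests the fixture runs once per configuration. The `Qscheme` namedtuple and the test below are simplified stand-ins, not the repo's actual definitions.

```python
# Sketch only: stand-in Qscheme and a session-scoped parametrized fixture.
from collections import namedtuple

import pytest
import torch

Qscheme = namedtuple(
    "Qscheme",
    ["unit", "symmetric", "single_sided", "qlevel_lowering", "Nch", "axis"],
)

# Hypothetical params list mirroring quantizer_asymmetric_perCh_params above.
example_params = [
    {
        "num_bits": num_bits,
        "scheme": Qscheme(
            unit="perCh",
            symmetric=False,
            single_sided=False,
            qlevel_lowering=True,
            Nch=1,
            axis=0,
        ),
    }
    for num_bits in torch.tensor([8, 4])
]


@pytest.fixture(scope="session", params=example_params)
def quantizer_config(request):
    # pytest instantiates this fixture once per entry in `params`;
    # the current entry is exposed as request.param.
    return request.param


def test_num_bits_is_supported(quantizer_config):
    # Runs once for the 8-bit and once for the 4-bit configuration.
    assert int(quantizer_config["num_bits"]) in (4, 8)
```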