@@ -27,78 +27,77 @@ def __init__(self, name):
2727 self .name = name
2828 self .conv = nn .Conv2d (3 , 3 , kernel_size = 3 , padding = 1 )
2929 self .relu = nn .ReLU ()
30- self .tensor = nn .Parameter (2.0 * torch .ones ([192 ])) # 1D tensor
3130
def forward(self, x):
    """Run conv+ReLU, flatten to 1D, then apply the op selected by ``self.name``.

    Args:
        x: input batch for the 3-channel conv; assumed NCHW — TODO confirm
           (representative_data_gen yields shape (1, 3, 8, 8)).

    Returns:
        The 1D activation tensor after the selected op (reductions such as
        'mean'/'sum'/'amax'/'linalg_norm' keep dim, returning shape (1,)).

    Raises:
        ValueError: if ``self.name`` does not match a supported op.
    """
    x = self.conv(x)
    x = self.relu(x)
    # Flatten so every branch below operates on a 1D tensor.
    x = torch.reshape(x, (-1,))

    if self.name == 'add':
        y = torch.add(x, 1)
    elif self.name == 'relu6':
        y = torch.nn.functional.relu6(x)
    elif self.name == 'relu':
        y = torch.nn.functional.relu(x)
    elif self.name == 'sigmoid':
        # NOTE(review): F.sigmoid is deprecated in favor of torch.sigmoid;
        # kept as-is because the test presumably targets this exact op node.
        y = torch.nn.functional.sigmoid(x)
    elif self.name == 'leaky_relu':
        y = torch.nn.functional.leaky_relu(x)
    elif self.name == 'mul':
        y = torch.mul(x, 1)
    elif self.name == 'sub':
        y = torch.sub(x, 1)
    elif self.name == 'div':
        y = torch.div(x, 1)
    elif self.name == 'softmax':
        # Explicit dim: implicit-dim softmax is deprecated and warns. For a
        # 1D input the implicit choice resolves to dim=0, so this is
        # numerically identical to the previous call.
        y = torch.nn.functional.softmax(x, dim=0)
    elif self.name == 'tanh':
        # NOTE(review): F.tanh is deprecated in favor of torch.tanh; kept so
        # the traced graph still contains the functional variant under test.
        y = torch.nn.functional.tanh(x)
    elif self.name == 'negative':
        y = torch.negative(x)
    elif self.name == 'abs':
        y = torch.abs(x)
    elif self.name == 'sqrt':
        # Clamp keeps sqrt away from exact zeros (ReLU output can be 0),
        # avoiding zero-gradient / degenerate quantization ranges.
        y = torch.sqrt(torch.clamp(x, min=1e-6))
    elif self.name == 'rsqrt':
        # Clamp avoids division by zero in rsqrt.
        y = torch.rsqrt(torch.clamp(x, min=1e-6))
    elif self.name == 'silu':
        y = torch.nn.functional.silu(x)
    elif self.name == 'hardswish':
        y = torch.nn.functional.hardswish(x)
    elif self.name == 'hardsigmoid':
        y = torch.nn.functional.hardsigmoid(x)
    elif self.name == 'pow':
        y = torch.pow(x, 1)
    elif self.name == 'gelu':
        y = torch.nn.functional.gelu(x)
    elif self.name == 'cos':
        y = torch.cos(x)
    elif self.name == 'sin':
        y = torch.sin(x)
    elif self.name == 'exp':
        y = torch.exp(x)
    elif self.name == 'mean':
        y = torch.mean(x, dim=0, keepdim=True)
    elif self.name == 'amax':
        y = torch.amax(x, dim=0, keepdim=True)
    elif self.name == 'maximum':
        y = torch.maximum(x, torch.tensor(0.0))
    elif self.name == 'minimum':
        y = torch.minimum(x, torch.tensor(0.0))
    elif self.name == 'sum':
        y = torch.sum(x, dim=0, keepdim=True)
    elif self.name == 'linalg_norm':
        y = torch.linalg.norm(x, dim=0, keepdim=True)
    else:
        # Previously an unknown name fell through to an opaque NameError on
        # the return; fail loudly with the offending value instead.
        raise ValueError(f"Unsupported layer name: {self.name!r}")
    return y
9493
def representative_data_gen():
    """Yield one calibration batch: a single random NCHW tensor of shape (1, 3, 8, 8)."""
    sample = torch.randn(1, 3, 8, 8)
    yield [sample]
9796
9897@pytest .mark .parametrize ("layer" , [
99- 'add' , 'relu6' , 'relu' , 'sigmoid' , 'leaky_relu' , 'mul' , 'sub' , 'div' , 'softmax' ,
100- 'tanh' , 'negative' , 'abs' , 'sqrt' , 'rsqrt' , 'silu' , 'hardswish' , 'hardsigmoid' ,
101- 'pow' , 'gelu' , 'cos' , 'sin' , 'exp' , 'mean' , 'amax' , 'maximum' , 'minimum' , 'sum'
98+ 'add', 'relu6', 'relu', 'sigmoid', 'leaky_relu', 'mul', 'sub', 'div', 'mean', 'amax', 'softmax',
99+ 'tanh', 'negative', 'maximum', 'minimum', 'abs', 'sqrt', 'sum', 'rsqrt', 'silu', 'hardswish', 'hardsigmoid',
100+ 'linalg_norm', 'pow', 'gelu', 'cos', 'sin', 'exp',
102101])
103102def test_ptq_1d_tensor (layer ):
104103
@@ -118,9 +117,9 @@ def test_ptq_1d_tensor(layer):
118117
119118
120119@pytest .mark .parametrize ("layer" , [
121- 'add' , 'relu6' , 'relu' , 'sigmoid' , 'leaky_relu' , 'mul' , 'sub' , 'div' , 'softmax' ,
122- 'tanh' , 'negative' , 'abs' , 'sqrt' , 'rsqrt' , 'silu' , 'hardswish' , 'hardsigmoid' ,
123- 'pow' , 'gelu' , 'cos' , 'sin' , 'exp' , 'mean' , 'amax' , 'maximum' , 'minimum' , 'sum'
120+ 'add', 'relu6', 'relu', 'sigmoid', 'leaky_relu', 'mul', 'sub', 'div', 'mean', 'amax', 'softmax',
121+ 'tanh', 'negative', 'maximum', 'minimum', 'abs', 'sqrt', 'sum', 'rsqrt', 'silu', 'hardswish', 'hardsigmoid',
122+ 'linalg_norm', 'pow', 'gelu', 'cos', 'sin', 'exp',
124123])
125124def test_ptq_mixed_precision_1d_tensor (layer ):
126125
@@ -149,9 +148,9 @@ def test_ptq_mixed_precision_1d_tensor(layer):
149148
150149
151150@pytest .mark .parametrize ("layer" , [
152- 'add' , 'relu6' , 'relu' , 'sigmoid' , 'leaky_relu' , 'mul' , 'sub' , 'div' , 'softmax' ,
153- 'tanh' , 'negative' , 'abs' , 'sqrt' , 'sum' , 'rsqrt' , 'silu' , 'hardswish' , 'hardsigmoid' ,
154- 'pow' , 'gelu' , 'cos' , 'sin' , 'exp' , 'mean' , 'amax' , 'maximum' , 'minimum'
151+ 'add', 'relu6', 'relu', 'sigmoid', 'leaky_relu', 'mul', 'sub', 'div', 'mean', 'amax', 'softmax',
152+ 'tanh', 'negative', 'maximum', 'minimum', 'abs', 'sqrt', 'sum', 'rsqrt', 'silu', 'hardswish', 'hardsigmoid',
153+ 'linalg_norm', 'pow', 'gelu', 'cos', 'sin', 'exp',
155154])
156155def test_gptq_1d_tensor (layer ):
157156
0 commit comments