
Commit 9b17ee7

Codebase Refactor
1 parent be6ec27 commit 9b17ee7

29 files changed, +516 -468 lines

DeepQuant/CustomForwards/Activations.py

Lines changed: 2 additions & 2 deletions
@@ -5,8 +5,8 @@
 # Federico Brancasi <[email protected]>
 
 import torch.nn as nn
-from torch import Tensor
 from brevitas.nn.quant_layer import QuantNonLinearActLayer
+from torch import Tensor
 
 
 class WrapperActivation(nn.Module):
@@ -28,4 +28,4 @@ def activationForward(self: QuantNonLinearActLayer, inp: Tensor) -> Tensor:
     else:
         output = quantInput
     quantOutput = self.act_quant(output) if self.act_quant is not None else output
-    return quantOutput
+    return quantOutput
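
Note that activationForward is a free function annotated with self: QuantNonLinearActLayer, which suggests it gets bound onto an existing Brevitas layer as a replacement forward. A minimal, generic sketch of that binding pattern is below; it uses plain PyTorch and types.MethodType, and both the name customReluForward and the binding mechanism are illustrative assumptions, not DeepQuant's actual API.

import types

import torch
import torch.nn as nn
from torch import Tensor


def customReluForward(self: nn.ReLU, inp: Tensor) -> Tensor:
    # Free function with a `self` parameter, in the same style as
    # activationForward above; here it only reports the call and delegates.
    print(f"custom forward on {type(self).__name__}, input shape {tuple(inp.shape)}")
    return torch.relu(inp)


act = nn.ReLU()
# Bind the free function as this instance's forward (illustrative mechanism,
# not necessarily how DeepQuant attaches its custom forwards).
act.forward = types.MethodType(customReluForward, act)
print(act(torch.randn(2, 3)))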

DeepQuant/CustomForwards/Linear.py

Lines changed: 2 additions & 2 deletions
@@ -5,8 +5,8 @@
 # Federico Brancasi <[email protected]>
 
 import torch.nn as nn
-from torch import Tensor
 from brevitas.nn.quant_layer import QuantWeightBiasInputOutputLayer
+from torch import Tensor
 
 
 class WrapperLinear(nn.Module):
@@ -33,4 +33,4 @@ def linearForward(self: QuantWeightBiasInputOutputLayer, inp: Tensor) -> Tensor:
 
     output = self.wrappedInnerForwardImpl(quantInput, quantWeight, quantBias)
     quantOutput = self.output_quant(output)
-    return quantOutput
+    return quantOutput
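
For context, the visible tail of linearForward follows a quantize-operands, run-the-wrapped-inner-compute, quantize-the-output shape. The self-contained toy below mimics only that shape, using stub quantizers so it runs without Brevitas; FakeQuant, ToyQuantLinear, and everything other than the visible wrappedInnerForwardImpl and output_quant calls are assumptions made for illustration.

import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import Tensor


class FakeQuant(nn.Module):
    """Stand-in for a quant proxy: snap values to a uniform grid."""

    def __init__(self, scale: float = 0.05) -> None:
        super().__init__()
        self.scale = scale

    def forward(self, x: Tensor) -> Tensor:
        return torch.round(x / self.scale) * self.scale


class ToyQuantLinear(nn.Module):
    def __init__(self, inFeatures: int, outFeatures: int) -> None:
        super().__init__()
        self.weight = nn.Parameter(torch.randn(outFeatures, inFeatures))
        self.bias = nn.Parameter(torch.zeros(outFeatures))
        self.input_quant = FakeQuant()
        self.weight_quant = FakeQuant()
        self.output_quant = FakeQuant()

    def wrappedInnerForwardImpl(self, quantInput: Tensor, quantWeight: Tensor, quantBias: Tensor) -> Tensor:
        # The inner compute that the custom forward delegates to.
        return F.linear(quantInput, quantWeight, quantBias)

    def forward(self, inp: Tensor) -> Tensor:
        # Quantize operands, run the wrapped compute, quantize the result,
        # mirroring the tail of linearForward shown in the diff above.
        quantInput = self.input_quant(inp)
        quantWeight = self.weight_quant(self.weight)
        quantBias = self.bias  # bias quantization elided in this toy
        output = self.wrappedInnerForwardImpl(quantInput, quantWeight, quantBias)
        quantOutput = self.output_quant(output)
        return quantOutput


print(ToyQuantLinear(4, 3)(torch.randn(2, 4)).shape)  # torch.Size([2, 3])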

DeepQuant/CustomForwards/MultiHeadAttention.py

Lines changed: 3 additions & 2 deletions
@@ -5,10 +5,11 @@
 # Federico Brancasi <[email protected]>
 
 import math
+
 import torch
 import torch.nn.functional as F
-from torch import Tensor
 from brevitas.nn.quant_mha import QuantMultiheadAttention
+from torch import Tensor
 
 
 def mhaForward(
@@ -59,4 +60,4 @@ def mhaForward(
     )
 
     attnOutput = self.out_proj(attnOutput)
-    return attnOutput
+    return attnOutput
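
The last visible lines of mhaForward end with the standard output projection of multi-head attention. As a reference for that overall structure only, here is a self-contained single-head sketch in plain PyTorch; ToySelfAttention, its projection names, and everything before the final out_proj call are illustrative assumptions and do not reflect DeepQuant's quantized mhaForward.

import math

import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import Tensor


class ToySelfAttention(nn.Module):
    """Single-head self-attention, kept minimal to show the overall shape."""

    def __init__(self, embedDim: int) -> None:
        super().__init__()
        self.q_proj = nn.Linear(embedDim, embedDim)
        self.k_proj = nn.Linear(embedDim, embedDim)
        self.v_proj = nn.Linear(embedDim, embedDim)
        self.out_proj = nn.Linear(embedDim, embedDim)

    def forward(self, x: Tensor) -> Tensor:
        q, k, v = self.q_proj(x), self.k_proj(x), self.v_proj(x)
        # Scaled dot-product attention.
        scores = q @ k.transpose(-2, -1) / math.sqrt(q.shape[-1])
        attnOutput = F.softmax(scores, dim=-1) @ v
        # Final projection, matching the last lines of the hunk above.
        attnOutput = self.out_proj(attnOutput)
        return attnOutput


print(ToySelfAttention(8)(torch.randn(2, 5, 8)).shape)  # torch.Size([2, 5, 8])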
