|
| 1 | +# -*- coding: utf-8 -*- |
| 2 | +import torch |
| 3 | +import torch.nn as nn |
| 4 | +import numpy as np |
| 5 | + |
class ComplexConv(nn.Module):
    """2-D convolution over complex-valued tensors.

    Input is a real tensor shaped [batch, 2, channel, axis1, axis2],
    where index 0 along dim 1 is the real part and index 1 the
    imaginary part.  The complex kernel is stored as two real Conv2d
    layers, and the product (a+bi)(c+di) = (ac-bd) + (ad+bc)i is
    expanded into four real convolutions.
    """

    def __init__(self, in_channel, out_channel, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True):
        super().__init__()
        # NOTE(review): set here but never read inside the module — kept
        # for backward compatibility with any external callers.
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.padding = padding

        # Real and imaginary halves of the complex-valued kernel.
        self.conv_re = nn.Conv2d(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias)
        self.conv_im = nn.Conv2d(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias)

    def forward(self, x):
        """Apply the complex convolution.

        Args:
            x: tensor of shape [batch, 2, channel, axis1, axis2].

        Returns:
            Tensor of shape [batch, 2, out_channel, axis1', axis2'],
            real part stacked before imaginary part along dim 1.
        """
        x_re, x_im = x[:, 0], x[:, 1]
        # (a+bi)(c+di): real = ac - bd, imaginary = ad + bc
        out_re = self.conv_re(x_re) - self.conv_im(x_im)
        out_im = self.conv_im(x_re) + self.conv_re(x_im)
        return torch.stack((out_re, out_im), dim=1)
| 21 | + |
| 22 | +#%% |
if __name__ == "__main__":
    # Demo: push a random complex-valued batch through the module.
    # Input layout: [batchsize, 2, channel, axis1_size, axis2_size];
    # the concrete dimensions below are arbitrary.
    x = torch.randn((10, 2, 3, 100, 100))

    # 1. Build the module — (in_channel, out_channel, kernel_size)
    #    are the required constructor arguments.
    complexConv = ComplexConv(3, 10, (5, 5))

    # 2. Forward pass.
    y = complexConv(x)
| 35 | + |
0 commit comments