@@ -1590,6 +1590,35 @@ def forward(self, x):
             (torch.tensor([[[0, 1], [0, 1]], [[4, 2], [3, 3]]]),),
         )

+    # def test_vulkan_backend_conv_with_dim_order(self):
+    #     class Conv2dSequential(torch.nn.Module):
+    #         def __init__(self, bias=True, channel_last=False):
+    #             super().__init__()
+    #             self.first = torch.nn.Conv2d(
+    #                 in_channels=1,
+    #                 out_channels=3,
+    #                 kernel_size=(3, 3),
+    #                 padding=1,
+    #                 bias=bias,
+    #             )
+    #             self.second = torch.nn.Conv2d(
+    #                 in_channels=3,
+    #                 out_channels=2,
+    #                 kernel_size=(3, 3),
+    #                 padding=1,
+    #                 bias=bias,
+    #             )
+
+    #         def forward(self, x):
+    #             x = x.to(memory_format=torch.channels_last)
+    #             return self.second(self.first(x))
+
+    #     self.lower_module_and_test_output(
+    #         Conv2dSequential(),
+    #         (torch.rand(size=[1, 1, 3, 3]),),
+    #
+    #     )
+
     def test_vulkan_backend_flip(self):
         class FlipModule(torch.nn.Module):
             def __init__(self):
@@ -1654,32 +1683,3 @@ def forward(self, x):
             GridPriorsModule(),
             (torch.rand(size=[1, 5, 2, 3]),),
         )
-
-    # def test_vulkan_backend_conv_with_dim_order(self):
-    #     class Conv2dSequential(torch.nn.Module):
-    #         def __init__(self, bias=True, channel_last=False):
-    #             super().__init__()
-    #             self.first = torch.nn.Conv2d(
-    #                 in_channels=1,
-    #                 out_channels=3,
-    #                 kernel_size=(3, 3),
-    #                 padding=1,
-    #                 bias=bias,
-    #             )
-    #             self.second = torch.nn.Conv2d(
-    #                 in_channels=3,
-    #                 out_channels=2,
-    #                 kernel_size=(3, 3),
-    #                 padding=1,
-    #                 bias=bias,
-    #             )
-
-    #         def forward(self, x):
-    #             x = x.to(memory_format=torch.channels_last)
-    #             return self.second(self.first(x))
-
-    #     self.lower_module_and_test_output(
-    #         Conv2dSequential(),
-    #         (torch.rand(size=[1, 1, 3, 3]),),
-    #
-    #     )