@@ -91,40 +91,6 @@ def test_op_empty_replacement_contiguous(self) -> None:
9191 ),
9292 )
9393
94-    def test_op_clone_dim_order_preserves_channels_last(self):
95-        x = torch.randn(2, 3, 4, 5).to(memory_format=torch.channels_last)
96-        y = torch.ops.dim_order_ops._clone_dim_order.default(x)
97-
98-        assert y.is_contiguous(
99-            memory_format=torch.channels_last
100-        ), "_clone_dim_order output is not in channels_last memory format."
101-        assert torch.allclose(x, y)
102-
103-    def test_op_clone_dim_order_to_contiguous(self):
104-        x = torch.randn(2, 3, 4, 5).to(memory_format=torch.channels_last)
105-        contiguous_dim_order = get_dim_order(torch.contiguous_format, x.dim())
106-        y = torch.ops.dim_order_ops._clone_dim_order.default(
107-            x, dim_order=contiguous_dim_order
108-        )
109-
110-        assert (
111-            y.is_contiguous()
112-        ), "_clone_dim_order output is not in contiguous memory format"
113-        assert torch.allclose(x, y)
114-
115-    def test_op_clone_dim_order_out_to_channels_last(self):
116-        x = torch.randn(2, 3, 4, 5).contiguous()
117-        y = torch.empty_like(x, memory_format=torch.channels_last)
118-        channels_last_dim_order = get_dim_order(torch.channels_last, y.dim())
119-        torch.ops.dim_order_ops._clone_dim_order.out(
120-            x, dim_order=channels_last_dim_order, out=y
121-        )
122-
123-        assert y.is_contiguous(
124-            memory_format=torch.channels_last
125-        ), "_clone_dim_order output is not in channels_last memory format"
126-        assert torch.allclose(x, y)
127-
127-
12894 def test_op_dim_order_update (self ) -> None :
12995 MemoryFormatOpsPassTestUtils .memory_format_test_runner (
13096 self ,
0 commit comments