1212import torch as _torch
1313from coremltools import _logger
1414from coremltools .converters .mil .frontend import _utils
15- from coremltools .converters .mil .frontend .torch .dom_order_ops import (
16- _empty_dim_order ,
17- _to_dim_order_copy ,
18- )
1915from coremltools .converters .mil .frontend .torch .ops import (
2016 _get_inputs ,
17+ _get_kwinputs ,
2118 NUM_TO_NUMPY_DTYPE ,
2219 NUM_TO_TORCH_DTYPE ,
2320 split ,
21+ to ,
2422 transpose ,
2523 unbind ,
2624)
2725from coremltools .converters .mil .frontend .torch .torch_op_registry import (
2826 register_torch_op ,
2927)
3028from coremltools .converters .mil .mil import types
29+ from executorch .exir .dim_order_utils import get_memory_format
3130
3231
3332# https://github.com/apple/coremltools/pull/2556
@@ -48,18 +47,48 @@ def split_copy(context, node):
4847 split (context , node )
4948
5049
51- # This is a temporary hack to register the alias "dim_order_ops._to_dim_order_copy",
52- # which was missed by coremltools
53- @register_torch_op (torch_alias = ["dim_order_ops._to_dim_order_copy" ], override = False )
54- def _to_dim_order_copy_TMP_EXECUTORCH_ALIAS_HACK (context , node ):
55- _to_dim_order_copy (context , node )
def is_fbcode() -> bool:
    """Return True when running under an fbcode (Meta-internal) PyTorch build.

    OSS PyTorch wheels expose ``torch.version.git_version``; fbcode builds do
    not, so the attribute's absence is used as the fbcode signal.
    """
    has_git_version = hasattr(_torch.version, "git_version")
    return not has_git_version
5653
if not is_fbcode():
    # coremltools >= 9.0 ships native translations for the dim_order ops;
    # reuse them and only register the dot-separated alias spellings that
    # coremltools missed.
    from coremltools.converters.mil.frontend.torch.dim_order_ops import (
        _empty_dim_order,
        _to_dim_order_copy,
    )

    # This is a temporary hack to register the alias
    # "dim_order_ops._to_dim_order_copy", which was missed by coremltools
    @register_torch_op(torch_alias=["dim_order_ops._to_dim_order_copy"], override=False)
    def _to_dim_order_copy_TMP_EXECUTORCH_ALIAS_HACK(context, node):
        _to_dim_order_copy(context, node)

    # This is a temporary hack to register the alias
    # "dim_order_ops._empty_dim_order", which was missed by coremltools
    @register_torch_op(torch_alias=["dim_order_ops._empty_dim_order"], override=False)
    def _empty_dim_order_TMP_EXECUTORCH_ALIAS_HACK(context, node):
        _empty_dim_order(context, node)

else:
    # TODO: remove this case when fbcode updates to coremltools 9.0
    @register_torch_op(
        torch_alias=[
            "dim_order_ops::_to_dim_order_copy",
            "dim_order_ops._to_dim_order_copy",
        ],
        override=False,
    )
    def _to_dim_order_copy(context, node):
        """Translate dim_order_ops._to_dim_order_copy by delegating to aten.to.

        The ``dim_order`` kwarg is validated to describe a contiguous layout
        (the only layout CoreML supports) and stripped from the node before
        the generic ``to`` translation runs.
        """
        dim_order = _get_kwinputs(context, node, "dim_order", default=[None])[0]
        # dim_order is an optional kwarg; when it was not supplied we get the
        # [None] default back, which means the standard contiguous layout.
        # Guarding here avoids a KeyError from kwinputs.pop and an
        # AttributeError from dereferencing None.val.
        if dim_order is not None:
            node.kwinputs.pop("dim_order")

            # In CoreML, dim_order.val will be an ndarray, so we convert it to a list
            dim_order = [int(d) for d in dim_order.val]
            memory_format = get_memory_format(dim_order)
            assert (
                memory_format == _torch.contiguous_format
            ), "Only contiguous memory format is supported in CoreML"
        to(context, node)
6392
6493
6594# https://github.com/apple/coremltools/pull/2558
0 commit comments