Skip to content

Commit dcc6441

Browse files
authored
Update coremltools to 9.0b1 (#13614)
This updates main to use coremltools 9.0b1. We will pin the ExecuTorch GA release to the official coremltools 9.0 release once it is published.
1 parent 26c93df commit dcc6441

File tree

3 files changed

+43
-19
lines changed

3 files changed

+43
-19
lines changed

backends/apple/coreml/compiler/torch_ops.py

Lines changed: 41 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -47,24 +47,48 @@ def split_copy(context, node):
4747
split(context, node)
4848

4949

50-
@register_torch_op(
51-
torch_alias=[
52-
"dim_order_ops::_to_dim_order_copy",
53-
"dim_order_ops._to_dim_order_copy",
54-
],
55-
override=False,
56-
)
57-
def _to_dim_order_copy(context, node):
58-
dim_order = _get_kwinputs(context, node, "dim_order", default=[None])[0]
59-
node.kwinputs.pop("dim_order")
50+
def is_fbcode():
51+
return not hasattr(_torch.version, "git_version")
6052

61-
# In CoreML, dim_order.val will be an ndarray, so we convert it to a list
62-
dim_order = [int(d) for d in dim_order.val]
63-
memory_format = get_memory_format(dim_order)
64-
assert (
65-
memory_format == _torch.contiguous_format
66-
), "Only contiguous memory format is supported in CoreML"
67-
to(context, node)
53+
54+
if not is_fbcode():
55+
from coremltools.converters.mil.frontend.torch.dim_order_ops import (
56+
_empty_dim_order,
57+
_to_dim_order_copy,
58+
)
59+
60+
# This is a temporary hack to register the alias "dim_order_ops._to_dim_order_copy",
61+
# which was missed by coremltools
62+
@register_torch_op(torch_alias=["dim_order_ops._to_dim_order_copy"], override=False)
63+
def _to_dim_order_copy_TMP_EXECUTORCH_ALIAS_HACK(context, node):
64+
_to_dim_order_copy(context, node)
65+
66+
# This is a temporary hack to register the alias "dim_order_ops._empty_dim_order",
67+
# which was missed by coremltools
68+
@register_torch_op(torch_alias=["dim_order_ops._empty_dim_order"], override=False)
69+
def _empty_dim_order_TMP_EXECUTORCH_ALIAS_HACK(context, node):
70+
_empty_dim_order(context, node)
71+
72+
else:
73+
# TODO: remove this case when fbcode updates to coremltools 9.0
74+
@register_torch_op(
75+
torch_alias=[
76+
"dim_order_ops::_to_dim_order_copy",
77+
"dim_order_ops._to_dim_order_copy",
78+
],
79+
override=False,
80+
)
81+
def _to_dim_order_copy(context, node):
82+
dim_order = _get_kwinputs(context, node, "dim_order", default=[None])[0]
83+
node.kwinputs.pop("dim_order")
84+
85+
# In CoreML, dim_order.val will be an ndarray, so we convert it to a list
86+
dim_order = [int(d) for d in dim_order.val]
87+
memory_format = get_memory_format(dim_order)
88+
assert (
89+
memory_format == _torch.contiguous_format
90+
), "Only contiguous memory format is supported in CoreML"
91+
to(context, node)
6892

6993

7094
# https://github.com/apple/coremltools/pull/2558

backends/apple/coreml/scripts/install_requirements.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ SCRIPT_DIR_PATH="$(
1212

1313
# TODO(jathu): remove the need to fetch coremltools to build deps for coreml_executor_runner.
1414
# Keep this version in sync with: pyproject.toml
15-
COREMLTOOLS_VERSION="8.3"
15+
COREMLTOOLS_VERSION="9.0b1"
1616

1717
red=`tput setaf 1`
1818
green=`tput setaf 2`

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -71,7 +71,7 @@ dependencies=[
7171
# See also third-party/TARGETS for buck's typing-extensions version.
7272
"typing-extensions>=4.10.0",
7373
# Keep this version in sync with: ./backends/apple/coreml/scripts/install_requirements.sh
74-
"coremltools==8.3; platform_system == 'Darwin' or platform_system == 'Linux'",
74+
"coremltools==9.0b1; platform_system == 'Darwin' or platform_system == 'Linux'",
7575
# scikit-learn is used to support palettization in the coreml backend
7676
"scikit-learn==1.7.1",
7777
"hydra-core>=1.3.0",

0 commit comments

Comments
 (0)