5 files changed, +36 -7 lines changed

Buck build file for the Core ML backend (filename not shown in the extracted diff):

@@ -17,6 +17,7 @@ runtime.python_library(
     name = "backend",
     srcs = glob([
         "compiler/*.py",
+        "logging.py",
     ]),
     visibility = [
         "@EXECUTORCH_CLIENTS",
@@ -33,6 +34,7 @@ runtime.python_library(
     name = "partitioner",
     srcs = glob([
         "partition/*.py",
+        "logging.py",
     ]),
     visibility = [
         "@EXECUTORCH_CLIENTS",

Core ML backend compiler module (filename not shown; this module defines the CoreMLBackend class and its preprocess method):

@@ -16,20 +16,20 @@
 
 import coremltools as ct
 import coremltools.optimize as cto
-
 from executorch.backends.apple.coreml import executorchcoreml
+from executorch.backends.apple.coreml.logging import get_coreml_log_level
 from executorch.exir.backend.backend_details import (
     BackendDetails,
     ExportedProgram,
     PreprocessResult,
 )
 from executorch.exir.backend.compile_spec_schema import CompileSpec
 
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.WARNING)
-
 from executorch.backends.apple.coreml.compiler.torch_ops import *  # noqa: F401, F403
 
+logger = logging.getLogger(__name__)
+logger.setLevel(get_coreml_log_level(default_level=logging.WARNING))
+
 
 class COMPILE_SPEC_KEYS(Enum):
     COMPUTE_UNITS = "compute_units"
@@ -409,6 +409,7 @@ def preprocess(
         edge_program: ExportedProgram,
         compile_specs: List[CompileSpec],
     ) -> PreprocessResult:
+        logger.info(f"Edge program: {edge_program}")
         model_type: CoreMLBackend.MODEL_TYPE = (
             CoreMLBackend.model_type_from_compile_specs(
                 compile_specs,

executorch/backends/apple/coreml/compiler/torch_ops.py:

@@ -9,7 +9,7 @@
 # the op to the coremltools library.
 
 import torch as _torch
-from coremltools import _logger as logger
+from coremltools import _logger
 from coremltools.converters.mil.frontend import _utils
 from coremltools.converters.mil.frontend.torch.ops import (
     _get_inputs,
@@ -111,7 +111,7 @@ def dequantize_affine(context, node):
     out_np_dtype = None
     if len(inputs) > 7:
         out_np_dtype = NUM_TO_NUMPY_DTYPE[inputs[7].val]
-        logger.warning(
+        _logger.warning(
             f"Core ML ignores output_dtype {out_np_dtype} on torchao.dequantize_affine and instead uses the native precision."
         )
 

executorch/backends/apple/coreml/logging.py (new file; the return annotation is corrected to int here, since the helper always returns a logging level, never a string):

@@ -0,0 +1,24 @@
+# Copyright © 2023 Apple Inc. All rights reserved.
+#
+# Please refer to the license found in the LICENSE file in the root directory of the source tree.
+
+import logging
+import os
+from typing import Optional
+
+
+def get_coreml_log_level(default_level: int) -> int:
+    level_str = os.environ.get("ET_COREML_LOG_LEVEL", "").upper()
+    if level_str == "":
+        return default_level
+
+    level_map = {
+        "DEBUG": logging.DEBUG,
+        "INFO": logging.INFO,
+        "WARNING": logging.WARNING,
+        "ERROR": logging.ERROR,
+        "CRITICAL": logging.CRITICAL,
+    }
+    if level_str not in level_map:
+        raise ValueError(f"Invalid ET_COREML_LOG_LEVEL: {level_str}")
+    return level_map[level_str]
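
As a quick illustration (not part of the PR itself), the sketch below shows how the ET_COREML_LOG_LEVEL override behaves; the standalone-script framing is hypothetical, but get_coreml_log_level and the environment variable come straight from the new module above:

import logging
import os

from executorch.backends.apple.coreml.logging import get_coreml_log_level

# With no ET_COREML_LOG_LEVEL set, the caller's default is returned unchanged.
os.environ.pop("ET_COREML_LOG_LEVEL", None)
assert get_coreml_log_level(default_level=logging.WARNING) == logging.WARNING

# The override is case-insensitive (the helper upper-cases the value);
# values outside DEBUG/INFO/WARNING/ERROR/CRITICAL raise ValueError.
os.environ["ET_COREML_LOG_LEVEL"] = "debug"
assert get_coreml_log_level(default_level=logging.WARNING) == logging.DEBUG

# This mirrors how the compiler and partitioner modules consume the helper.
logger = logging.getLogger(__name__)
logger.setLevel(get_coreml_log_level(default_level=logging.WARNING))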

Core ML partitioner module (filename not shown; matches the partition/*.py glob in the Buck target above):

@@ -10,6 +10,8 @@
 import torch
 
 from executorch.backends.apple.coreml.compiler import CoreMLBackend
+
+from executorch.backends.apple.coreml.logging import get_coreml_log_level
 from executorch.exir.backend.compile_spec_schema import CompileSpec
 
 from executorch.exir.backend.partitioner import (
@@ -23,7 +25,7 @@
 from torch.fx.passes.operator_support import OperatorSupportBase
 
 logger = logging.getLogger(__name__)
-logger.setLevel(logging.INFO)
+logger.setLevel(get_coreml_log_level(default_level=logging.INFO))
 
 
 def _is_view_op(op: torch._ops.OpOverload) -> bool: