@@ -5,7 +5,7 @@ def get_compiler_optimization_flags():
     # App size regressions require this to be backtracked until I have a better solution
     return []
 
-def op_target(name, deps = [], android_deps = [], _allow_third_party_deps = False, _aten_mode_deps = []):
+def op_target(name, deps = [], android_deps = [], _allow_third_party_deps = False, _aten_mode_deps = [], exposed_as_util = False):
     """Registers an implementation of an operator overload group.
 
     An operator overload group is a set of operator overloads with a common
@@ -45,6 +45,8 @@ def op_target(name, deps = [], android_deps = [], _allow_third_party_deps = Fals
             from third-party optimization libraries.
         _aten_mode_deps: List of deps to add to the cxx_library() when building
             for ATen mode.
+        exposed_as_util: If True, this op has a utils namespace that should be exposed
+            as a separate library target for reuse by other operators.
     """
 
     # Note that this doesn't actually define the target, but helps register
@@ -55,6 +57,7 @@ def op_target(name, deps = [], android_deps = [], _allow_third_party_deps = Fals
5557 "name" : name ,
5658 "_allow_third_party_deps" : _allow_third_party_deps ,
5759 "_aten_mode_deps" : _aten_mode_deps ,
60+ "exposed_as_util" : exposed_as_util ,
5861 }
5962
6063def _enforce_deps (deps , name , allow_third_party_deps ):
@@ -154,7 +157,7 @@ def define_op_library(name, deps, android_deps, aten_target, _allow_third_party_
         link_whole = True,
     )
 
-def define_op_target(name, deps, android_deps, is_aten_op, is_et_op = True, _allow_third_party_deps = False, _aten_mode_deps = []):
+def define_op_target(name, deps, android_deps, is_aten_op, is_et_op = True, _allow_third_party_deps = False, _aten_mode_deps = [], exposed_as_util = False):
     """Possibly defines cxx_library targets for the named operator group.
 
     Args:
@@ -166,8 +169,37 @@ def define_op_target(name, deps, android_deps, is_aten_op, is_et_op = True, _all
         _allow_third_party_deps: If True, the op is allowed to depend on
             third-party deps outside of //executorch. Should only be used by
             targets under //executorch/kernels/optimized.
+        exposed_as_util: If True, this op has a utils namespace that should be exposed
+            as a separate library target for reuse by other operators.
     """
 
+    # If this op has utils, create a separate utils library target
+    if exposed_as_util:
+        utils_name = name + "_util"
+        runtime.cxx_library(
+            name = utils_name,
+            srcs = ["{}.cpp".format(name)],
+            exported_headers = ["{}.h".format(name)],
+            visibility = [
+                "//executorch/kernels/portable/...",
+                "//executorch/kernels/quantized/...",
+                "//executorch/kernels/optimized/...",
+                "//executorch/kernels/test/...",
+                "@EXECUTORCH_CLIENTS",
+            ],
+            fbandroid_platform_deps = android_deps,
+            compiler_flags = select({
+                "DEFAULT": ["-Wno-missing-prototypes"],
+                "ovr_config//os:windows": [],
+            }) + (
+                ["-fvisibility=hidden"] if is_xplat() else []
+            ) + get_compiler_optimization_flags(),
+            deps = [
+                "//executorch/runtime/kernel:kernel_includes",
+            ] + deps,
+            force_static = True,
+        )
+
     # If this is a custom op, define a target that builds it with at::Tensor
     # so that it can be imported into a host PyTorch environment for authoring.
     if not is_aten_op and True in get_aten_mode_options():
@@ -226,6 +258,7 @@ ATEN_OPS = (
226258 "//executorch/kernels/portable/cpu/util:kernel_ops_util" ,
227259 ":scalar_utils" ,
228260 ],
261+ exposed_as_util = True ,
229262 ),
230263 op_target (
231264 name = "op_addmm" ,
@@ -1194,6 +1227,7 @@ ATEN_OPS = (
         deps = [
             "//executorch/kernels/portable/cpu/util:copy_ops_util",
         ],
+        exposed_as_util = True,
     ),
     op_target(
         name = "op_sub",
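
Usage note (a hypothetical sketch, not part of the diff above): setting exposed_as_util = True on an op_target() makes define_op_target() emit an extra cxx_library named "<name>_util", which other operators can list in their deps to reuse that op's utils namespace instead of re-implementing it. Assuming these rules are instantiated under //executorch/kernels/portable/cpu, a consumer could look like this:

    op_target(
        name = "op_scatter_add",
        deps = [
            # Reuse the helpers exposed by op_add via its generated "_util" target.
            # The package path and the consumer op name are illustrative assumptions.
            "//executorch/kernels/portable/cpu:op_add_util",
        ],
    )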