Skip to content

Commit da97fcf

Browse files
committed
Update on "[ExecuTorch] Add broadcasting support to optimized op_div"
Summary: Similar to the broadcast support added in op_mul. Test Plan: Tests added. Reviewers: (none). Subscribers: (none). Tasks: (none). Tags: (none). cc larryliu0820 manuelcandales [ghstack-poisoned]
2 parents 8bdb03d + fdf8742 commit da97fcf

File tree

2 files changed

+3
-6
lines changed

2 files changed

+3
-6
lines changed

kernels/optimized/cpu/op_div.cpp

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -137,15 +137,13 @@ Tensor& opt_div_out(
137137
// to be agnostic of op. This should be fixed, likely by moving lambda
138138
// creation to handle_broadcast_elementwise and it be aware of which op is
139139
// being executed.
140-
auto div_lambda = [](auto x, auto y, auto alpha) {
141-
[[maybe_unused]] alpha;
140+
auto div_lambda = [](auto x, auto y, [[maybe_unused]] auto alpha) {
142141
return y / x;
143142
};
144143
return torch::executor::handle_broadcast_elementwise<op_name>(
145144
ctx, div_lambda, a, b, out, selected_optimized_path);
146145
} else {
147-
auto div_lambda = [](auto x, auto y, auto alpha) {
148-
[[maybe_unused]] alpha;
146+
auto div_lambda = [](auto x, auto y, [[maybe_unused]] auto alpha) {
149147
return x / y;
150148
};
151149
return torch::executor::handle_broadcast_elementwise<op_name>(

kernels/optimized/cpu/op_mul.cpp

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -133,8 +133,7 @@ Tensor& opt_mul_out(
133133
// Reason for using alpha even when used for mul is because
134134
// handle_broadcast_elementwise is used for add and sub as well
135135
// and it uses alpha.
136-
auto mul_lambda = [](auto x, auto y, auto alpha) {
137-
[[maybe_unused]] alpha;
136+
auto mul_lambda = [](auto x, auto y, [[maybe_unused]] auto alpha) {
138137
return x * y;
139138
};
140139
static constexpr const char op_name[] = "mul.out";

0 commit comments

Comments (0)