@@ -120,46 +120,22 @@ Tensor& opt_div_out(
           out.numel());
     });
   } else if (selected_optimized_path != ElementwiseOptimizedPath::kNone) {
-    const Tensor* lhs;
-    const Tensor* rhs;
-    if (selected_optimized_path ==
-        ElementwiseOptimizedPath::kBroadcast2dBy1dReverseArguments) {
-      lhs = &b;
-      rhs = &a;
-    } else {
-      // Catch failure to update logic when subing new broadcasting possibility.
-      ET_DCHECK(
-          selected_optimized_path ==
-          ElementwiseOptimizedPath::kBroadcast2dBy1d);
-      lhs = &a;
-      rhs = &b;
-    }
-    auto error = resize_tensor(out, lhs->sizes());
-    ET_KERNEL_CHECK_MSG(
-        ctx,
-        error == Error::Ok,
-        InvalidArgument,
-        out,
-        "Failed to resize output tensor.");
-    ET_SWITCH_REALB_TYPES(out_type, ctx, "sub.out", CTYPE, [&]() {
-      using Vec = executorch::vec::Vectorized<CTYPE>;
+    // The reason for using alpha is that handle_broadcast_elementwise
+    // is used for add and sub as well:
+    ET_SWITCH_REALB_TYPES(out_type, ctx, "div.out", CTYPE, [&]() {
       if (selected_optimized_path ==
-          ElementwiseOptimizedPath::kBroadcast2dBy1dReverseArguments) {
-        executorch::vec::broadcasting_map_2d_by_1d<CTYPE>(
-            [](Vec x, Vec y) { return y / x; },
-            out.mutable_data_ptr<CTYPE>(),
-            lhs->const_data_ptr<CTYPE>(),
-            rhs->const_data_ptr<CTYPE>(),
-            lhs->sizes()[lhs->dim() - 2],
-            lhs->sizes()[lhs->dim() - 1]);
+              ElementwiseOptimizedPath::kBroadcast2dBy1dReverseArguments ||
+          selected_optimized_path ==
+              ElementwiseOptimizedPath::kBroadcastLastDimReverseArguments ||
+          selected_optimized_path ==
+              ElementwiseOptimizedPath::kBroadcastNdByNdReverseArguments) {
+        auto div_lambda = [](auto x, auto y) { return y / x; };
+        return torch::executor::handle_broadcast_elementwise<CTYPE>(
+            ctx, div_lambda, a, b, out, selected_optimized_path);
       } else {
-        executorch::vec::broadcasting_map_2d_by_1d<CTYPE>(
-            [](Vec x, Vec y) { return x / y; },
-            out.mutable_data_ptr<CTYPE>(),
-            lhs->const_data_ptr<CTYPE>(),
-            rhs->const_data_ptr<CTYPE>(),
-            lhs->sizes()[lhs->dim() - 2],
-            lhs->sizes()[lhs->dim() - 1]);
+        auto div_lambda = [](auto x, auto y) { return x / y; };
+        return torch::executor::handle_broadcast_elementwise<CTYPE>(
+            ctx, div_lambda, a, b, out, selected_optimized_path);
       }
     });
   } else {
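For context, here is a minimal standalone sketch of the idea behind this change: instead of juggling lhs/rhs tensor pointers for the ReverseArguments broadcast paths, the argument order is encoded in the lambda handed to a shared broadcast helper. The helper below, broadcast_elementwise_sketch, is a hypothetical stand-in for handle_broadcast_elementwise; its name, signature, and naive modulo-based broadcasting are assumptions for illustration only, not the ExecuTorch API.

```cpp
#include <cstddef>
#include <iostream>
#include <vector>

// Applies op(a[i % a.size()], b[i % b.size()]) over out.size() elements --
// a crude stand-in for the real broadcasting dispatch.
template <typename CTYPE, typename Op>
void broadcast_elementwise_sketch(
    Op op,
    const std::vector<CTYPE>& a,
    const std::vector<CTYPE>& b,
    std::vector<CTYPE>& out) {
  for (size_t i = 0; i < out.size(); ++i) {
    out[i] = op(a[i % a.size()], b[i % b.size()]);
  }
}

int main() {
  std::vector<float> a = {10.f, 20.f, 30.f, 40.f}; // larger operand
  std::vector<float> b = {2.f};                    // operand to broadcast
  std::vector<float> out(a.size());
  std::vector<float> out_rev(a.size());

  // Forward path: out = a / b.
  broadcast_elementwise_sketch<float>(
      [](auto x, auto y) { return x / y; }, a, b, out);

  // ReverseArguments path: rather than swapping the tensors, swap the
  // operands inside the lambda, so out_rev = b / a.
  broadcast_elementwise_sketch<float>(
      [](auto x, auto y) { return y / x; }, a, b, out_rev);

  for (float v : out) std::cout << v << ' ';      // 5 10 15 20
  std::cout << '\n';
  for (float v : out_rev) std::cout << v << ' ';  // 0.2 0.1 0.0666667 0.05
  std::cout << '\n';
  return 0;
}
```

The same pattern is what lets one helper serve div here as well as add and sub: each caller only supplies the per-element lambda, and the reverse-argument variants differ solely in that lambda.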