Skip to content
This repository was archived by the owner on Aug 21, 2025. It is now read-only.

Commit 5b88cf2

Browse files
committed
Fix build
1 parent 0f5cd36 commit 5b88cf2

File tree

1 file changed

+0
-18
lines changed

1 file changed

+0
-18
lines changed

functorch/csrc/VmapGeneratedPlumbing.h

Lines changed: 0 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -15695,24 +15695,6 @@ at::Tensor scatter_add_dimname_generated_plumbing(const at::Tensor & self, at::D
15695 15695
return makeBatched(std::get<0>(results), std::get<1>(results), cur_level);
15696 15696
}
15697 15697
template <typename batch_rule_t, batch_rule_t batch_rule>
15698-
at::Tensor scatter_reduce_two_generated_plumbing(const at::Tensor & self, int64_t dim, const at::Tensor & index, c10::string_view reduce, c10::optional<int64_t> output_size) {
15699-
c10::impl::ExcludeDispatchKeyGuard guard(kBatchedKey);
15700-
auto maybe_layer = maybeCurrentDynamicLayer();
15701-
TORCH_INTERNAL_ASSERT(maybe_layer.has_value());
15702-
int64_t cur_level = maybe_layer->layerId();
15703-
if (!isBatchedAtLevel(self, cur_level) && !isBatchedAtLevel(index, cur_level)) {
15704-
return at::_ops::scatter_reduce_two::call(self, dim, index, reduce, output_size);
15705-
}
15706-
Tensor self_value;
15707-
optional<int64_t> self_bdim;
15708-
std::tie(self_value, self_bdim) = unwrapTensorAtLevel(self, cur_level);
15709-
Tensor index_value;
15710-
optional<int64_t> index_bdim;
15711-
std::tie(index_value, index_bdim) = unwrapTensorAtLevel(index, cur_level);
15712-
auto results = batch_rule(self_value, self_bdim, dim, index_value, index_bdim, reduce, output_size);
15713-
return makeBatched(std::get<0>(results), std::get<1>(results), cur_level);
15714-
}
15715-
template <typename batch_rule_t, batch_rule_t batch_rule>
15716 15698
at::Tensor & eq__Scalar_generated_plumbing(at::Tensor & self, const at::Scalar & other) {
15717 15699
c10::impl::ExcludeDispatchKeyGuard guard(kBatchedKey);
15718 15700
auto maybe_layer = maybeCurrentDynamicLayer();

0 commit comments

Comments (0)