Skip to content

Commit c16c2f1

Browse files
committed
refactor break_scope
1 parent ee227c2 commit c16c2f1

File tree

2 files changed

+155
-169
lines changed

2 files changed

+155
-169
lines changed

compiler/rustc_mir_build/src/builder/expr/stmt.rs

Lines changed: 3 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -98,12 +98,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
9898
ExprKind::Break { label, value } => {
9999
this.break_scope(block, value, BreakableTarget::Break(label), source_info)
100100
}
101-
ExprKind::ConstContinue { label, value } => this.break_scope(
102-
block,
103-
Some(value),
104-
BreakableTarget::ConstContinue(label),
105-
source_info,
106-
),
101+
ExprKind::ConstContinue { label, value } => {
102+
this.break_const_continuable_scope(block, value, label, source_info)
103+
}
107104
ExprKind::Return { value } => {
108105
this.break_scope(block, value, BreakableTarget::Return, source_info)
109106
}

compiler/rustc_mir_build/src/builder/scope.rs

Lines changed: 152 additions & 163 deletions
Original file line number | Diff line number | Diff line change
@@ -207,7 +207,6 @@ struct IfThenScope {
207207
pub(crate) enum BreakableTarget {
208208
Continue(region::Scope),
209209
Break(region::Scope),
210-
ConstContinue(region::Scope),
211210
Return,
212211
}
213212

@@ -753,168 +752,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
753752
let break_index = get_scope_index(scope);
754753
(break_index, None)
755754
}
756-
BreakableTarget::ConstContinue(scope) => {
757-
let Some(value) = value else {
758-
span_bug!(span, "#[const_continue] must break with a value")
759-
};
760-
761-
// A break can only break out of a scope, so the value should be a scope
762-
let rustc_middle::thir::ExprKind::Scope { value, .. } = self.thir[value].kind
763-
else {
764-
span_bug!(span, "break value must be a scope")
765-
};
766-
767-
let break_index = self
768-
.scopes
769-
.const_continuable_scopes
770-
.iter()
771-
.rposition(|const_continuable_scope| {
772-
const_continuable_scope.region_scope == scope
773-
})
774-
.unwrap_or_else(|| {
775-
span_bug!(span, "no enclosing const-continuable scope found")
776-
});
777-
778-
let scope = &self.scopes.const_continuable_scopes[break_index];
779-
780-
let state_ty = self.local_decls[scope.state_place.as_local().unwrap()].ty;
781-
let discriminant_ty = match state_ty {
782-
ty if ty.is_enum() => ty.discriminant_ty(self.tcx),
783-
ty if ty.is_integral() => ty,
784-
_ => todo!(),
785-
};
786-
787-
let rvalue = match state_ty {
788-
ty if ty.is_enum() => Rvalue::Discriminant(scope.state_place),
789-
ty if ty.is_integral() => Rvalue::Use(Operand::Copy(scope.state_place)),
790-
_ => todo!(),
791-
};
792-
793-
// the PatCtxt is normally used in pattern exhaustiveness checking, but reused here
794-
// because it performs normalization and const evaluation.
795-
let dropless_arena = rustc_arena::DroplessArena::default();
796-
let typeck_results = self.tcx.typeck(self.def_id);
797-
let cx = RustcPatCtxt {
798-
tcx: self.tcx,
799-
typeck_results,
800-
module: self.tcx.parent_module(self.hir_id).to_def_id(),
801-
// FIXME(#132279): We're in a body, should handle opaques.
802-
typing_env: rustc_middle::ty::TypingEnv::non_body_analysis(
803-
self.tcx,
804-
self.def_id,
805-
),
806-
dropless_arena: &dropless_arena,
807-
match_lint_level: self.hir_id,
808-
whole_match_span: Some(rustc_span::Span::default()),
809-
scrut_span: rustc_span::Span::default(),
810-
refutable: true,
811-
known_valid_scrutinee: true,
812-
};
813-
814-
let Some(real_target) =
815-
self.static_pattern_match(&cx, value, &*scope.arms, &scope.built_match_tree)
816-
else {
817-
self.tcx.dcx().emit_fatal(ConstContinueUnknownJumpTarget { span })
818-
};
819-
820-
self.block_context.push(BlockFrame::SubExpr);
821-
let state_place = scope.state_place;
822-
block = self.expr_into_dest(state_place, block, value).into_block();
823-
self.block_context.pop();
824-
825-
let discr = self.temp(discriminant_ty, source_info.span);
826-
let scope_index = self.scopes.scope_index(
827-
self.scopes.const_continuable_scopes[break_index].region_scope,
828-
span,
829-
);
830-
let scope = &mut self.scopes.const_continuable_scopes[break_index];
831-
self.cfg.push_assign(block, source_info, discr, rvalue);
832-
let drop_and_continue_block = self.cfg.start_new_block();
833-
let imaginary_target = self.cfg.start_new_block();
834-
self.cfg.terminate(
835-
block,
836-
source_info,
837-
TerminatorKind::FalseEdge {
838-
real_target: drop_and_continue_block,
839-
imaginary_target,
840-
},
841-
);
842-
843-
let drops = &mut scope.break_drops;
844-
845-
let drop_idx = self.scopes.scopes[scope_index + 1..]
846-
.iter()
847-
.flat_map(|scope| &scope.drops)
848-
.fold(ROOT_NODE, |drop_idx, &drop| drops.add_drop(drop, drop_idx));
849-
850-
drops.add_entry_point(imaginary_target, drop_idx);
851-
852-
self.cfg.terminate(imaginary_target, source_info, TerminatorKind::UnwindResume);
853-
854-
// FIXME add to drop tree for loop_head
855-
856-
let region_scope = scope.region_scope;
857-
let scope_index = self.scopes.scope_index(region_scope, span);
858-
let mut drops = DropTree::new();
859-
860-
let drop_idx = self.scopes.scopes[scope_index + 1..]
861-
.iter()
862-
.flat_map(|scope| &scope.drops)
863-
.fold(ROOT_NODE, |drop_idx, &drop| drops.add_drop(drop, drop_idx));
864-
865-
drops.add_entry_point(drop_and_continue_block, drop_idx);
866-
867-
// `build_drop_trees` doesn't have access to our source_info, so we
868-
// create a dummy terminator now. `TerminatorKind::UnwindResume` is used
869-
// because MIR type checking will panic if it hasn't been overwritten.
870-
// (See `<ExitScopes as DropTreeBuilder>::link_entry_point`.)
871-
self.cfg.terminate(
872-
drop_and_continue_block,
873-
source_info,
874-
TerminatorKind::UnwindResume,
875-
);
876-
877-
{
878-
let this = &mut *self;
879-
let blocks = drops.build_mir::<ExitScopes>(&mut this.cfg, Some(real_target));
880-
//let is_coroutine = this.coroutine.is_some();
881-
882-
/*// Link the exit drop tree to unwind drop tree.
883-
if drops.drops.iter().any(|drop_node| drop_node.data.kind == DropKind::Value) {
884-
let unwind_target = this.diverge_cleanup_target(region_scope, span);
885-
let mut unwind_indices = IndexVec::from_elem_n(unwind_target, 1);
886-
for (drop_idx, drop_node) in drops.drops.iter_enumerated().skip(1) {
887-
match drop_node.data.kind {
888-
DropKind::Storage | DropKind::ForLint => {
889-
if is_coroutine {
890-
let unwind_drop = this.scopes.unwind_drops.add_drop(
891-
drop_node.data,
892-
unwind_indices[drop_node.next],
893-
);
894-
unwind_indices.push(unwind_drop);
895-
} else {
896-
unwind_indices.push(unwind_indices[drop_node.next]);
897-
}
898-
}
899-
DropKind::Value => {
900-
let unwind_drop = this
901-
.scopes
902-
.unwind_drops
903-
.add_drop(drop_node.data, unwind_indices[drop_node.next]);
904-
this.scopes.unwind_drops.add_entry_point(
905-
blocks[drop_idx].unwrap(),
906-
unwind_indices[drop_node.next],
907-
);
908-
unwind_indices.push(unwind_drop);
909-
}
910-
}
911-
}
912-
}*/
913-
blocks[ROOT_NODE].map(BasicBlock::unit)
914-
};
915-
916-
return self.cfg.start_new_block().unit();
917-
}
918755
};
919756

920757
match (destination, value) {
@@ -975,6 +812,158 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
975812
self.cfg.start_new_block().unit()
976813
}
977814

815+
/// Sets up the drops for jumping from `block` to `scope`.
816+
pub(crate) fn break_const_continuable_scope(
817+
&mut self,
818+
mut block: BasicBlock,
819+
value: ExprId,
820+
scope: region::Scope,
821+
source_info: SourceInfo,
822+
) -> BlockAnd<()> {
823+
let span = source_info.span;
824+
825+
// A break can only break out of a scope, so the value should be a scope.
826+
let rustc_middle::thir::ExprKind::Scope { value, .. } = self.thir[value].kind else {
827+
span_bug!(span, "break value must be a scope")
828+
};
829+
830+
let break_index = self
831+
.scopes
832+
.const_continuable_scopes
833+
.iter()
834+
.rposition(|const_continuable_scope| const_continuable_scope.region_scope == scope)
835+
.unwrap_or_else(|| span_bug!(span, "no enclosing const-continuable scope found"));
836+
837+
let scope = &self.scopes.const_continuable_scopes[break_index];
838+
839+
let state_ty = self.local_decls[scope.state_place.as_local().unwrap()].ty;
840+
let discriminant_ty = match state_ty {
841+
ty if ty.is_enum() => ty.discriminant_ty(self.tcx),
842+
ty if ty.is_integral() => ty,
843+
_ => todo!(),
844+
};
845+
846+
let rvalue = match state_ty {
847+
ty if ty.is_enum() => Rvalue::Discriminant(scope.state_place),
848+
ty if ty.is_integral() => Rvalue::Use(Operand::Copy(scope.state_place)),
849+
_ => todo!(),
850+
};
851+
852+
// the PatCtxt is normally used in pattern exhaustiveness checking, but reused here
853+
// because it performs normalization and const evaluation.
854+
let dropless_arena = rustc_arena::DroplessArena::default();
855+
let typeck_results = self.tcx.typeck(self.def_id);
856+
let cx = RustcPatCtxt {
857+
tcx: self.tcx,
858+
typeck_results,
859+
module: self.tcx.parent_module(self.hir_id).to_def_id(),
860+
// FIXME(#132279): We're in a body, should handle opaques.
861+
typing_env: rustc_middle::ty::TypingEnv::non_body_analysis(self.tcx, self.def_id),
862+
dropless_arena: &dropless_arena,
863+
match_lint_level: self.hir_id,
864+
whole_match_span: Some(rustc_span::Span::default()),
865+
scrut_span: rustc_span::Span::default(),
866+
refutable: true,
867+
known_valid_scrutinee: true,
868+
};
869+
870+
let Some(real_target) =
871+
self.static_pattern_match(&cx, value, &*scope.arms, &scope.built_match_tree)
872+
else {
873+
self.tcx.dcx().emit_fatal(ConstContinueUnknownJumpTarget { span })
874+
};
875+
876+
self.block_context.push(BlockFrame::SubExpr);
877+
let state_place = scope.state_place;
878+
block = self.expr_into_dest(state_place, block, value).into_block();
879+
self.block_context.pop();
880+
881+
let discr = self.temp(discriminant_ty, source_info.span);
882+
let scope_index = self
883+
.scopes
884+
.scope_index(self.scopes.const_continuable_scopes[break_index].region_scope, span);
885+
let scope = &mut self.scopes.const_continuable_scopes[break_index];
886+
self.cfg.push_assign(block, source_info, discr, rvalue);
887+
let drop_and_continue_block = self.cfg.start_new_block();
888+
let imaginary_target = self.cfg.start_new_block();
889+
self.cfg.terminate(
890+
block,
891+
source_info,
892+
TerminatorKind::FalseEdge { real_target: drop_and_continue_block, imaginary_target },
893+
);
894+
895+
let drops = &mut scope.break_drops;
896+
897+
let drop_idx = self.scopes.scopes[scope_index + 1..]
898+
.iter()
899+
.flat_map(|scope| &scope.drops)
900+
.fold(ROOT_NODE, |drop_idx, &drop| drops.add_drop(drop, drop_idx));
901+
902+
drops.add_entry_point(imaginary_target, drop_idx);
903+
904+
self.cfg.terminate(imaginary_target, source_info, TerminatorKind::UnwindResume);
905+
906+
// FIXME add to drop tree for loop_head
907+
908+
let region_scope = scope.region_scope;
909+
let scope_index = self.scopes.scope_index(region_scope, span);
910+
let mut drops = DropTree::new();
911+
912+
let drop_idx = self.scopes.scopes[scope_index + 1..]
913+
.iter()
914+
.flat_map(|scope| &scope.drops)
915+
.fold(ROOT_NODE, |drop_idx, &drop| drops.add_drop(drop, drop_idx));
916+
917+
drops.add_entry_point(drop_and_continue_block, drop_idx);
918+
919+
// `build_drop_trees` doesn't have access to our source_info, so we
920+
// create a dummy terminator now. `TerminatorKind::UnwindResume` is used
921+
// because MIR type checking will panic if it hasn't been overwritten.
922+
// (See `<ExitScopes as DropTreeBuilder>::link_entry_point`.)
923+
self.cfg.terminate(drop_and_continue_block, source_info, TerminatorKind::UnwindResume);
924+
925+
{
926+
let this = &mut *self;
927+
let blocks = drops.build_mir::<ExitScopes>(&mut this.cfg, Some(real_target));
928+
//let is_coroutine = this.coroutine.is_some();
929+
930+
/*// Link the exit drop tree to unwind drop tree.
931+
if drops.drops.iter().any(|drop_node| drop_node.data.kind == DropKind::Value) {
932+
let unwind_target = this.diverge_cleanup_target(region_scope, span);
933+
let mut unwind_indices = IndexVec::from_elem_n(unwind_target, 1);
934+
for (drop_idx, drop_node) in drops.drops.iter_enumerated().skip(1) {
935+
match drop_node.data.kind {
936+
DropKind::Storage | DropKind::ForLint => {
937+
if is_coroutine {
938+
let unwind_drop = this.scopes.unwind_drops.add_drop(
939+
drop_node.data,
940+
unwind_indices[drop_node.next],
941+
);
942+
unwind_indices.push(unwind_drop);
943+
} else {
944+
unwind_indices.push(unwind_indices[drop_node.next]);
945+
}
946+
}
947+
DropKind::Value => {
948+
let unwind_drop = this
949+
.scopes
950+
.unwind_drops
951+
.add_drop(drop_node.data, unwind_indices[drop_node.next]);
952+
this.scopes.unwind_drops.add_entry_point(
953+
blocks[drop_idx].unwrap(),
954+
unwind_indices[drop_node.next],
955+
);
956+
unwind_indices.push(unwind_drop);
957+
}
958+
}
959+
}
960+
}*/
961+
blocks[ROOT_NODE].map(BasicBlock::unit)
962+
};
963+
964+
return self.cfg.start_new_block().unit();
965+
}
966+
978967
/// Sets up the drops for breaking from `block` due to an `if` condition
979968
/// that turned out to be false.
980969
///

0 commit comments

Comments (0)