Rollup of 11 pull requests #122337

Closed
wants to merge 31 commits
Changes from 1 commit
31 commits
9a77ec9
Rename `-Zno_parallel_llvm` -> `-Zno_parallel_backend`
WaffleLapkin Feb 15, 2024
f368922
Allow codegen backends to opt-out of parallel codegen
WaffleLapkin Feb 15, 2024
8bb49e2
Propagate the resolved type of assoc const bindings via query feeding
fmease Dec 31, 2023
b94498a
Use existing query feeding workarounds
oli-obk Feb 21, 2024
858d336
Slightly simplify feeding of assoc const eq bounds
fmease Feb 23, 2024
d9a2886
add comment and test: we do not do value-based reasoning for promotio…
RalfJung Feb 29, 2024
30fa6a8
Rename `DropTreeBuilder::add_entry` to `link_entry_point`
Zalathar Mar 6, 2024
3bd8df9
Assert that `link_entry_point` sees the expected dummy terminator
Zalathar Mar 6, 2024
fbdac30
Rename `DropTree::add_entry` to `add_entry_point`
Zalathar Mar 6, 2024
5ba70bd
Replace tuples in `DropTree` with named structs
Zalathar Mar 6, 2024
d673fd8
Remove the unused `field_remapping` field from `TypeLowering`
beetrees Mar 8, 2024
bf47df8
interpret: do not call machine read hooks during validation
RalfJung Mar 9, 2024
58f6aaa
Improve diagnostics for parenthesized type arguments
wutchzone Mar 7, 2024
3830510
Ignore tests w/ current/next revisions from compare-mode=next-solver
compiler-errors Mar 11, 2024
73fc170
Store backtrace for must_produce_diag
compiler-errors Mar 10, 2024
01e6b43
Mark some next-solver-behavior tests explicitly with revisions
compiler-errors Mar 11, 2024
aea60b0
unix_sigpipe: Replace `inherit` with `sig_dfl` in syntax tests
Enselic Feb 9, 2024
816dc96
bootstrap readme: fix, improve, update
tshepang Mar 11, 2024
279465b
const-checking: add some corner case tests, and fix some nits
RalfJung Mar 2, 2024
fb802f2
promote-not: add test that distinguishes promotion from outer scope rule
RalfJung Mar 11, 2024
8f33766
Rollup merge of #116791 - WaffleLapkin:unparallel-backends, r=oli-obk
matthiaskrgr Mar 11, 2024
ccbeb90
Rollup merge of #119385 - fmease:assoc-const-eq-fixes-2, r=oli-obk,cj…
matthiaskrgr Mar 11, 2024
f477194
Rollup merge of #121893 - RalfJung:const-interior-mut-tests, r=oli-obk
matthiaskrgr Mar 11, 2024
627428c
Rollup merge of #122080 - Zalathar:drop-tree, r=oli-obk
matthiaskrgr Mar 11, 2024
23e4968
Rollup merge of #122152 - wutchzone:120892, r=fmease
matthiaskrgr Mar 11, 2024
0623371
Rollup merge of #122166 - beetrees:remove-field-remapping, r=davidtwco
matthiaskrgr Mar 11, 2024
cb48f18
Rollup merge of #122249 - RalfJung:machine-read-hook, r=oli-obk
matthiaskrgr Mar 11, 2024
1961865
Rollup merge of #122299 - compiler-errors:bt-for-must-diag, r=nnether…
matthiaskrgr Mar 11, 2024
1d4ba52
Rollup merge of #122318 - compiler-errors:next-solver-tests, r=lcnr
matthiaskrgr Mar 11, 2024
4a2a60a
Rollup merge of #122328 - Enselic:sig_dfl-not-inherit, r=davidtwco
matthiaskrgr Mar 11, 2024
2099e04
Rollup merge of #122330 - tshepang:patch-1, r=clubby789
matthiaskrgr Mar 11, 2024
Replace tuples in DropTree with named structs
This allows us to use real field names instead of tuple element numbers.

Renaming `previous_drops` to `existing_drops_map` clarifies that "previous" was
unrelated to drop order.
Zalathar committed Mar 6, 2024
commit 5ba70bd3ec6cedfe49332226dc9dbfe427698ad3
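To see the shape of this refactor in isolation, here is a minimal, self-contained sketch (illustrative only, not code from this PR; `DropData` and `DropIdx` below are simplified stand-ins for the real compiler types) of why named fields read better than tuple element numbers:

```rust
// Before: a positional tuple, accessed as `.0` and `.1`.
// After: a named struct, accessed as `.data` and `.next`.

type DropIdx = usize; // stand-in for the real index type

#[derive(Clone, Copy, Debug)]
struct DropData {
    local: u32, // stand-in for `Local`
}

struct DropNode {
    data: DropData,
    next: DropIdx,
}

fn next_via_tuple(drops: &[(DropData, DropIdx)], i: usize) -> DropIdx {
    drops[i].1 // opaque: the reader must remember what `.1` means
}

fn next_via_struct(drops: &[DropNode], i: usize) -> DropIdx {
    drops[i].next // self-documenting field name
}

fn main() {
    let data = DropData { local: 3 };
    assert_eq!(next_via_tuple(&[(data, 7)], 0), 7);
    assert_eq!(next_via_struct(&[DropNode { data, next: 7 }], 0), 7);
}
```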
107 changes: 64 additions & 43 deletions compiler/rustc_mir_build/src/build/scope.rs
```diff
@@ -203,16 +203,31 @@ const ROOT_NODE: DropIdx = DropIdx::from_u32(0);
 /// in `build_mir`.
 #[derive(Debug)]
 struct DropTree {
-    /// Drops in the tree.
-    drops: IndexVec<DropIdx, (DropData, DropIdx)>,
-    /// Map for finding the inverse of the `next_drop` relation:
-    ///
-    /// `previous_drops[(drops[i].1, drops[i].0.local, drops[i].0.kind)] == i`
-    previous_drops: FxHashMap<(DropIdx, Local, DropKind), DropIdx>,
+    /// Nodes in the drop tree, containing drop data and a link to the next node.
+    drops: IndexVec<DropIdx, DropNode>,
+    /// Map for finding the index of an existing node, given its contents.
+    existing_drops_map: FxHashMap<DropNodeKey, DropIdx>,
     /// Edges into the `DropTree` that need to be added once it's lowered.
     entry_points: Vec<(DropIdx, BasicBlock)>,
 }
 
+/// A single node in the drop tree.
+#[derive(Debug)]
+struct DropNode {
+    /// Info about the drop to be performed at this node in the drop tree.
+    data: DropData,
+    /// Index of the "next" drop to perform (in drop order, not declaration order).
+    next: DropIdx,
+}
+
+/// Subset of [`DropNode`] used for reverse lookup in a hash table.
+#[derive(Debug, PartialEq, Eq, Hash)]
+struct DropNodeKey {
+    next: DropIdx,
+    local: Local,
+    kind: DropKind,
+}
+
 impl Scope {
     /// Whether there's anything to do for the cleanup path, that is,
     /// when unwinding through this scope. This includes destructors,
@@ -258,17 +273,22 @@ impl DropTree {
         let fake_source_info = SourceInfo::outermost(DUMMY_SP);
         let fake_data =
             DropData { source_info: fake_source_info, local: Local::MAX, kind: DropKind::Storage };
-        let drop_idx = DropIdx::MAX;
-        let drops = IndexVec::from_elem_n((fake_data, drop_idx), 1);
-        Self { drops, entry_points: Vec::new(), previous_drops: FxHashMap::default() }
+        let drops = IndexVec::from_raw(vec![DropNode { data: fake_data, next: DropIdx::MAX }]);
+        Self { drops, entry_points: Vec::new(), existing_drops_map: FxHashMap::default() }
     }
 
-    fn add_drop(&mut self, drop: DropData, next: DropIdx) -> DropIdx {
+    /// Adds a node to the drop tree, consisting of drop data and the index of
+    /// the "next" drop (in drop order), which could be the sentinel [`ROOT_NODE`].
+    ///
+    /// If there is already an equivalent node in the tree, nothing is added, and
+    /// that node's index is returned. Otherwise, the new node's index is returned.
+    fn add_drop(&mut self, data: DropData, next: DropIdx) -> DropIdx {
         let drops = &mut self.drops;
         *self
-            .previous_drops
-            .entry((next, drop.local, drop.kind))
-            .or_insert_with(|| drops.push((drop, next)))
+            .existing_drops_map
+            .entry(DropNodeKey { next, local: data.local, kind: data.kind })
+            // Create a new node, and also add its index to the map.
+            .or_insert_with(|| drops.push(DropNode { data, next }))
     }
 
     /// Registers `from` as an entry point to this drop tree, at `to`.
@@ -330,7 +350,7 @@ impl DropTree {
         let entry_points = &mut self.entry_points;
         entry_points.sort();
 
-        for (drop_idx, drop_data) in self.drops.iter_enumerated().rev() {
+        for (drop_idx, drop_node) in self.drops.iter_enumerated().rev() {
             if entry_points.last().is_some_and(|entry_point| entry_point.0 == drop_idx) {
                 let block = *blocks[drop_idx].get_or_insert_with(|| T::make_block(cfg));
                 needs_block[drop_idx] = Block::Own;
@@ -348,10 +368,10 @@ impl DropTree {
                     blocks[drop_idx] = blocks[pred];
                 }
             }
-            if let DropKind::Value = drop_data.0.kind {
-                needs_block[drop_data.1] = Block::Own;
+            if let DropKind::Value = drop_node.data.kind {
+                needs_block[drop_node.next] = Block::Own;
             } else if drop_idx != ROOT_NODE {
-                match &mut needs_block[drop_data.1] {
+                match &mut needs_block[drop_node.next] {
                     pred @ Block::None => *pred = Block::Shares(drop_idx),
                     pred @ Block::Shares(_) => *pred = Block::Own,
                     Block::Own => (),
@@ -368,34 +388,35 @@ impl DropTree {
         cfg: &mut CFG<'tcx>,
         blocks: &IndexSlice<DropIdx, Option<BasicBlock>>,
     ) {
-        for (drop_idx, drop_data) in self.drops.iter_enumerated().rev() {
+        for (drop_idx, drop_node) in self.drops.iter_enumerated().rev() {
             let Some(block) = blocks[drop_idx] else { continue };
-            match drop_data.0.kind {
+            match drop_node.data.kind {
                 DropKind::Value => {
                     let terminator = TerminatorKind::Drop {
-                        target: blocks[drop_data.1].unwrap(),
+                        target: blocks[drop_node.next].unwrap(),
                         // The caller will handle this if needed.
                         unwind: UnwindAction::Terminate(UnwindTerminateReason::InCleanup),
-                        place: drop_data.0.local.into(),
+                        place: drop_node.data.local.into(),
                         replace: false,
                     };
-                    cfg.terminate(block, drop_data.0.source_info, terminator);
+                    cfg.terminate(block, drop_node.data.source_info, terminator);
                 }
                 // Root nodes don't correspond to a drop.
                 DropKind::Storage if drop_idx == ROOT_NODE => {}
                 DropKind::Storage => {
                     let stmt = Statement {
-                        source_info: drop_data.0.source_info,
-                        kind: StatementKind::StorageDead(drop_data.0.local),
+                        source_info: drop_node.data.source_info,
+                        kind: StatementKind::StorageDead(drop_node.data.local),
                     };
                     cfg.push(block, stmt);
-                    let target = blocks[drop_data.1].unwrap();
+                    let target = blocks[drop_node.next].unwrap();
                     if target != block {
                         // Diagnostics don't use this `Span` but debuginfo
                         // might. Since we don't want breakpoints to be placed
                         // here, especially when this is on an unwind path, we
                         // use `DUMMY_SP`.
-                        let source_info = SourceInfo { span: DUMMY_SP, ..drop_data.0.source_info };
+                        let source_info =
+                            SourceInfo { span: DUMMY_SP, ..drop_node.data.source_info };
                         let terminator = TerminatorKind::Goto { target };
                         cfg.terminate(block, source_info, terminator);
                     }
@@ -1277,9 +1298,9 @@ fn build_scope_drops<'tcx>(
                 // `unwind_to` should drop the value that we're about to
                 // schedule. If dropping this value panics, then we continue
                 // with the *next* value on the unwind path.
-                debug_assert_eq!(unwind_drops.drops[unwind_to].0.local, drop_data.local);
-                debug_assert_eq!(unwind_drops.drops[unwind_to].0.kind, drop_data.kind);
-                unwind_to = unwind_drops.drops[unwind_to].1;
+                debug_assert_eq!(unwind_drops.drops[unwind_to].data.local, drop_data.local);
+                debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind);
+                unwind_to = unwind_drops.drops[unwind_to].next;
 
                 // If the operand has been moved, and we are not on an unwind
                 // path, then don't generate the drop. (We only take this into
@@ -1306,9 +1327,9 @@ fn build_scope_drops<'tcx>(
             }
             DropKind::Storage => {
                 if storage_dead_on_unwind {
-                    debug_assert_eq!(unwind_drops.drops[unwind_to].0.local, drop_data.local);
-                    debug_assert_eq!(unwind_drops.drops[unwind_to].0.kind, drop_data.kind);
-                    unwind_to = unwind_drops.drops[unwind_to].1;
+                    debug_assert_eq!(unwind_drops.drops[unwind_to].data.local, drop_data.local);
+                    debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind);
+                    unwind_to = unwind_drops.drops[unwind_to].next;
                 }
                 // Only temps and vars need their storage dead.
                 assert!(local.index() > arg_count);
@@ -1338,30 +1359,30 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> {
         let is_coroutine = self.coroutine.is_some();
 
         // Link the exit drop tree to unwind drop tree.
-        if drops.drops.iter().any(|(drop, _)| drop.kind == DropKind::Value) {
+        if drops.drops.iter().any(|drop_node| drop_node.data.kind == DropKind::Value) {
             let unwind_target = self.diverge_cleanup_target(else_scope, span);
             let mut unwind_indices = IndexVec::from_elem_n(unwind_target, 1);
-            for (drop_idx, drop_data) in drops.drops.iter_enumerated().skip(1) {
-                match drop_data.0.kind {
+            for (drop_idx, drop_node) in drops.drops.iter_enumerated().skip(1) {
+                match drop_node.data.kind {
                     DropKind::Storage => {
                         if is_coroutine {
                             let unwind_drop = self
                                 .scopes
                                 .unwind_drops
-                                .add_drop(drop_data.0, unwind_indices[drop_data.1]);
+                                .add_drop(drop_node.data, unwind_indices[drop_node.next]);
                             unwind_indices.push(unwind_drop);
                         } else {
-                            unwind_indices.push(unwind_indices[drop_data.1]);
+                            unwind_indices.push(unwind_indices[drop_node.next]);
                         }
                     }
                     DropKind::Value => {
                         let unwind_drop = self
                             .scopes
                             .unwind_drops
-                            .add_drop(drop_data.0, unwind_indices[drop_data.1]);
+                            .add_drop(drop_node.data, unwind_indices[drop_node.next]);
                         self.scopes.unwind_drops.add_entry_point(
                             blocks[drop_idx].unwrap(),
-                            unwind_indices[drop_data.1],
+                            unwind_indices[drop_node.next],
                         );
                         unwind_indices.push(unwind_drop);
                     }
@@ -1412,10 +1433,10 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> {
         // prevent drop elaboration from creating drop flags that would have
         // to be captured by the coroutine. I'm not sure how important this
         // optimization is, but it is here.
-        for (drop_idx, drop_data) in drops.drops.iter_enumerated() {
-            if let DropKind::Value = drop_data.0.kind {
-                debug_assert!(drop_data.1 < drops.drops.next_index());
-                drops.entry_points.push((drop_data.1, blocks[drop_idx].unwrap()));
+        for (drop_idx, drop_node) in drops.drops.iter_enumerated() {
+            if let DropKind::Value = drop_node.data.kind {
+                debug_assert!(drop_node.next < drops.drops.next_index());
+                drops.entry_points.push((drop_node.next, blocks[drop_idx].unwrap()));
             }
         }
         Self::build_unwind_tree(cfg, drops, fn_span, resume_block);
```
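As its new doc comment says, `add_drop` keeps the existing interning idiom: the node's contents serve as a hash-map key, and a new node is pushed only when no equivalent node exists. Here is a standalone sketch of that idiom under simplified assumptions (plain `Vec`/`HashMap` instead of `IndexVec`/`FxHashMap`, a `u32` local instead of `Local`; an illustration, not the compiler's code):

```rust
use std::collections::HashMap;

type DropIdx = usize;

// Key type mirroring `DropNodeKey`: the node's contents, used for lookup.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct DropNodeKey {
    next: DropIdx,
    local: u32,
}

struct DropNode {
    local: u32,
    next: DropIdx,
}

struct DropTree {
    drops: Vec<DropNode>,
    existing_drops_map: HashMap<DropNodeKey, DropIdx>,
}

impl DropTree {
    /// Returns the index of an equivalent existing node, or pushes a new
    /// node and records its index in the map.
    fn add_drop(&mut self, local: u32, next: DropIdx) -> DropIdx {
        // Disjoint field borrow, so the closure can push while the map entry
        // is held (the same trick the real `add_drop` uses).
        let drops = &mut self.drops;
        *self
            .existing_drops_map
            .entry(DropNodeKey { next, local })
            .or_insert_with(|| {
                drops.push(DropNode { local, next });
                drops.len() - 1
            })
    }
}

fn main() {
    let mut tree = DropTree { drops: Vec::new(), existing_drops_map: HashMap::new() };
    let a = tree.add_drop(1, 0);
    let b = tree.add_drop(1, 0); // same contents, so the same index comes back
    assert_eq!(a, b);
    assert_eq!(tree.drops.len(), 1);
}
```

The payoff of introducing `DropNodeKey` over the old `(DropIdx, Local, DropKind)` tuple is the same as for `DropNode` itself: the map's key fields are named at every construction site instead of being positional.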