Skip to content

Commit 520cafa

Browse files
authored
Handle fixed stack slots in the move resolver (#78)
Fixed stack slots are treated as `PReg`s by most of the register allocator, but need some additional handling in the move resolver to avoid generating stack-to-stack moves.
1 parent 1495c1e commit 520cafa

5 files changed

Lines changed: 75 additions & 23 deletions

File tree

fuzz/fuzz_targets/moves.rs

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,15 @@ use regalloc2::fuzzing::moves::{MoveAndScratchResolver, ParallelMoves};
1010
use regalloc2::{Allocation, PReg, RegClass, SpillSlot};
1111
use std::collections::{HashMap, HashSet};
1212

13+
fn is_stack_alloc(alloc: Allocation) -> bool {
14+
// Treat registers 20..=29 as fixed stack slots.
15+
if let Some(reg) = alloc.as_reg() {
16+
reg.index() > 20
17+
} else {
18+
alloc.is_stack()
19+
}
20+
}
21+
1322
#[derive(Clone, Debug)]
1423
struct TestCase {
1524
moves: Vec<(Allocation, Allocation)>,
@@ -82,7 +91,8 @@ fuzz_target!(|testcase: TestCase| {
8291
Allocation::stack(SpillSlot::new(slot, RegClass::Int))
8392
};
8493
let preferred_victim = PReg::new(0, RegClass::Int);
85-
let scratch_resolver = MoveAndScratchResolver::new(get_reg, get_stackslot, preferred_victim);
94+
let scratch_resolver =
95+
MoveAndScratchResolver::new(get_reg, get_stackslot, is_stack_alloc, preferred_victim);
8696
let moves = scratch_resolver.compute(moves);
8797
log::trace!("resolved moves: {:?}", moves);
8898

@@ -97,7 +107,7 @@ fuzz_target!(|testcase: TestCase| {
97107
// Simulate the sequence of moves.
98108
let mut locations: HashMap<Allocation, Allocation> = HashMap::new();
99109
for (src, dst, _) in moves {
100-
if src.is_stack() && dst.is_stack() {
110+
if is_stack_alloc(src) && is_stack_alloc(dst) {
101111
panic!("Stack-to-stack move!");
102112
}
103113

src/checker.rs

Lines changed: 26 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -97,7 +97,7 @@
9797

9898
use crate::{
9999
Allocation, AllocationKind, Block, Edit, Function, Inst, InstOrEdit, InstPosition, MachineEnv,
100-
Operand, OperandConstraint, OperandKind, OperandPos, Output, PReg, VReg,
100+
Operand, OperandConstraint, OperandKind, OperandPos, Output, PReg, PRegSet, VReg,
101101
};
102102
use fxhash::{FxHashMap, FxHashSet};
103103
use smallvec::{smallvec, SmallVec};
@@ -169,6 +169,10 @@ pub enum CheckerError {
169169
alloc: Allocation,
170170
vregs: FxHashSet<VReg>,
171171
},
172+
StackToStackMove {
173+
into: Allocation,
174+
from: Allocation,
175+
},
172176
}
173177

174178
/// Abstract state for an allocation.
@@ -560,7 +564,20 @@ impl CheckerState {
560564
}
561565
}
562566
}
563-
&CheckerInst::ParallelMove { .. } | &CheckerInst::Move { .. } => {
567+
&CheckerInst::Move { into, from } => {
568+
// Ensure that the allocator never returns stack-to-stack moves.
569+
let is_stack = |alloc: Allocation| {
570+
if let Some(reg) = alloc.as_reg() {
571+
checker.stack_pregs.contains(reg)
572+
} else {
573+
alloc.is_stack()
574+
}
575+
};
576+
if is_stack(into) && is_stack(from) {
577+
return Err(CheckerError::StackToStackMove { into, from });
578+
}
579+
}
580+
&CheckerInst::ParallelMove { .. } => {
564581
// This doesn't need verification; we just update
565582
// according to the move semantics in the step
566583
// function below.
@@ -813,6 +830,7 @@ pub struct Checker<'a, F: Function> {
813830
edge_insts: FxHashMap<(Block, Block), Vec<CheckerInst>>,
814831
reftyped_vregs: FxHashSet<VReg>,
815832
machine_env: &'a MachineEnv,
833+
stack_pregs: PRegSet,
816834
}
817835

818836
impl<'a, F: Function> Checker<'a, F> {
@@ -841,13 +859,19 @@ impl<'a, F: Function> Checker<'a, F> {
841859

842860
bb_in.insert(f.entry_block(), CheckerState::initial_with_pinned_vregs(f));
843861

862+
let mut stack_pregs = PRegSet::empty();
863+
for &preg in &machine_env.fixed_stack_slots {
864+
stack_pregs.add(preg);
865+
}
866+
844867
Checker {
845868
f,
846869
bb_in,
847870
bb_insts,
848871
edge_insts,
849872
reftyped_vregs,
850873
machine_env,
874+
stack_pregs,
851875
}
852876
}
853877

src/ion/moves.rs

Lines changed: 13 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -39,16 +39,6 @@ impl<'a, F: Function> Env<'a, F> {
3939
pos == self.cfginfo.block_exit[block.index()]
4040
}
4141

42-
fn allocation_is_stack(&self, alloc: Allocation) -> bool {
43-
if alloc.is_stack() {
44-
true
45-
} else if let Some(preg) = alloc.as_reg() {
46-
self.pregs[preg.index()].is_stack
47-
} else {
48-
false
49-
}
50-
}
51-
5242
pub fn insert_move(
5343
&mut self,
5444
pos: ProgPoint,
@@ -1054,10 +1044,21 @@ impl<'a, F: Function> Env<'a, F> {
10541044
// below.
10551045
Allocation::stack(SpillSlot::new(SpillSlot::MAX - idx, regclass))
10561046
};
1047+
let is_stack_alloc = |alloc: Allocation| {
1048+
if let Some(preg) = alloc.as_reg() {
1049+
self.pregs[preg.index()].is_stack
1050+
} else {
1051+
alloc.is_stack()
1052+
}
1053+
};
10571054
let preferred_victim = self.preferred_victim_by_class[regclass as usize];
10581055

1059-
let scratch_resolver =
1060-
MoveAndScratchResolver::new(get_reg, get_stackslot, preferred_victim);
1056+
let scratch_resolver = MoveAndScratchResolver::new(
1057+
get_reg,
1058+
get_stackslot,
1059+
is_stack_alloc,
1060+
preferred_victim,
1061+
);
10611062

10621063
let resolved = scratch_resolver.compute(resolved);
10631064

src/lib.rs

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -180,6 +180,13 @@ impl PRegSet {
180180
Self { bits: 0 }
181181
}
182182

183+
/// Returns whether the given register is part of the set.
184+
pub fn contains(&self, reg: PReg) -> bool {
185+
let bit = reg.index();
186+
debug_assert!(bit < 128);
187+
self.bits & 1u128 << bit != 0
188+
}
189+
183190
/// Add a physical register (PReg) to the set, returning the new value.
184191
pub const fn with(self, reg: PReg) -> Self {
185192
let bit = reg.index();

src/moves.rs

Lines changed: 17 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -285,11 +285,11 @@ impl<T> MoveVecWithScratch<T> {
285285
}
286286

287287
/// Do any moves go from stack to stack?
288-
pub fn stack_to_stack(&self) -> bool {
288+
pub fn stack_to_stack(&self, is_stack_alloc: impl Fn(Allocation) -> bool) -> bool {
289289
match self {
290290
MoveVecWithScratch::NoScratch(moves) | MoveVecWithScratch::Scratch(moves) => moves
291291
.iter()
292-
.any(|(src, dst, _)| src.is_stack() && dst.is_stack()),
292+
.any(|&(src, dst, _)| is_stack_alloc(src) && is_stack_alloc(dst)),
293293
}
294294
}
295295
}
@@ -320,10 +320,11 @@ impl<T> MoveVecWithScratch<T> {
320320
/// Sometimes move elision will be able to clean this up a bit. But,
321321
/// for simplicity reasons, let's keep the concerns separated! So we
322322
/// always do the full expansion above.
323-
pub struct MoveAndScratchResolver<GetReg, GetStackSlot>
323+
pub struct MoveAndScratchResolver<GetReg, GetStackSlot, IsStackAlloc>
324324
where
325325
GetReg: FnMut() -> Option<Allocation>,
326326
GetStackSlot: FnMut() -> Allocation,
327+
IsStackAlloc: Fn(Allocation) -> bool,
327328
{
328329
/// Scratch register for stack-to-stack move expansion.
329330
stack_stack_scratch_reg: Option<Allocation>,
@@ -335,32 +336,41 @@ where
335336
find_free_reg: GetReg,
336337
/// Closure that gets us a stackslot, if needed.
337338
get_stackslot: GetStackSlot,
339+
/// Closure to determine whether an `Allocation` refers to a stack slot.
340+
is_stack_alloc: IsStackAlloc,
338341
/// The victim PReg to evict to another stackslot at every
339342
/// stack-to-stack move if a free PReg is not otherwise
340343
/// available. Provided by caller and statically chosen. This is a
341344
/// very last-ditch option, so static choice is OK.
342345
victim: PReg,
343346
}
344347

345-
impl<GetReg, GetStackSlot> MoveAndScratchResolver<GetReg, GetStackSlot>
348+
impl<GetReg, GetStackSlot, IsStackAlloc> MoveAndScratchResolver<GetReg, GetStackSlot, IsStackAlloc>
346349
where
347350
GetReg: FnMut() -> Option<Allocation>,
348351
GetStackSlot: FnMut() -> Allocation,
352+
IsStackAlloc: Fn(Allocation) -> bool,
349353
{
350-
pub fn new(find_free_reg: GetReg, get_stackslot: GetStackSlot, victim: PReg) -> Self {
354+
pub fn new(
355+
find_free_reg: GetReg,
356+
get_stackslot: GetStackSlot,
357+
is_stack_alloc: IsStackAlloc,
358+
victim: PReg,
359+
) -> Self {
351360
Self {
352361
stack_stack_scratch_reg: None,
353362
stack_stack_scratch_reg_save: None,
354363
find_free_reg,
355364
get_stackslot,
365+
is_stack_alloc,
356366
victim,
357367
}
358368
}
359369

360370
pub fn compute<T: Debug + Copy>(mut self, moves: MoveVecWithScratch<T>) -> MoveVec<T> {
361371
// First, do we have a vec with no stack-to-stack moves or use
362372
// of a scratch register? Fast return if so.
363-
if !moves.needs_scratch() && !moves.stack_to_stack() {
373+
if !moves.needs_scratch() && !moves.stack_to_stack(&self.is_stack_alloc) {
364374
return moves.without_scratch().unwrap();
365375
}
366376

@@ -373,7 +383,7 @@ where
373383
let moves = moves.with_scratch(scratch);
374384
for &(src, dst, data) in &moves {
375385
// Do we have a stack-to-stack move? If so, resolve.
376-
if src.is_stack() && dst.is_stack() {
386+
if (self.is_stack_alloc)(src) && (self.is_stack_alloc)(dst) {
377387
trace!("scratch resolver: stack to stack: {:?} -> {:?}", src, dst);
378388
// Lazily allocate a stack-to-stack scratch.
379389
if self.stack_stack_scratch_reg.is_none() {

0 commit comments

Comments
 (0)