nak: Rework RA a bit

Instead of tracking pinned things in the register allocator, we split
the register allocator into RegAllocator and PinnedRegAllocator.  The
RegAllocator struct only allows for very simple single-SSA allocations
and frees. It tracks locations of everything, what's used, etc. but
otherwise knows nothing about pinning or vectors.

The new PinnedRegAllocator struct wraps a RegAllocator by taking a
mutable reference to it.  It provides support for pinning and all the
vector stuff.  To destroy a PinnedRegAllocator, finish() is called which
re-places any evicted SSA values and populates an OpParCopy with any
needed copies.  Because PinnedRegAllocator owns a mutable reference to
the RegAllocator, it's impossible to mix uses of PinnedRegAllocator and
RegAllocator.  This ensures that, for as long as the pinned version
exists, nothing can be allocated which might escape the pinning.

This fixes a bunch of corner cases when register pressure gets tight.

Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/24998>
This commit is contained in:
Faith Ekstrand 2023-06-07 19:21:04 -05:00 committed by Marge Bot
parent c9a6073754
commit 4dd277e233
2 changed files with 361 additions and 257 deletions

View file

@ -117,6 +117,12 @@ impl BitSet {
}
}
impl Default for BitSet {
fn default() -> BitSet {
BitSet::new()
}
}
impl BitAndAssign for BitSet {
fn bitand_assign(&mut self, rhs: BitSet) {
self.reserve_words(rhs.words.len());

View file

@ -68,8 +68,8 @@ impl SSAUseMap {
}
}
fn find_vec_use_after(&self, ssa: &SSAValue, ip: usize) -> Option<&SSAUse> {
if let Some(v) = self.ssa_map.get(ssa) {
fn find_vec_use_after(&self, ssa: SSAValue, ip: usize) -> Option<&SSAUse> {
if let Some(v) = self.ssa_map.get(&ssa) {
let p = v.partition_point(|(uip, _)| *uip <= ip);
if p == v.len() {
None
@ -150,22 +150,20 @@ impl PartialOrd for LiveValue {
}
#[derive(Clone)]
struct RegFileAllocation {
struct RegAllocator {
file: RegFile,
num_regs: u8,
used: BitSet,
pinned: BitSet,
reg_ssa: Vec<SSAValue>,
ssa_reg: HashMap<SSAValue, u8>,
}
impl RegFileAllocation {
impl RegAllocator {
pub fn new(file: RegFile, sm: u8) -> Self {
Self {
file: file,
num_regs: file.num_regs(sm),
used: BitSet::new(),
pinned: BitSet::new(),
reg_ssa: Vec::new(),
ssa_reg: HashMap::new(),
}
@ -175,249 +173,109 @@ impl RegFileAllocation {
self.file
}
pub fn begin_alloc(&mut self) {
self.pinned.clear();
/// Returns true if register `reg` currently holds an SSA value.
fn reg_is_used(&self, reg: u8) -> bool {
    self.used.get(reg.into())
}
pub fn end_alloc(&mut self) {}
fn is_reg_in_bounds(&self, reg: u8, comps: u8) -> bool {
if let Some(max_reg) = reg.checked_add(comps - 1) {
max_reg < self.num_regs
} else {
false
/// Returns true if none of the `comps` registers starting at `reg` are
/// currently in use.
fn reg_range_is_unused(&self, reg: u8, comps: u8) -> bool {
    (0..comps).all(|i| !self.reg_is_used(reg + i))
}
/// Looks up the register assigned to `ssa`, if any.
pub fn try_get_reg(&self, ssa: SSAValue) -> Option<u8> {
    self.ssa_reg.get(&ssa).copied()
}
/// Returns the register assigned to `ssa`, panicking if it has none.
pub fn get_reg(&self, ssa: SSAValue) -> u8 {
    self.try_get_reg(ssa).expect("Undefined SSA value")
}
pub fn get_ssa(&self, reg: u8) -> Option<SSAValue> {
if self.used.get(reg.into()) {
pub fn try_get_ssa(&self, reg: u8) -> Option<SSAValue> {
if self.reg_is_used(reg) {
Some(self.reg_ssa[usize::from(reg)])
} else {
None
}
}
pub fn try_get_vec_reg(&self, vec: SSARef) -> Option<u8> {
pub fn try_get_vec_reg(&self, vec: &SSARef) -> Option<u8> {
let Some(reg) = self.try_get_reg(vec[0]) else {
return None;
};
let align = vec.comps().next_power_of_two();
let reg = self.get_reg(vec[0]);
if reg % align == 0 {
if reg % align != 0 {
return None;
}
for i in 1..vec.comps() {
if self.get_reg(vec[usize::from(i)]) != reg + i {
if self.try_get_reg(vec[usize::from(i)]) != Some(reg + i) {
return None;
}
}
Some(reg)
} else {
None
}
}
/// Frees the register holding `ssa` and returns it.
///
/// Panics if `ssa` does not belong to this file or has no assignment.
///
/// NOTE(review): the old `assert!(self.used.get(...))` line was left in
/// this span next to its `reg_is_used` replacement (diff residue); only
/// the new assert is kept.
pub fn free_ssa(&mut self, ssa: SSAValue) -> u8 {
    assert!(ssa.file() == self.file);
    let reg = self.ssa_reg.remove(&ssa).unwrap();
    assert!(self.reg_is_used(reg));
    assert!(self.reg_ssa[usize::from(reg)] == ssa);
    self.used.remove(reg.into());
    reg
}
/// Frees the register of every value in `killed` that belongs to this
/// register file; values from other files are ignored.
pub fn free_killed(&mut self, killed: &KillSet) {
    for ssa in killed.iter() {
        if ssa.file() != self.file {
            continue;
        }
        self.free_ssa(*ssa);
    }
}
/// Assigns `ssa` to register `reg` and marks the register used.
///
/// `reg` must be in-bounds and currently unused, and `ssa` must not
/// already have an assignment in this file.
///
/// NOTE(review): this span contained stale deleted-diff lines (the old
/// `used.get` assert, the old unchecked `ssa_reg.insert`, and a
/// `self.pinned.insert` for the removed `pinned` field); only the new
/// version is kept here.
pub fn assign_reg(&mut self, ssa: SSAValue, reg: u8) -> RegRef {
    assert!(ssa.file() == self.file);
    assert!(reg < self.num_regs);
    assert!(!self.reg_is_used(reg));

    if usize::from(reg) >= self.reg_ssa.len() {
        self.reg_ssa.resize(usize::from(reg) + 1, SSAValue::NONE);
    }
    self.reg_ssa[usize::from(reg)] = ssa;

    let old = self.ssa_reg.insert(ssa, reg);
    assert!(old.is_none());

    self.used.insert(reg.into());
    RegRef::new(self.file, reg, 1)
}
/// Assigns each component of `ssa` to consecutive registers starting at
/// `reg` and returns a vector RegRef covering the whole range.
pub fn assign_vec_reg(&mut self, ssa: SSARef, reg: u8) -> RegRef {
    for i in 0..ssa.comps() {
        self.assign_reg(ssa[usize::from(i)], reg + i);
    }
    RegRef::new(self.file, reg, ssa.comps())
}
pub fn try_find_unused_reg_range(
&self,
start_reg: u8,
align: u8,
comps: u8,
) -> Option<u8> {
assert!(comps > 0);
let align = comps.next_power_of_two();
let align = usize::from(align);
let mut next_reg = start_reg;
let mut next_reg = usize::from(start_reg);
loop {
let reg = self.used.next_unset(next_reg.into());
/* Ensure we're properly aligned */
let Ok(reg) = u8::try_from(reg.next_multiple_of(align.into())) else {
return None;
};
let reg = reg.next_multiple_of(align);
if !self.is_reg_in_bounds(reg, comps) {
return None;
}
let mut avail = true;
for c in 0..comps {
let reg_c = usize::from(reg + c);
if self.used.get(reg_c) || self.pinned.get(reg_c) {
avail = false;
break;
}
}
if avail {
return Some(reg);
}
next_reg = match reg.checked_add(align) {
Some(r) => r,
None => return None,
}
}
}
fn try_find_unused_reg(
&self,
start_reg: u8,
align: u8,
comp: u8,
) -> Option<u8> {
let mut reg = start_reg;
loop {
reg = match self.try_find_unused_reg_range(reg, 1) {
Some(r) => r,
None => break None,
};
if reg % align == comp {
return Some(reg);
}
reg += 1;
}
}
fn try_find_unpinned_reg_range(
&self,
start_reg: u8,
comps: u8,
) -> Option<u8> {
let align = comps.next_power_of_two();
let mut next_reg = start_reg;
loop {
let reg = self.pinned.next_unset(next_reg.into());
/* Ensure we're properly aligned */
let reg = match u8::try_from(reg.next_multiple_of(align.into())) {
Ok(r) => r,
Err(_) => return None,
};
if !self.is_reg_in_bounds(reg, comps) {
return None;
}
let mut is_pinned = false;
for i in 0..comps {
if self.pinned.get((reg + i).into()) {
is_pinned = true;
break;
}
}
if !is_pinned {
return Some(reg);
}
next_reg = match reg.checked_add(align) {
Some(r) => r,
None => return None,
}
}
}
pub fn try_find_unpinned_reg_near_ssa(&self, ssa: SSARef) -> Option<u8> {
/* Get something near component 0 */
self.try_find_unpinned_reg_range(self.get_reg(ssa[0]), ssa.comps())
}
pub fn get_scalar(&mut self, ssa: SSAValue) -> RegRef {
assert!(ssa.file() == self.file);
let reg = self.get_reg(ssa);
self.pinned.insert(reg.into());
RegRef::new(self.file, reg, 1)
}
pub fn move_to_reg(
&mut self,
pcopy: &mut OpParCopy,
ssa: SSARef,
reg: u8,
) -> RegRef {
for c in 0..ssa.comps() {
let old_reg = self.get_reg(ssa[usize::from(c)]);
if old_reg == reg + c {
continue;
}
self.free_ssa(ssa[usize::from(c)]);
/* If something already exists in the destination, swap it to the
* source.
/* Ensure we're in-bounds. This also serves as a check to ensure
* that u8::try_from(reg + i) will succeed.
*/
if let Some(evicted) = self.get_ssa(reg + c) {
self.free_ssa(evicted);
pcopy.push(
RegRef::new(self.file, old_reg, 1).into(),
RegRef::new(self.file, reg + c, 1).into(),
);
self.assign_reg(evicted, old_reg);
if reg > usize::from(self.num_regs - comps) {
return None;
}
pcopy.push(
RegRef::new(self.file, reg + c, 1).into(),
RegRef::new(self.file, old_reg, 1).into(),
);
self.assign_reg(ssa[usize::from(c)], reg + c);
let reg_u8 = u8::try_from(reg).unwrap();
if self.reg_range_is_unused(reg_u8, comps) {
return Some(reg_u8);
}
RegRef::new(self.file, reg, ssa.comps())
next_reg = reg + align;
}
}
pub fn get_vector(&mut self, pcopy: &mut OpParCopy, ssa: SSARef) -> RegRef {
let reg = self
.try_get_vec_reg(ssa)
.or_else(|| self.try_find_unused_reg_range(0, ssa.comps()))
.or_else(|| self.try_find_unpinned_reg_near_ssa(ssa))
.or_else(|| self.try_find_unpinned_reg_range(0, ssa.comps()))
.expect("Failed to find an unpinned register range");
for c in 0..ssa.comps() {
self.pinned.insert((reg + c).into());
}
self.move_to_reg(pcopy, ssa, reg)
/// Returns a 1-component RegRef for the register holding `ssa`.
///
/// Panics if `ssa` has no assignment in this file.
pub fn get_scalar(&self, ssa: SSAValue) -> RegRef {
    let reg = self.try_get_reg(ssa).expect("Unknown SSA value");
    RegRef::new(self.file, reg, 1)
}
pub fn alloc_scalar(
@ -426,7 +284,7 @@ impl RegFileAllocation {
sum: &SSAUseMap,
ssa: SSAValue,
) -> RegRef {
if let Some(u) = sum.find_vec_use_after(&ssa, ip) {
if let Some(u) = sum.find_vec_use_after(ssa, ip) {
match u {
SSAUse::FixedReg(reg) => {
if !self.used.get((*reg).into()) {
@ -456,6 +314,10 @@ impl RegFileAllocation {
continue;
}
if vec_reg + comp >= self.num_regs {
continue;
}
if !self.used.get((vec_reg + comp).into()) {
return self.assign_reg(ssa, vec_reg + comp);
}
@ -465,7 +327,8 @@ impl RegFileAllocation {
/* We weren't able to pair it with an already allocated
* register but maybe we can at least find an aligned one.
*/
if let Some(reg) = self.try_find_unused_reg(0, align, comp)
if let Some(reg) =
self.try_find_unused_reg_range(0, align, 1)
{
return self.assign_reg(ssa, reg);
}
@ -474,56 +337,288 @@ impl RegFileAllocation {
}
let reg = self
.try_find_unused_reg_range(0, 1)
.try_find_unused_reg_range(0, 1, 1)
.expect("Failed to find free register");
self.assign_reg(ssa, reg)
}
}
pub fn alloc_vector(
&mut self,
pcopy: &mut OpParCopy,
ssa: SSARef,
) -> RegRef {
/// Wraps a RegAllocator, adding register pinning and vector support.
///
/// While a PinnedRegAllocator exists it holds a mutable reference to
/// the underlying RegAllocator, so nothing can be allocated behind its
/// back.  finish() re-places any evicted SSA values and emits the
/// parallel copies accumulated along the way.
struct PinnedRegAllocator<'a> {
    ra: &'a mut RegAllocator,       // The wrapped single-SSA allocator
    pcopy: OpParCopy,               // Copies accumulated while shuffling values
    pinned: BitSet,                 // Registers which must not move
    evicted: HashMap<SSAValue, u8>, // Displaced values -> their old register
}
impl<'a> PinnedRegAllocator<'a> {
/// Wraps `ra` with nothing pinned and nothing evicted.
///
/// Fix: `ra: ra` used the redundant field-name form
/// (clippy::redundant_field_names); use the shorthand instead.
fn new(ra: &'a mut RegAllocator) -> Self {
    PinnedRegAllocator {
        ra,
        pcopy: OpParCopy::new(),
        pinned: Default::default(),
        evicted: HashMap::new(),
    }
}
/// The register file this allocator manages.
fn file(&self) -> RegFile {
    self.ra.file()
}
/// Marks `reg` as pinned for the lifetime of this PinnedRegAllocator.
fn pin_reg(&mut self, reg: u8) {
    self.pinned.insert(reg.into());
}
/// Pins the `comps` consecutive registers starting at `reg`.
fn pin_reg_range(&mut self, reg: u8, comps: u8) {
    for i in 0..comps {
        self.pin_reg(reg + i);
    }
}
/// Returns true if `reg` has been pinned by this allocator.
fn reg_is_pinned(&self, reg: u8) -> bool {
    self.pinned.get(reg.into())
}
/// Returns true if none of the `comps` registers starting at `reg` are
/// pinned.
fn reg_range_is_unpinned(&self, reg: u8, comps: u8) -> bool {
    (0..comps).all(|i| !self.pinned.get((reg + i).into()))
}
/// Assigns `ssa` to `reg` in the underlying allocator and pins the
/// register so it cannot be shuffled again.
fn assign_pin_reg(&mut self, ssa: SSAValue, reg: u8) -> RegRef {
    self.pin_reg(reg);
    self.ra.assign_reg(ssa, reg)
}
/// Assigns each component of `ssa` to consecutive registers starting at
/// `reg`, pinning every one, and returns a vector RegRef for the range.
pub fn assign_pin_vec_reg(&mut self, ssa: SSARef, reg: u8) -> RegRef {
    for c in 0..ssa.comps() {
        self.assign_pin_reg(ssa[usize::from(c)], reg + c);
    }
    RegRef::new(self.file(), reg, ssa.comps())
}
/// Searches from `start_reg` for `comps` consecutive unpinned
/// registers aligned to `align`.
///
/// Unlike the unused-register search on RegAllocator, this consults
/// only the pinned set: the returned range may contain live values
/// which the caller must evict.
fn try_find_unpinned_reg_range(
    &self,
    start_reg: u8,
    align: u8,
    comps: u8,
) -> Option<u8> {
    // Search in usize space so reg arithmetic cannot overflow u8.
    let align = usize::from(align);
    let mut next_reg = usize::from(start_reg);
    loop {
        // Jump to the next unpinned register at or after next_reg.
        let reg = self.pinned.next_unset(next_reg);

        /* Ensure we're properly aligned */
        let reg = reg.next_multiple_of(align);

        /* Ensure we're in-bounds. This also serves as a check to ensure
         * that u8::try_from(reg + i) will succeed.
         */
        if reg > usize::from(self.ra.num_regs - comps) {
            return None;
        }

        let reg_u8 = u8::try_from(reg).unwrap();
        if self.reg_range_is_unpinned(reg_u8, comps) {
            return Some(reg_u8);
        }

        next_reg = reg + align;
    }
}
/// Records that `ssa` has been displaced from `old_reg`; it will be
/// given a new register when finish() is called (or earlier, if a
/// later move re-places it).
pub fn evict_ssa(&mut self, ssa: SSAValue, old_reg: u8) {
    assert!(ssa.file() == self.file());
    assert!(!self.reg_is_pinned(old_reg));
    self.evicted.insert(ssa, old_reg);
}
/// If `reg` currently holds a value, frees it and marks the value
/// evicted so it will eventually land in a new register.
pub fn evict_reg_if_used(&mut self, reg: u8) {
    assert!(!self.reg_is_pinned(reg));
    if let Some(ssa) = self.ra.try_get_ssa(reg) {
        self.ra.free_ssa(ssa);
        self.evict_ssa(ssa, reg);
    }
}
/// Moves `ssa` into `new_reg`, pinning it there.
///
/// Whatever previously occupied `new_reg` is evicted, and the copy
/// needed to realize the move is appended to the pending parallel
/// copy.  `ssa` must either be currently allocated or previously
/// evicted; anything else is a bug.
fn move_ssa_to_reg(&mut self, ssa: SSAValue, new_reg: u8) {
    if let Some(old_reg) = self.ra.try_get_reg(ssa) {
        // A value cannot be both allocated and evicted at once.
        assert!(self.evicted.get(&ssa).is_none());
        assert!(!self.reg_is_pinned(old_reg));

        if new_reg == old_reg {
            // Already in the right place; just pin it.
            self.pin_reg(new_reg);
        } else {
            self.ra.free_ssa(ssa);
            self.evict_reg_if_used(new_reg);
            self.pcopy.push(
                RegRef::new(self.file(), new_reg, 1).into(),
                RegRef::new(self.file(), old_reg, 1).into(),
            );
            self.assign_pin_reg(ssa, new_reg);
        }
    } else if let Some(old_reg) = self.evicted.remove(&ssa) {
        // Re-place a previously evicted value.
        self.evict_reg_if_used(new_reg);
        self.pcopy.push(
            RegRef::new(self.file(), new_reg, 1).into(),
            RegRef::new(self.file(), old_reg, 1).into(),
        );
        self.assign_pin_reg(ssa, new_reg);
    } else {
        panic!("Unknown SSA value");
    }
}
/// Destroys this PinnedRegAllocator: appends the accumulated parallel
/// copy to `pcopy` and finds a new register for every still-evicted
/// value, emitting the copies needed to put each one there.
///
/// NOTE(review): this span contained stale deleted-diff lines (the old
/// `try_find_unused_reg_range(0, ssa.comps())` / `or_else` chain)
/// interleaved into the new search loop; only the new version is kept.
fn finish(mut self, pcopy: &mut OpParCopy) {
    pcopy.srcs.append(&mut self.pcopy.srcs);
    pcopy.dsts.append(&mut self.pcopy.dsts);

    if !self.evicted.is_empty() {
        /* Sort so we get determinism, even if the hash map order changes
         * from one run to another or due to rust compiler updates.
         */
        let mut evicted: Vec<_> = self.evicted.drain().collect();
        evicted.sort_by_key(|(_, reg)| *reg);

        for (ssa, old_reg) in evicted {
            // Scan for a register that is both unused and unpinned.
            let mut next_reg = 0;
            let new_reg = loop {
                let reg = self
                    .ra
                    .try_find_unused_reg_range(next_reg, 1, 1)
                    .expect("Failed to find free register");
                if !self.reg_is_pinned(reg) {
                    break reg;
                }
                next_reg = reg + 1;
            };

            pcopy.push(
                RegRef::new(self.file(), new_reg, 1).into(),
                RegRef::new(self.file(), old_reg, 1).into(),
            );
            self.assign_pin_reg(ssa, new_reg);
        }
    }
}
/// Forwards to RegAllocator::try_get_vec_reg() on the wrapped allocator.
pub fn try_get_vec_reg(&self, vec: &SSARef) -> Option<u8> {
    self.ra.try_get_vec_reg(vec)
}
pub fn collect_vector(&mut self, vec: &SSARef) -> RegRef {
if let Some(reg) = self.try_get_vec_reg(vec) {
self.pin_reg_range(reg, vec.comps());
return RegRef::new(self.file(), reg, vec.comps());
}
let comps = vec.comps();
let align = comps.next_power_of_two();
let reg = self
.ra
.try_find_unused_reg_range(0, align, comps)
.or_else(|| {
for c in 0..vec.comps() {
let ssa = vec[usize::from(c)];
let Some(comp_reg) = self.ra.try_get_reg(ssa) else {
continue;
};
let Some(reg) = comp_reg.checked_sub(c) else {
continue;
};
if reg % align != 0 {
continue;
}
if let Some(end) = reg.checked_add(comps) {
if end > self.ra.num_regs {
continue;
}
} else {
continue;
}
if self.reg_range_is_unpinned(reg, comps) {
return Some(reg);
}
}
None
})
.or_else(|| self.try_find_unpinned_reg_range(0, align, comps))
.expect("Failed to find an unpinned register range");
for c in 0..ssa.comps() {
self.pinned.insert((reg + c).into());
for c in 0..vec.comps() {
self.move_ssa_to_reg(vec[usize::from(c)], reg + c);
}
for c in 0..ssa.comps() {
if let Some(evicted) = self.get_ssa(reg + c) {
self.free_ssa(evicted);
let new_reg = self.try_find_unused_reg_range(0, 1).unwrap();
pcopy.push(
RegRef::new(self.file, new_reg, 1).into(),
RegRef::new(self.file, reg + c, 1).into(),
);
self.assign_reg(evicted, new_reg);
RegRef::new(self.file(), reg, comps)
}
/// Allocates a fresh, aligned register range for the vector `vec`,
/// pins it, and returns the corresponding RegRef.
///
/// Prefers a fully unused range; failing that, takes an unpinned range
/// and evicts its current occupants.
pub fn alloc_vector(&mut self, vec: SSARef) -> RegRef {
    let comps = vec.comps();
    let align = comps.next_power_of_two();

    if let Some(reg) = self.ra.try_find_unused_reg_range(0, align, comps) {
        return self.assign_pin_vec_reg(vec, reg);
    }

    let reg = self
        .try_find_unpinned_reg_range(0, align, comps)
        .expect("Failed to find an unpinned register range");

    for c in 0..vec.comps() {
        self.evict_reg_if_used(reg + c);
    }
    self.assign_pin_vec_reg(vec, reg)
}
/// Frees every killed value that belongs to this register file;
/// values from other files are ignored.
pub fn free_killed(&mut self, killed: &KillSet) {
    for ssa in killed.iter() {
        if ssa.file() == self.file() {
            self.ra.free_ssa(*ssa);
        }
    }
}
}
self.assign_vec_reg(ssa, reg)
/// Safety net: a PinnedRegAllocator must not be dropped while evicted
/// values are still pending — finish() re-places them and drains the
/// map before the allocator goes away.
impl Drop for PinnedRegAllocator<'_> {
    fn drop(&mut self) {
        assert!(self.evicted.is_empty());
    }
}
fn instr_remap_srcs_file(instr: &mut Instr, ra: &mut PinnedRegAllocator) {
/* Collect vector sources first since those may silently pin some of our
* scalar sources.
*/
for src in instr.srcs_mut() {
if let SrcRef::SSA(ssa) = &src.src_ref {
if ssa.file() == ra.file() && ssa.comps() > 1 {
src.src_ref = ra.collect_vector(ssa).into();
}
}
}
fn instr_remap_srcs_file(
instr: &mut Instr,
pcopy: &mut OpParCopy,
ra: &mut RegFileAllocation,
) {
if let PredRef::SSA(pred) = instr.pred.pred_ref {
if pred.file() == ra.file() {
instr.pred.pred_ref = ra.get_scalar(pred).into();
instr.pred.pred_ref = ra.collect_vector(&pred.into()).into();
}
}
for src in instr.srcs_mut() {
if let SrcRef::SSA(ssa) = src.src_ref {
if ssa.file() == ra.file() {
src.src_ref = ra.get_vector(pcopy, ssa).into();
if let SrcRef::SSA(ssa) = &src.src_ref {
if ssa.file() == ra.file() && ssa.comps() == 1 {
src.src_ref = ra.collect_vector(ssa).into();
}
}
}
@ -533,7 +628,7 @@ fn instr_alloc_scalar_dsts_file(
instr: &mut Instr,
ip: usize,
sum: &SSAUseMap,
ra: &mut RegFileAllocation,
ra: &mut RegAllocator,
) {
for dst in instr.dsts_mut() {
if let Dst::SSA(ssa) = dst {
@ -551,7 +646,7 @@ fn instr_assign_regs_file(
sum: &SSAUseMap,
killed: &KillSet,
pcopy: &mut OpParCopy,
ra: &mut RegFileAllocation,
ra: &mut RegAllocator,
) {
struct VecDst {
dst_idx: usize,
@ -578,13 +673,11 @@ fn instr_assign_regs_file(
/* No vector destinations is the easy case */
if vec_dst_comps == 0 {
ra.begin_alloc();
instr_remap_srcs_file(instr, pcopy, ra);
ra.end_alloc();
ra.free_killed(killed);
ra.begin_alloc();
let mut pra = PinnedRegAllocator::new(ra);
instr_remap_srcs_file(instr, &mut pra);
pra.free_killed(killed);
pra.finish(pcopy);
instr_alloc_scalar_dsts_file(instr, ip, sum, ra);
ra.end_alloc();
return;
}
@ -634,8 +727,9 @@ fn instr_assign_regs_file(
vec_dsts_map_to_killed_srcs = false;
}
let align = vec_dst.comps.next_power_of_two();
if let Some(reg) =
ra.try_find_unused_reg_range(next_dst_reg, vec_dst.comps)
ra.try_find_unused_reg_range(next_dst_reg, align, vec_dst.comps)
{
vec_dst.reg = reg;
next_dst_reg = reg + vec_dst.comps;
@ -644,58 +738,63 @@ fn instr_assign_regs_file(
}
}
ra.begin_alloc();
if vec_dsts_map_to_killed_srcs {
instr_remap_srcs_file(instr, pcopy, ra);
let mut pra = PinnedRegAllocator::new(ra);
instr_remap_srcs_file(instr, &mut pra);
for vec_dst in &mut vec_dsts {
vec_dst.reg = ra.try_get_vec_reg(vec_dst.killed.unwrap()).unwrap();
let src_vec = vec_dst.killed.as_ref().unwrap();
vec_dst.reg = pra.try_get_vec_reg(src_vec).unwrap();
}
ra.free_killed(killed);
pra.free_killed(killed);
for vec_dst in vec_dsts {
let dst = &mut instr.dsts_mut()[vec_dst.dst_idx];
*dst = ra
.assign_vec_reg(*dst.as_ssa().unwrap(), vec_dst.reg)
*dst = pra
.assign_pin_vec_reg(*dst.as_ssa().unwrap(), vec_dst.reg)
.into();
}
pra.finish(pcopy);
instr_alloc_scalar_dsts_file(instr, ip, sum, ra);
} else if could_trivially_allocate {
let mut pra = PinnedRegAllocator::new(ra);
for vec_dst in vec_dsts {
let dst = &mut instr.dsts_mut()[vec_dst.dst_idx];
*dst = ra
.assign_vec_reg(*dst.as_ssa().unwrap(), vec_dst.reg)
*dst = pra
.assign_pin_vec_reg(*dst.as_ssa().unwrap(), vec_dst.reg)
.into();
}
instr_remap_srcs_file(instr, pcopy, ra);
ra.free_killed(killed);
instr_remap_srcs_file(instr, &mut pra);
pra.free_killed(killed);
pra.finish(pcopy);
instr_alloc_scalar_dsts_file(instr, ip, sum, ra);
} else {
instr_remap_srcs_file(instr, pcopy, ra);
ra.free_killed(killed);
let mut pra = PinnedRegAllocator::new(ra);
instr_remap_srcs_file(instr, &mut pra);
/* Allocate vector destinations first so we have the most freedom.
* Scalar destinations can fill in holes.
*/
for dst in instr.dsts_mut() {
if let Dst::SSA(ssa) = dst {
if ssa.file() == ra.file() && ssa.comps() > 1 {
*dst = ra.alloc_vector(pcopy, *ssa).into();
if ssa.file() == pra.file() && ssa.comps() > 1 {
*dst = pra.alloc_vector(*ssa).into();
}
}
}
pra.free_killed(killed);
pra.finish(pcopy);
instr_alloc_scalar_dsts_file(instr, ip, sum, ra);
}
ra.end_alloc();
}
impl PerRegFile<RegFileAllocation> {
impl PerRegFile<RegAllocator> {
pub fn free_killed(&mut self, killed: &KillSet) {
for ssa in killed.iter() {
self[ssa.file()].free_ssa(*ssa);
@ -704,7 +803,7 @@ impl PerRegFile<RegFileAllocation> {
}
struct AssignRegsBlock {
ra: PerRegFile<RegFileAllocation>,
ra: PerRegFile<RegAllocator>,
live_in: Vec<LiveValue>,
phi_out: HashMap<u32, SrcRef>,
}
@ -712,7 +811,7 @@ struct AssignRegsBlock {
impl AssignRegsBlock {
fn new(sm: u8) -> AssignRegsBlock {
AssignRegsBlock {
ra: PerRegFile::new_with(&|file| RegFileAllocation::new(file, sm)),
ra: PerRegFile::new_with(&|file| RegAllocator::new(file, sm)),
live_in: Vec::new(),
phi_out: HashMap::new(),
}
@ -780,7 +879,7 @@ impl AssignRegsBlock {
&mut self,
b: &mut BasicBlock,
bl: &BlockLiveness,
pred_ra: Option<&PerRegFile<RegFileAllocation>>,
pred_ra: Option<&PerRegFile<RegAllocator>>,
) {
/* Populate live in from the register file we're handed. We'll add more
* live in when we process the OpPhiDst, if any.
@ -846,8 +945,7 @@ impl AssignRegsBlock {
for lv in &target.live_in {
let src = match lv.live_ref {
LiveRef::SSA(ssa) => {
let reg = self.ra[ssa.file()].get_reg(ssa);
SrcRef::from(RegRef::new(ssa.file(), reg, 1))
SrcRef::from(self.ra[ssa.file()].get_scalar(ssa))
}
LiveRef::Phi(phi) => *self.phi_out.get(&phi).unwrap(),
};