1 //===- RegAllocFast.cpp - A fast register allocator for debug code --------===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 /// \file This register allocator allocates registers one basic block at a
11 /// time, attempting to keep values in registers and reusing registers as
12 /// appropriate.
13 //
14 //===----------------------------------------------------------------------===//
15
16 #include "llvm/ADT/ArrayRef.h"
17 #include "llvm/ADT/DenseMap.h"
18 #include "llvm/ADT/IndexedMap.h"
19 #include "llvm/ADT/SmallSet.h"
20 #include "llvm/ADT/SmallVector.h"
21 #include "llvm/ADT/SparseSet.h"
22 #include "llvm/ADT/Statistic.h"
23 #include "llvm/CodeGen/MachineBasicBlock.h"
24 #include "llvm/CodeGen/MachineFrameInfo.h"
25 #include "llvm/CodeGen/MachineFunction.h"
26 #include "llvm/CodeGen/MachineFunctionPass.h"
27 #include "llvm/CodeGen/MachineInstr.h"
28 #include "llvm/CodeGen/MachineInstrBuilder.h"
29 #include "llvm/CodeGen/MachineOperand.h"
30 #include "llvm/CodeGen/MachineRegisterInfo.h"
31 #include "llvm/CodeGen/RegAllocRegistry.h"
32 #include "llvm/CodeGen/RegisterClassInfo.h"
33 #include "llvm/CodeGen/TargetInstrInfo.h"
34 #include "llvm/CodeGen/TargetOpcodes.h"
35 #include "llvm/CodeGen/TargetRegisterInfo.h"
36 #include "llvm/CodeGen/TargetSubtargetInfo.h"
37 #include "llvm/IR/DebugLoc.h"
38 #include "llvm/IR/Metadata.h"
39 #include "llvm/MC/MCInstrDesc.h"
40 #include "llvm/MC/MCRegisterInfo.h"
41 #include "llvm/Pass.h"
42 #include "llvm/Support/Casting.h"
43 #include "llvm/Support/Compiler.h"
44 #include "llvm/Support/Debug.h"
45 #include "llvm/Support/ErrorHandling.h"
46 #include "llvm/Support/raw_ostream.h"
47 #include <cassert>
48 #include <tuple>
49 #include <vector>
50
51 using namespace llvm;
52
53 #define DEBUG_TYPE "regalloc"
54
55 STATISTIC(NumStores, "Number of stores added");
56 STATISTIC(NumLoads , "Number of loads added");
57 STATISTIC(NumCoalesced, "Number of copies coalesced");
58
59 static RegisterRegAlloc
60 fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);
61
62 namespace {
63
64 class RegAllocFast : public MachineFunctionPass {
65 public:
66 static char ID;
67
68   RegAllocFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1) {}
69
70 private:
71 MachineFrameInfo *MFI;
72 MachineRegisterInfo *MRI;
73 const TargetRegisterInfo *TRI;
74 const TargetInstrInfo *TII;
75 RegisterClassInfo RegClassInfo;
76
77 /// Basic block currently being allocated.
78 MachineBasicBlock *MBB;
79
80 /// Maps virtual regs to the frame index where these values are spilled.
81 IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;
82
83 /// Everything we know about a live virtual register.
84 struct LiveReg {
85 MachineInstr *LastUse = nullptr; ///< Last instr to use reg.
86 unsigned VirtReg; ///< Virtual register number.
87 MCPhysReg PhysReg = 0; ///< Currently held here.
88 unsigned short LastOpNum = 0; ///< OpNum on LastUse.
89 bool Dirty = false; ///< Register needs spill.
90
91     explicit LiveReg(unsigned VirtReg) : VirtReg(VirtReg) {}
92
93     unsigned getSparseSetIndex() const {
94 return TargetRegisterInfo::virtReg2Index(VirtReg);
95 }
96 };
97
98 using LiveRegMap = SparseSet<LiveReg>;
99 /// This map contains entries for each virtual register that is currently
100 /// available in a physical register.
101 LiveRegMap LiveVirtRegs;
102
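  /// DBG_VALUE instructions referencing each virtual register, remembered so
  /// that when the register is spilled the debug values can be redirected to
  /// the stack slot (see spill()).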
103 DenseMap<unsigned, SmallVector<MachineInstr *, 2>> LiveDbgValueMap;
104
105 /// State of a physical register.
106 enum RegState {
107 /// A disabled register is not available for allocation, but an alias may
108 /// be in use. A register can only be moved out of the disabled state if
109 /// all aliases are disabled.
110 regDisabled,
111
112 /// A free register is not currently in use and can be allocated
113 /// immediately without checking aliases.
114 regFree,
115
116 /// A reserved register has been assigned explicitly (e.g., setting up a
117 /// call parameter), and it remains reserved until it is used.
118 regReserved
119
120     /// A register state may also be a virtual register number, indicating
121 /// that the physical register is currently allocated to a virtual
122 /// register. In that case, LiveVirtRegs contains the inverse mapping.
123 };
124
125 /// Maps each physical register to a RegState enum or a virtual register.
126 std::vector<unsigned> PhysRegState;
127
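  /// Virtual registers whose definitions turned out to be dead in the current
  /// instruction; they are killed after all defs have been allocated.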
128 SmallVector<unsigned, 16> VirtDead;
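  /// Identity copies found during allocation; they are only erased once the
  /// whole block is done, because LiveVirtRegs may still refer to them.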
129 SmallVector<MachineInstr *, 32> Coalesced;
130
131 using RegUnitSet = SparseSet<uint16_t, identity<uint16_t>>;
132 /// Set of register units that are used in the current instruction, and so
133 /// cannot be allocated.
134 RegUnitSet UsedInInstr;
135
136 void setPhysRegState(MCPhysReg PhysReg, unsigned NewState);
137
138 /// Mark a physreg as used in this instruction.
139   void markRegUsedInInstr(MCPhysReg PhysReg) {
140 for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
141 UsedInInstr.insert(*Units);
142 }
143
144 /// Check if a physreg or any of its aliases are used in this instruction.
145   bool isRegUsedInInstr(MCPhysReg PhysReg) const {
146 for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
147 if (UsedInInstr.count(*Units))
148 return true;
149 return false;
150 }
151
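  /// Rough cost values used by calcSpillCost(): evicting a clean register is
  /// cheaper than a dirty one (no store is needed), and spillImpossible marks
  /// a register that cannot be freed for the current instruction.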
152 enum : unsigned {
153 spillClean = 50,
154 spillDirty = 100,
155 spillImpossible = ~0u
156 };
157
158 public:
159   StringRef getPassName() const override { return "Fast Register Allocator"; }
160
161   void getAnalysisUsage(AnalysisUsage &AU) const override {
162 AU.setPreservesCFG();
163 MachineFunctionPass::getAnalysisUsage(AU);
164 }
165
166   MachineFunctionProperties getRequiredProperties() const override {
167 return MachineFunctionProperties().set(
168 MachineFunctionProperties::Property::NoPHIs);
169 }
170
171   MachineFunctionProperties getSetProperties() const override {
172 return MachineFunctionProperties().set(
173 MachineFunctionProperties::Property::NoVRegs);
174 }
175
176 private:
177 bool runOnMachineFunction(MachineFunction &MF) override;
178
179 void allocateBasicBlock(MachineBasicBlock &MBB);
180 void allocateInstruction(MachineInstr &MI);
181 void handleDebugValue(MachineInstr &MI);
182 void handleThroughOperands(MachineInstr &MI,
183 SmallVectorImpl<unsigned> &VirtDead);
184 bool isLastUseOfLocalReg(const MachineOperand &MO) const;
185
186 void addKillFlag(const LiveReg &LRI);
187 void killVirtReg(LiveReg &LR);
188 void killVirtReg(unsigned VirtReg);
189 void spillVirtReg(MachineBasicBlock::iterator MI, LiveReg &LR);
190 void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);
191
192 void usePhysReg(MachineOperand &MO);
193 void definePhysReg(MachineBasicBlock::iterator MI, MCPhysReg PhysReg,
194 RegState NewState);
195 unsigned calcSpillCost(MCPhysReg PhysReg) const;
196 void assignVirtToPhysReg(LiveReg &, MCPhysReg PhysReg);
197
198   LiveRegMap::iterator findLiveVirtReg(unsigned VirtReg) {
199 return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
200 }
201
202   LiveRegMap::const_iterator findLiveVirtReg(unsigned VirtReg) const {
203 return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
204 }
205
206 void allocVirtReg(MachineInstr &MI, LiveReg &LR, unsigned Hint);
207 MCPhysReg defineVirtReg(MachineInstr &MI, unsigned OpNum, unsigned VirtReg,
208 unsigned Hint);
209 LiveReg &reloadVirtReg(MachineInstr &MI, unsigned OpNum, unsigned VirtReg,
210 unsigned Hint);
211 void spillAll(MachineBasicBlock::iterator MI);
212 bool setPhysReg(MachineInstr &MI, MachineOperand &MO, MCPhysReg PhysReg);
213
214 int getStackSpaceFor(unsigned VirtReg);
215 void spill(MachineBasicBlock::iterator Before, unsigned VirtReg,
216 MCPhysReg AssignedReg, bool Kill);
217 void reload(MachineBasicBlock::iterator Before, unsigned VirtReg,
218 MCPhysReg PhysReg);
219
220 void dumpState();
221 };
222
223 } // end anonymous namespace
224
225 char RegAllocFast::ID = 0;
226
227 INITIALIZE_PASS(RegAllocFast, "regallocfast", "Fast Register Allocator", false,
228 false)
229
230 void RegAllocFast::setPhysRegState(MCPhysReg PhysReg, unsigned NewState) {
231 PhysRegState[PhysReg] = NewState;
232 }
233
234 /// This allocates space for the specified virtual register to be held on the
235 /// stack.
236 int RegAllocFast::getStackSpaceFor(unsigned VirtReg) {
237   // Find the location where Reg would belong...
238 int SS = StackSlotForVirtReg[VirtReg];
239 // Already has space allocated?
240 if (SS != -1)
241 return SS;
242
243 // Allocate a new stack object for this spill location...
244 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
245 unsigned Size = TRI->getSpillSize(RC);
246 unsigned Align = TRI->getSpillAlignment(RC);
247 int FrameIdx = MFI->CreateSpillStackObject(Size, Align);
248
249 // Assign the slot.
250 StackSlotForVirtReg[VirtReg] = FrameIdx;
251 return FrameIdx;
252 }
253
254 /// Insert spill instruction for \p AssignedReg before \p Before. Update
255 /// DBG_VALUEs with \p VirtReg operands to point at the stack slot.
256 void RegAllocFast::spill(MachineBasicBlock::iterator Before, unsigned VirtReg,
257 MCPhysReg AssignedReg, bool Kill) {
258 LLVM_DEBUG(dbgs() << "Spilling " << printReg(VirtReg, TRI)
259 << " in " << printReg(AssignedReg, TRI));
260 int FI = getStackSpaceFor(VirtReg);
261 LLVM_DEBUG(dbgs() << " to stack slot #" << FI << '\n');
262
263 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
264 TII->storeRegToStackSlot(*MBB, Before, AssignedReg, Kill, FI, &RC, TRI);
265 ++NumStores;
266
267   // If this register is used by DBG_VALUE instructions, insert new DBG_VALUEs
268   // that identify the spilled location as the place to find the corresponding
269   // variable's value.
270 SmallVectorImpl<MachineInstr *> &LRIDbgValues = LiveDbgValueMap[VirtReg];
271 for (MachineInstr *DBG : LRIDbgValues) {
272 MachineInstr *NewDV = buildDbgValueForSpill(*MBB, Before, *DBG, FI);
273 assert(NewDV->getParent() == MBB && "dangling parent pointer");
274 (void)NewDV;
275 LLVM_DEBUG(dbgs() << "Inserting debug info due to spill:\n" << *NewDV);
276 }
277   // Now that this register is spilled, there should not be any DBG_VALUE
278   // pointing to this register because they are all pointing to the spilled
279   // value now.
280 LRIDbgValues.clear();
281 }
282
283 /// Insert reload instruction for \p PhysReg before \p Before.
284 void RegAllocFast::reload(MachineBasicBlock::iterator Before, unsigned VirtReg,
285 MCPhysReg PhysReg) {
286 LLVM_DEBUG(dbgs() << "Reloading " << printReg(VirtReg, TRI) << " into "
287 << printReg(PhysReg, TRI) << '\n');
288 int FI = getStackSpaceFor(VirtReg);
289 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
290 TII->loadRegFromStackSlot(*MBB, Before, PhysReg, FI, &RC, TRI);
291 ++NumLoads;
292 }
293
294 /// Return true if MO is the only remaining reference to its virtual register,
295 /// and it is guaranteed to be a block-local register.
296 bool RegAllocFast::isLastUseOfLocalReg(const MachineOperand &MO) const {
297 // If the register has ever been spilled or reloaded, we conservatively assume
298 // it is a global register used in multiple blocks.
299 if (StackSlotForVirtReg[MO.getReg()] != -1)
300 return false;
301
302 // Check that the use/def chain has exactly one operand - MO.
303 MachineRegisterInfo::reg_nodbg_iterator I = MRI->reg_nodbg_begin(MO.getReg());
304 if (&*I != &MO)
305 return false;
306 return ++I == MRI->reg_nodbg_end();
307 }
308
309 /// Set kill flags on last use of a virtual register.
310 void RegAllocFast::addKillFlag(const LiveReg &LR) {
311 if (!LR.LastUse) return;
312 MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
313 if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {
314 if (MO.getReg() == LR.PhysReg)
315 MO.setIsKill();
316     // else, don't do anything: we are probably redefining a
317     // subreg of this register and, given we don't track which
318     // lanes are actually dead, we cannot insert a kill flag here.
319 // Otherwise we may end up in a situation like this:
320 // ... = (MO) physreg:sub1, implicit killed physreg
321 // ... <== Here we would allow later pass to reuse physreg:sub1
322 // which is potentially wrong.
323 // LR:sub0 = ...
324 // ... = LR.sub1 <== This is going to use physreg:sub1
325 }
326 }
327
328 /// Mark virtreg as no longer available.
329 void RegAllocFast::killVirtReg(LiveReg &LR) {
330 addKillFlag(LR);
331 assert(PhysRegState[LR.PhysReg] == LR.VirtReg &&
332 "Broken RegState mapping");
333 setPhysRegState(LR.PhysReg, regFree);
334 LR.PhysReg = 0;
335 }
336
337 /// Mark virtreg as no longer available.
338 void RegAllocFast::killVirtReg(unsigned VirtReg) {
339 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
340 "killVirtReg needs a virtual register");
341 LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
342 if (LRI != LiveVirtRegs.end() && LRI->PhysReg)
343 killVirtReg(*LRI);
344 }
345
346 /// This method spills the value specified by VirtReg into the corresponding
347 /// stack slot if needed.
348 void RegAllocFast::spillVirtReg(MachineBasicBlock::iterator MI,
349 unsigned VirtReg) {
350 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
351 "Spilling a physical register is illegal!");
352 LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
353 assert(LRI != LiveVirtRegs.end() && LRI->PhysReg &&
354 "Spilling unmapped virtual register");
355 spillVirtReg(MI, *LRI);
356 }
357
358 /// Do the actual work of spilling.
359 void RegAllocFast::spillVirtReg(MachineBasicBlock::iterator MI, LiveReg &LR) {
360 assert(PhysRegState[LR.PhysReg] == LR.VirtReg && "Broken RegState mapping");
361
362 if (LR.Dirty) {
363 // If this physreg is used by the instruction, we want to kill it on the
364 // instruction, not on the spill.
365 bool SpillKill = MachineBasicBlock::iterator(LR.LastUse) != MI;
366 LR.Dirty = false;
367
368 spill(MI, LR.VirtReg, LR.PhysReg, SpillKill);
369
370 if (SpillKill)
371 LR.LastUse = nullptr; // Don't kill register again
372 }
373 killVirtReg(LR);
374 }
375
376 /// Spill all dirty virtregs without killing them.
377 void RegAllocFast::spillAll(MachineBasicBlock::iterator MI) {
378 if (LiveVirtRegs.empty())
379 return;
380 // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
381 // of spilling here is deterministic, if arbitrary.
382 for (LiveReg &LR : LiveVirtRegs) {
383 if (!LR.PhysReg)
384 continue;
385 spillVirtReg(MI, LR);
386 }
387 LiveVirtRegs.clear();
388 }
389
390 /// Handle the direct use of a physical register. Check that the register is
391 /// not used by a virtreg. Kill the physreg, marking it free. This may add
392 /// implicit kills to MO->getParent() and invalidate MO.
393 void RegAllocFast::usePhysReg(MachineOperand &MO) {
394 // Ignore undef uses.
395 if (MO.isUndef())
396 return;
397
398 unsigned PhysReg = MO.getReg();
399 assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
400 "Bad usePhysReg operand");
401
402 markRegUsedInInstr(PhysReg);
403 switch (PhysRegState[PhysReg]) {
404 case regDisabled:
405 break;
406 case regReserved:
407 PhysRegState[PhysReg] = regFree;
408 LLVM_FALLTHROUGH;
409 case regFree:
410 MO.setIsKill();
411 return;
412 default:
413 // The physreg was allocated to a virtual register. That means the value we
414 // wanted has been clobbered.
415 llvm_unreachable("Instruction uses an allocated register");
416 }
417
418 // Maybe a superregister is reserved?
419 for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
420 MCPhysReg Alias = *AI;
421 switch (PhysRegState[Alias]) {
422 case regDisabled:
423 break;
424 case regReserved:
425 // Either PhysReg is a subregister of Alias and we mark the
426 // whole register as free, or PhysReg is the superregister of
427 // Alias and we mark all the aliases as disabled before freeing
428 // PhysReg.
429 // In the latter case, since PhysReg was disabled, this means that
430 // its value is defined only by physical sub-registers. This check
431 // is performed by the assert of the default case in this loop.
432       // Note: The value of the superregister may only be partially
433       // defined, which is why regDisabled is a valid state for aliases.
434 assert((TRI->isSuperRegister(PhysReg, Alias) ||
435 TRI->isSuperRegister(Alias, PhysReg)) &&
436 "Instruction is not using a subregister of a reserved register");
437 LLVM_FALLTHROUGH;
438 case regFree:
439 if (TRI->isSuperRegister(PhysReg, Alias)) {
440 // Leave the superregister in the working set.
441 setPhysRegState(Alias, regFree);
442 MO.getParent()->addRegisterKilled(Alias, TRI, true);
443 return;
444 }
445 // Some other alias was in the working set - clear it.
446 setPhysRegState(Alias, regDisabled);
447 break;
448 default:
449 llvm_unreachable("Instruction uses an alias of an allocated register");
450 }
451 }
452
453 // All aliases are disabled, bring register into working set.
454 setPhysRegState(PhysReg, regFree);
455 MO.setIsKill();
456 }
457
458 /// Mark PhysReg as reserved or free after spilling any virtregs. This is very
459 /// similar to defineVirtReg except the physreg is reserved instead of
460 /// allocated.
461 void RegAllocFast::definePhysReg(MachineBasicBlock::iterator MI,
462 MCPhysReg PhysReg, RegState NewState) {
463 markRegUsedInInstr(PhysReg);
464 switch (unsigned VirtReg = PhysRegState[PhysReg]) {
465 case regDisabled:
466 break;
467 default:
468 spillVirtReg(MI, VirtReg);
469 LLVM_FALLTHROUGH;
470 case regFree:
471 case regReserved:
472 setPhysRegState(PhysReg, NewState);
473 return;
474 }
475
476 // This is a disabled register, disable all aliases.
477 setPhysRegState(PhysReg, NewState);
478 for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
479 MCPhysReg Alias = *AI;
480 switch (unsigned VirtReg = PhysRegState[Alias]) {
481 case regDisabled:
482 break;
483 default:
484 spillVirtReg(MI, VirtReg);
485 LLVM_FALLTHROUGH;
486 case regFree:
487 case regReserved:
488 setPhysRegState(Alias, regDisabled);
489 if (TRI->isSuperRegister(PhysReg, Alias))
490 return;
491 break;
492 }
493 }
494 }
495
496 /// Return the cost of spilling or clearing out PhysReg and aliases so it is free
497 /// for allocation. Returns 0 when PhysReg is free or disabled with all aliases
498 /// disabled - it can be allocated directly.
499 /// \returns spillImpossible when PhysReg or an alias can't be spilled.
500 unsigned RegAllocFast::calcSpillCost(MCPhysReg PhysReg) const {
501 if (isRegUsedInInstr(PhysReg)) {
502 LLVM_DEBUG(dbgs() << printReg(PhysReg, TRI)
503 << " is already used in instr.\n");
504 return spillImpossible;
505 }
506 switch (unsigned VirtReg = PhysRegState[PhysReg]) {
507 case regDisabled:
508 break;
509 case regFree:
510 return 0;
511 case regReserved:
512 LLVM_DEBUG(dbgs() << printReg(VirtReg, TRI) << " corresponding "
513 << printReg(PhysReg, TRI) << " is reserved already.\n");
514 return spillImpossible;
515 default: {
516 LiveRegMap::const_iterator LRI = findLiveVirtReg(VirtReg);
517 assert(LRI != LiveVirtRegs.end() && LRI->PhysReg &&
518 "Missing VirtReg entry");
519 return LRI->Dirty ? spillDirty : spillClean;
520 }
521 }
522
523 // This is a disabled register, add up cost of aliases.
524 LLVM_DEBUG(dbgs() << printReg(PhysReg, TRI) << " is disabled.\n");
525 unsigned Cost = 0;
526 for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
527 MCPhysReg Alias = *AI;
528 switch (unsigned VirtReg = PhysRegState[Alias]) {
529 case regDisabled:
530 break;
531 case regFree:
532 ++Cost;
533 break;
534 case regReserved:
535 return spillImpossible;
536 default: {
537 LiveRegMap::const_iterator LRI = findLiveVirtReg(VirtReg);
538 assert(LRI != LiveVirtRegs.end() && LRI->PhysReg &&
539 "Missing VirtReg entry");
540 Cost += LRI->Dirty ? spillDirty : spillClean;
541 break;
542 }
543 }
544 }
545 return Cost;
546 }
547
548 /// This method updates local state so that we know that PhysReg is the
549 /// proper container for VirtReg now. The physical register must not be used
550 /// for anything else when this is called.
551 void RegAllocFast::assignVirtToPhysReg(LiveReg &LR, MCPhysReg PhysReg) {
552 unsigned VirtReg = LR.VirtReg;
553 LLVM_DEBUG(dbgs() << "Assigning " << printReg(VirtReg, TRI) << " to "
554 << printReg(PhysReg, TRI) << '\n');
555 assert(LR.PhysReg == 0 && "Already assigned a physreg");
556 assert(PhysReg != 0 && "Trying to assign no register");
557 LR.PhysReg = PhysReg;
558 setPhysRegState(PhysReg, VirtReg);
559 }
560
561 /// Allocates a physical register for VirtReg.
562 void RegAllocFast::allocVirtReg(MachineInstr &MI, LiveReg &LR, unsigned Hint) {
563 const unsigned VirtReg = LR.VirtReg;
564
565 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
566 "Can only allocate virtual registers");
567
568 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
569 LLVM_DEBUG(dbgs() << "Search register for " << printReg(VirtReg)
570 << " in class " << TRI->getRegClassName(&RC) << '\n');
571
572 // Take hint when possible.
573 if (TargetRegisterInfo::isPhysicalRegister(Hint) &&
574 MRI->isAllocatable(Hint) && RC.contains(Hint)) {
575 // Ignore the hint if we would have to spill a dirty register.
576 unsigned Cost = calcSpillCost(Hint);
577 if (Cost < spillDirty) {
578 if (Cost)
579 definePhysReg(MI, Hint, regFree);
580 assignVirtToPhysReg(LR, Hint);
581 return;
582 }
583 }
584
585 // First try to find a completely free register.
586 ArrayRef<MCPhysReg> AllocationOrder = RegClassInfo.getOrder(&RC);
587 for (MCPhysReg PhysReg : AllocationOrder) {
588 if (PhysRegState[PhysReg] == regFree && !isRegUsedInInstr(PhysReg)) {
589 assignVirtToPhysReg(LR, PhysReg);
590 return;
591 }
592 }
593
594 MCPhysReg BestReg = 0;
595 unsigned BestCost = spillImpossible;
596 for (MCPhysReg PhysReg : AllocationOrder) {
597 LLVM_DEBUG(dbgs() << "\tRegister: " << printReg(PhysReg, TRI) << ' ');
598 unsigned Cost = calcSpillCost(PhysReg);
599 LLVM_DEBUG(dbgs() << "Cost: " << Cost << " BestCost: " << BestCost << '\n');
600     // Immediately take a register with cost 0.
601 if (Cost == 0) {
602 assignVirtToPhysReg(LR, PhysReg);
603 return;
604 }
605 if (Cost < BestCost) {
606 BestReg = PhysReg;
607 BestCost = Cost;
608 }
609 }
610
611 if (!BestReg) {
612 // Nothing we can do: Report an error and keep going with an invalid
613 // allocation.
614 if (MI.isInlineAsm())
615 MI.emitError("inline assembly requires more registers than available");
616 else
617 MI.emitError("ran out of registers during register allocation");
618 definePhysReg(MI, *AllocationOrder.begin(), regFree);
619 assignVirtToPhysReg(LR, *AllocationOrder.begin());
620 return;
621 }
622
623 definePhysReg(MI, BestReg, regFree);
624 assignVirtToPhysReg(LR, BestReg);
625 }
626
627 /// Allocates a register for VirtReg and mark it as dirty.
628 MCPhysReg RegAllocFast::defineVirtReg(MachineInstr &MI, unsigned OpNum,
629 unsigned VirtReg, unsigned Hint) {
630 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
631 "Not a virtual register");
632 LiveRegMap::iterator LRI;
633 bool New;
634 std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
635 if (!LRI->PhysReg) {
636 // If there is no hint, peek at the only use of this register.
637 if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
638 MRI->hasOneNonDBGUse(VirtReg)) {
639 const MachineInstr &UseMI = *MRI->use_instr_nodbg_begin(VirtReg);
640 // It's a copy, use the destination register as a hint.
641 if (UseMI.isCopyLike())
642 Hint = UseMI.getOperand(0).getReg();
643 }
644 allocVirtReg(MI, *LRI, Hint);
645 } else if (LRI->LastUse) {
646 // Redefining a live register - kill at the last use, unless it is this
647 // instruction defining VirtReg multiple times.
648 if (LRI->LastUse != &MI || LRI->LastUse->getOperand(LRI->LastOpNum).isUse())
649 addKillFlag(*LRI);
650 }
651 assert(LRI->PhysReg && "Register not assigned");
652 LRI->LastUse = &MI;
653 LRI->LastOpNum = OpNum;
654 LRI->Dirty = true;
655 markRegUsedInInstr(LRI->PhysReg);
656 return LRI->PhysReg;
657 }
658
659 /// Make sure VirtReg is available in a physreg and return it.
660 RegAllocFast::LiveReg &RegAllocFast::reloadVirtReg(MachineInstr &MI,
661 unsigned OpNum,
662 unsigned VirtReg,
663 unsigned Hint) {
664 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
665 "Not a virtual register");
666 LiveRegMap::iterator LRI;
667 bool New;
668 std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
669 MachineOperand &MO = MI.getOperand(OpNum);
670 if (!LRI->PhysReg) {
671 allocVirtReg(MI, *LRI, Hint);
672 reload(MI, VirtReg, LRI->PhysReg);
673 } else if (LRI->Dirty) {
674 if (isLastUseOfLocalReg(MO)) {
675 LLVM_DEBUG(dbgs() << "Killing last use: " << MO << '\n');
676 if (MO.isUse())
677 MO.setIsKill();
678 else
679 MO.setIsDead();
680 } else if (MO.isKill()) {
681 LLVM_DEBUG(dbgs() << "Clearing dubious kill: " << MO << '\n');
682 MO.setIsKill(false);
683 } else if (MO.isDead()) {
684 LLVM_DEBUG(dbgs() << "Clearing dubious dead: " << MO << '\n');
685 MO.setIsDead(false);
686 }
687 } else if (MO.isKill()) {
688 // We must remove kill flags from uses of reloaded registers because the
689 // register would be killed immediately, and there might be a second use:
690 // %foo = OR killed %x, %x
691 // This would cause a second reload of %x into a different register.
692 LLVM_DEBUG(dbgs() << "Clearing clean kill: " << MO << '\n');
693 MO.setIsKill(false);
694 } else if (MO.isDead()) {
695 LLVM_DEBUG(dbgs() << "Clearing clean dead: " << MO << '\n');
696 MO.setIsDead(false);
697 }
698 assert(LRI->PhysReg && "Register not assigned");
699 LRI->LastUse = &MI;
700 LRI->LastOpNum = OpNum;
701 markRegUsedInInstr(LRI->PhysReg);
702 return *LRI;
703 }
704
705 /// Changes operand OpNum in MI to refer to PhysReg, considering subregs. This
706 /// may invalidate any operand pointers. Return true if the operand kills its
707 /// register.
708 bool RegAllocFast::setPhysReg(MachineInstr &MI, MachineOperand &MO,
709 MCPhysReg PhysReg) {
710 bool Dead = MO.isDead();
711 if (!MO.getSubReg()) {
712 MO.setReg(PhysReg);
713 MO.setIsRenamable(true);
714 return MO.isKill() || Dead;
715 }
716
717 // Handle subregister index.
718 MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
719 MO.setIsRenamable(true);
720 MO.setSubReg(0);
721
722 // A kill flag implies killing the full register. Add corresponding super
723 // register kill.
724 if (MO.isKill()) {
725 MI.addRegisterKilled(PhysReg, TRI, true);
726 return true;
727 }
728
729 // A <def,read-undef> of a sub-register requires an implicit def of the full
730 // register.
731 if (MO.isDef() && MO.isUndef())
732 MI.addRegisterDefined(PhysReg, TRI);
733
734 return Dead;
735 }
736
737 // Handles special instruction operands like early clobbers and tied ops when
738 // there are additional physreg defines.
739 void RegAllocFast::handleThroughOperands(MachineInstr &MI,
740 SmallVectorImpl<unsigned> &VirtDead) {
741 LLVM_DEBUG(dbgs() << "Scanning for through registers:");
742 SmallSet<unsigned, 8> ThroughRegs;
743 for (const MachineOperand &MO : MI.operands()) {
744 if (!MO.isReg()) continue;
745 unsigned Reg = MO.getReg();
746 if (!TargetRegisterInfo::isVirtualRegister(Reg))
747 continue;
748 if (MO.isEarlyClobber() || (MO.isUse() && MO.isTied()) ||
749 (MO.getSubReg() && MI.readsVirtualRegister(Reg))) {
750 if (ThroughRegs.insert(Reg).second)
751 LLVM_DEBUG(dbgs() << ' ' << printReg(Reg));
752 }
753 }
754
755 // If any physreg defines collide with preallocated through registers,
756 // we must spill and reallocate.
757 LLVM_DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
758 for (const MachineOperand &MO : MI.operands()) {
759 if (!MO.isReg() || !MO.isDef()) continue;
760 unsigned Reg = MO.getReg();
761 if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
762 markRegUsedInInstr(Reg);
763 for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
764 if (ThroughRegs.count(PhysRegState[*AI]))
765 definePhysReg(MI, *AI, regFree);
766 }
767 }
768
769 SmallVector<unsigned, 8> PartialDefs;
770 LLVM_DEBUG(dbgs() << "Allocating tied uses.\n");
771 for (unsigned I = 0, E = MI.getNumOperands(); I != E; ++I) {
772 MachineOperand &MO = MI.getOperand(I);
773 if (!MO.isReg()) continue;
774 unsigned Reg = MO.getReg();
775 if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
776 if (MO.isUse()) {
777 if (!MO.isTied()) continue;
778 LLVM_DEBUG(dbgs() << "Operand " << I << "(" << MO
779 << ") is tied to operand " << MI.findTiedOperandIdx(I)
780 << ".\n");
781 LiveReg &LR = reloadVirtReg(MI, I, Reg, 0);
782 MCPhysReg PhysReg = LR.PhysReg;
783 setPhysReg(MI, MO, PhysReg);
784 // Note: we don't update the def operand yet. That would cause the normal
785 // def-scan to attempt spilling.
786 } else if (MO.getSubReg() && MI.readsVirtualRegister(Reg)) {
787 LLVM_DEBUG(dbgs() << "Partial redefine: " << MO << '\n');
788 // Reload the register, but don't assign to the operand just yet.
789 // That would confuse the later phys-def processing pass.
790 LiveReg &LR = reloadVirtReg(MI, I, Reg, 0);
791 PartialDefs.push_back(LR.PhysReg);
792 }
793 }
794
795 LLVM_DEBUG(dbgs() << "Allocating early clobbers.\n");
796 for (unsigned I = 0, E = MI.getNumOperands(); I != E; ++I) {
797 const MachineOperand &MO = MI.getOperand(I);
798 if (!MO.isReg()) continue;
799 unsigned Reg = MO.getReg();
800 if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
801 if (!MO.isEarlyClobber())
802 continue;
803 // Note: defineVirtReg may invalidate MO.
804 MCPhysReg PhysReg = defineVirtReg(MI, I, Reg, 0);
805 if (setPhysReg(MI, MI.getOperand(I), PhysReg))
806 VirtDead.push_back(Reg);
807 }
808
809 // Restore UsedInInstr to a state usable for allocating normal virtual uses.
810 UsedInInstr.clear();
811 for (const MachineOperand &MO : MI.operands()) {
812 if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
813 unsigned Reg = MO.getReg();
814 if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
815 LLVM_DEBUG(dbgs() << "\tSetting " << printReg(Reg, TRI)
816 << " as used in instr\n");
817 markRegUsedInInstr(Reg);
818 }
819
820 // Also mark PartialDefs as used to avoid reallocation.
821 for (unsigned PartialDef : PartialDefs)
822 markRegUsedInInstr(PartialDef);
823 }
824
825 #ifndef NDEBUG
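/// Debug dump of the PhysRegState table; also asserts that LiveVirtRegs is a
/// consistent inverse mapping of it.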
826 void RegAllocFast::dumpState() {
827 for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) {
828 if (PhysRegState[Reg] == regDisabled) continue;
829 dbgs() << " " << printReg(Reg, TRI);
830 switch(PhysRegState[Reg]) {
831 case regFree:
832 break;
833 case regReserved:
834 dbgs() << "*";
835 break;
836 default: {
837 dbgs() << '=' << printReg(PhysRegState[Reg]);
838 LiveRegMap::iterator LRI = findLiveVirtReg(PhysRegState[Reg]);
839 assert(LRI != LiveVirtRegs.end() && LRI->PhysReg &&
840 "Missing VirtReg entry");
841 if (LRI->Dirty)
842 dbgs() << "*";
843 assert(LRI->PhysReg == Reg && "Bad inverse map");
844 break;
845 }
846 }
847 }
848 dbgs() << '\n';
849 // Check that LiveVirtRegs is the inverse.
850 for (LiveRegMap::iterator i = LiveVirtRegs.begin(),
851 e = LiveVirtRegs.end(); i != e; ++i) {
852 if (!i->PhysReg)
853 continue;
854 assert(TargetRegisterInfo::isVirtualRegister(i->VirtReg) &&
855 "Bad map key");
856 assert(TargetRegisterInfo::isPhysicalRegister(i->PhysReg) &&
857 "Bad map value");
858 assert(PhysRegState[i->PhysReg] == i->VirtReg && "Bad inverse map");
859 }
860 }
861 #endif
862
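/// Allocate all register operands of a single instruction: record physreg
/// uses and early clobbers first, then allocate virtual uses, spill everything
/// across calls, and finally allocate defs, killing dead defs afterwards.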
863 void RegAllocFast::allocateInstruction(MachineInstr &MI) {
864 const MCInstrDesc &MCID = MI.getDesc();
865
866 // If this is a copy, we may be able to coalesce.
867 unsigned CopySrcReg = 0;
868 unsigned CopyDstReg = 0;
869 unsigned CopySrcSub = 0;
870 unsigned CopyDstSub = 0;
871 if (MI.isCopy()) {
872 CopyDstReg = MI.getOperand(0).getReg();
873 CopySrcReg = MI.getOperand(1).getReg();
874 CopyDstSub = MI.getOperand(0).getSubReg();
875 CopySrcSub = MI.getOperand(1).getSubReg();
876 }
877
878 // Track registers used by instruction.
879 UsedInInstr.clear();
880
881 // First scan.
882 // Mark physreg uses and early clobbers as used.
883 // Find the end of the virtreg operands
884 unsigned VirtOpEnd = 0;
885 bool hasTiedOps = false;
886 bool hasEarlyClobbers = false;
887 bool hasPartialRedefs = false;
888 bool hasPhysDefs = false;
889 for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
890 MachineOperand &MO = MI.getOperand(i);
891 // Make sure MRI knows about registers clobbered by regmasks.
892 if (MO.isRegMask()) {
893 MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());
894 continue;
895 }
896 if (!MO.isReg()) continue;
897 unsigned Reg = MO.getReg();
898 if (!Reg) continue;
899 if (TargetRegisterInfo::isVirtualRegister(Reg)) {
900 VirtOpEnd = i+1;
901 if (MO.isUse()) {
902 hasTiedOps = hasTiedOps ||
903 MCID.getOperandConstraint(i, MCOI::TIED_TO) != -1;
904 } else {
905 if (MO.isEarlyClobber())
906 hasEarlyClobbers = true;
907 if (MO.getSubReg() && MI.readsVirtualRegister(Reg))
908 hasPartialRedefs = true;
909 }
910 continue;
911 }
912 if (!MRI->isAllocatable(Reg)) continue;
913 if (MO.isUse()) {
914 usePhysReg(MO);
915 } else if (MO.isEarlyClobber()) {
916 definePhysReg(MI, Reg,
917 (MO.isImplicit() || MO.isDead()) ? regFree : regReserved);
918 hasEarlyClobbers = true;
919 } else
920 hasPhysDefs = true;
921 }
922
923 // The instruction may have virtual register operands that must be allocated
924 // the same register at use-time and def-time: early clobbers and tied
925 // operands. If there are also physical defs, these registers must avoid
926 // both physical defs and uses, making them more constrained than normal
927 // operands.
928 // Similarly, if there are multiple defs and tied operands, we must make
929 // sure the same register is allocated to uses and defs.
930 // We didn't detect inline asm tied operands above, so just make this extra
931 // pass for all inline asm.
932 if (MI.isInlineAsm() || hasEarlyClobbers || hasPartialRedefs ||
933 (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) {
934 handleThroughOperands(MI, VirtDead);
935 // Don't attempt coalescing when we have funny stuff going on.
936 CopyDstReg = 0;
937 // Pretend we have early clobbers so the use operands get marked below.
938 // This is not necessary for the common case of a single tied use.
939 hasEarlyClobbers = true;
940 }
941
942 // Second scan.
943 // Allocate virtreg uses.
944 for (unsigned I = 0; I != VirtOpEnd; ++I) {
945 MachineOperand &MO = MI.getOperand(I);
946 if (!MO.isReg()) continue;
947 unsigned Reg = MO.getReg();
948 if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
949 if (MO.isUse()) {
950 LiveReg &LR = reloadVirtReg(MI, I, Reg, CopyDstReg);
951 MCPhysReg PhysReg = LR.PhysReg;
952 CopySrcReg = (CopySrcReg == Reg || CopySrcReg == PhysReg) ? PhysReg : 0;
953 if (setPhysReg(MI, MO, PhysReg))
954 killVirtReg(LR);
955 }
956 }
957
958 // Track registers defined by instruction - early clobbers and tied uses at
959 // this point.
960 UsedInInstr.clear();
961 if (hasEarlyClobbers) {
962 for (const MachineOperand &MO : MI.operands()) {
963 if (!MO.isReg()) continue;
964 unsigned Reg = MO.getReg();
965 if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
966 // Look for physreg defs and tied uses.
967 if (!MO.isDef() && !MO.isTied()) continue;
968 markRegUsedInInstr(Reg);
969 }
970 }
971
972 unsigned DefOpEnd = MI.getNumOperands();
973 if (MI.isCall()) {
974 // Spill all virtregs before a call. This serves one purpose: If an
975 // exception is thrown, the landing pad is going to expect to find
976 // registers in their spill slots.
977     // Note: although it is appealing to just consider all definitions
978 // as call-clobbered, this is not correct because some of those
979 // definitions may be used later on and we do not want to reuse
980 // those for virtual registers in between.
981 LLVM_DEBUG(dbgs() << " Spilling remaining registers before call.\n");
982 spillAll(MI);
983 }
984
985 // Third scan.
986 // Allocate defs and collect dead defs.
987 for (unsigned I = 0; I != DefOpEnd; ++I) {
988 const MachineOperand &MO = MI.getOperand(I);
989 if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
990 continue;
991 unsigned Reg = MO.getReg();
992
993 if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
994 if (!MRI->isAllocatable(Reg)) continue;
995 definePhysReg(MI, Reg, MO.isDead() ? regFree : regReserved);
996 continue;
997 }
998 MCPhysReg PhysReg = defineVirtReg(MI, I, Reg, CopySrcReg);
999 if (setPhysReg(MI, MI.getOperand(I), PhysReg)) {
1000 VirtDead.push_back(Reg);
1001 CopyDstReg = 0; // cancel coalescing;
1002 } else
1003 CopyDstReg = (CopyDstReg == Reg || CopyDstReg == PhysReg) ? PhysReg : 0;
1004 }
1005
1006 // Kill dead defs after the scan to ensure that multiple defs of the same
1007 // register are allocated identically. We didn't need to do this for uses
1008   // because we are creating our own kill flags, and they are always at the
1009 // last use.
1010 for (unsigned VirtReg : VirtDead)
1011 killVirtReg(VirtReg);
1012 VirtDead.clear();
1013
1014 LLVM_DEBUG(dbgs() << "<< " << MI);
1015 if (CopyDstReg && CopyDstReg == CopySrcReg && CopyDstSub == CopySrcSub) {
1016 LLVM_DEBUG(dbgs() << "Mark identity copy for removal\n");
1017 Coalesced.push_back(&MI);
1018 }
1019 }
1020
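/// Point the operand of a DBG_VALUE at the physreg or stack slot currently
/// holding its virtual register, and remember the DBG_VALUE so later spills
/// keep the debug info up to date.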
1021 void RegAllocFast::handleDebugValue(MachineInstr &MI) {
1022 MachineOperand &MO = MI.getOperand(0);
1023
1024 // Ignore DBG_VALUEs that aren't based on virtual registers. These are
1025 // mostly constants and frame indices.
1026 if (!MO.isReg())
1027 return;
1028 unsigned Reg = MO.getReg();
1029 if (!TargetRegisterInfo::isVirtualRegister(Reg))
1030 return;
1031
1032 // See if this virtual register has already been allocated to a physical
1033 // register or spilled to a stack slot.
1034 LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
1035 if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
1036 setPhysReg(MI, MO, LRI->PhysReg);
1037 } else {
1038 int SS = StackSlotForVirtReg[Reg];
1039 if (SS != -1) {
1040 // Modify DBG_VALUE now that the value is in a spill slot.
1041 updateDbgValueForSpill(MI, SS);
1042 LLVM_DEBUG(dbgs() << "Modifying debug info due to spill:" << "\t" << MI);
1043 return;
1044 }
1045
1046 // We can't allocate a physreg for a DebugValue, sorry!
1047 LLVM_DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE");
1048 MO.setReg(0);
1049 }
1050
1051 // If Reg hasn't been spilled, put this DBG_VALUE in LiveDbgValueMap so
1052 // that future spills of Reg will have DBG_VALUEs.
1053 LiveDbgValueMap[Reg].push_back(&MI);
1054 }
1055
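/// Allocate each instruction in \p MBB in order, spill all remaining live
/// virtual registers before the first terminator, and erase the identity
/// copies that were coalesced along the way.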
1056 void RegAllocFast::allocateBasicBlock(MachineBasicBlock &MBB) {
1057 this->MBB = &MBB;
1058 LLVM_DEBUG(dbgs() << "\nAllocating " << MBB);
1059
1060 PhysRegState.assign(TRI->getNumRegs(), regDisabled);
1061 assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");
1062
1063 MachineBasicBlock::iterator MII = MBB.begin();
1064
1065 // Add live-in registers as live.
1066 for (const MachineBasicBlock::RegisterMaskPair LI : MBB.liveins())
1067 if (MRI->isAllocatable(LI.PhysReg))
1068 definePhysReg(MII, LI.PhysReg, regReserved);
1069
1070 VirtDead.clear();
1071 Coalesced.clear();
1072
1073 // Otherwise, sequentially allocate each instruction in the MBB.
1074 for (MachineInstr &MI : MBB) {
1075 LLVM_DEBUG(
1076 dbgs() << "\n>> " << MI << "Regs:";
1077 dumpState()
1078 );
1079
1080 // Special handling for debug values. Note that they are not allowed to
1081 // affect codegen of the other instructions in any way.
1082 if (MI.isDebugValue()) {
1083 handleDebugValue(MI);
1084 continue;
1085 }
1086
1087 allocateInstruction(MI);
1088 }
1089
1090 // Spill all physical registers holding virtual registers now.
1091 LLVM_DEBUG(dbgs() << "Spilling live registers at end of block.\n");
1092 spillAll(MBB.getFirstTerminator());
1093
1094 // Erase all the coalesced copies. We are delaying it until now because
1095 // LiveVirtRegs might refer to the instrs.
1096 for (MachineInstr *MI : Coalesced)
1097 MBB.erase(MI);
1098 NumCoalesced += Coalesced.size();
1099
1100 LLVM_DEBUG(MBB.dump());
1101 }
1102
1103 bool RegAllocFast::runOnMachineFunction(MachineFunction &MF) {
1104 LLVM_DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
1105 << "********** Function: " << MF.getName() << '\n');
1106 MRI = &MF.getRegInfo();
1107 const TargetSubtargetInfo &STI = MF.getSubtarget();
1108 TRI = STI.getRegisterInfo();
1109 TII = STI.getInstrInfo();
1110 MFI = &MF.getFrameInfo();
1111 MRI->freezeReservedRegs(MF);
1112 RegClassInfo.runOnMachineFunction(MF);
1113 UsedInInstr.clear();
1114 UsedInInstr.setUniverse(TRI->getNumRegUnits());
1115
1116   // Initialize the virtual->physical register map to have a 'null'
1117   // mapping for all virtual registers.
1118 unsigned NumVirtRegs = MRI->getNumVirtRegs();
1119 StackSlotForVirtReg.resize(NumVirtRegs);
1120 LiveVirtRegs.setUniverse(NumVirtRegs);
1121
1122 // Loop over all of the basic blocks, eliminating virtual register references
1123 for (MachineBasicBlock &MBB : MF)
1124 allocateBasicBlock(MBB);
1125
1126 // All machine operands and other references to virtual registers have been
1127 // replaced. Remove the virtual registers.
1128 MRI->clearVirtRegs();
1129
1130 StackSlotForVirtReg.clear();
1131 LiveDbgValueMap.clear();
1132 return true;
1133 }
1134
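/// Factory for the allocator registered as "fast" above; it is selected with
/// -regalloc=fast and is typically the default register allocator at -O0.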
1135 FunctionPass *llvm::createFastRegisterAllocator() {
1136 return new RegAllocFast();
1137 }
1138