//===----- AggressiveAntiDepBreaker.cpp - Anti-dep breaker ----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the AggressiveAntiDepBreaker class, which
// implements register anti-dependence breaking during post-RA
// scheduling. It attempts to break all anti-dependencies within a
// block.
//
//===----------------------------------------------------------------------===//

#include "AggressiveAntiDepBreaker.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/RegisterClassInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetRegisterInfo.h"
using namespace llvm;

#define DEBUG_TYPE "post-RA-sched"

// If DebugDiv > 0 then only break antidep with (ID % DebugDiv) == DebugMod
static cl::opt<int>
DebugDiv("agg-antidep-debugdiv",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);
static cl::opt<int>
DebugMod("agg-antidep-debugmod",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);

// Initialize per-register tracking state for a scheduling region within BB.
// Every register starts in its own singleton group, marked not-live
// (KillIndex undefined, DefIndex at the end of the block).
AggressiveAntiDepState::AggressiveAntiDepState(const unsigned TargetRegs,
                                               MachineBasicBlock *BB) :
  NumTargetRegs(TargetRegs), GroupNodes(TargetRegs, 0),
  GroupNodeIndices(TargetRegs, 0),
  KillIndices(TargetRegs, 0),
  DefIndices(TargetRegs, 0)
{
  const unsigned BBSize = BB->size();
  for (unsigned i = 0; i < NumTargetRegs; ++i) {
    // Initialize all registers to be in their own group. Initially we
    // assign the register to the same-indexed GroupNode.
    GroupNodeIndices[i] = i;
    // Initialize the indices to indicate that no registers are live.
    KillIndices[i] = ~0u;
    DefIndices[i] = BBSize;
  }
}

// Return the group that Reg belongs to by chasing the union-find parent
// chain in GroupNodes to its representative node. (No path compression is
// performed; see UnionGroups/LeaveGroup for how the chains are formed.)
unsigned AggressiveAntiDepState::GetGroup(unsigned Reg) {
  unsigned Node = GroupNodeIndices[Reg];
  while (GroupNodes[Node] != Node)
    Node = GroupNodes[Node];

  return Node;
}

// Return in Regs every register that (a) belongs to Group and (b) has at
// least one recorded reference in RegRefs. Registers without references
// need not be renamed, so they are excluded.
void AggressiveAntiDepState::GetGroupRegs(
  unsigned Group,
  std::vector<unsigned> &Regs,
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference> *RegRefs)
{
  for (unsigned Reg = 0; Reg != NumTargetRegs; ++Reg) {
    if ((GetGroup(Reg) == Group) && (RegRefs->count(Reg) > 0))
      Regs.push_back(Reg);
  }
}

// Merge the groups of Reg1 and Reg2 and return the resulting group.
// Group 0 is special (un-renamable registers), so if either group is 0
// the merged group must be 0.
unsigned AggressiveAntiDepState::UnionGroups(unsigned Reg1, unsigned Reg2)
{
  assert(GroupNodes[0] == 0 && "GroupNode 0 not parent!");
  assert(GroupNodeIndices[0] == 0 && "Reg 0 not in Group 0!");

  // find group for each register
  unsigned Group1 = GetGroup(Reg1);
  unsigned Group2 = GetGroup(Reg2);

  // if either group is 0, then that must become the parent
  unsigned Parent = (Group1 == 0) ? Group1 : Group2;
  unsigned Other = (Parent == Group1) ? Group2 : Group1;
  GroupNodes.at(Other) = Parent;
  return Parent;
}

// Remove Reg from its current group by giving it a brand-new singleton
// GroupNode. Returns the new group index.
unsigned AggressiveAntiDepState::LeaveGroup(unsigned Reg)
{
  // Create a new GroupNode for Reg. Reg's existing GroupNode must
  // stay as is because there could be other GroupNodes referring to
  // it.
  unsigned idx = GroupNodes.size();
  GroupNodes.push_back(idx);
  GroupNodeIndices[Reg] = idx;
  return idx;
}

bool AggressiveAntiDepState::IsLive(unsigned Reg)
{
  // KillIndex must be defined and DefIndex not defined for a register
  // to be live.
  return((KillIndices[Reg] != ~0u) && (DefIndices[Reg] == ~0u));
}

AggressiveAntiDepBreaker::AggressiveAntiDepBreaker(
    MachineFunction &MFi, const RegisterClassInfo &RCI,
    TargetSubtargetInfo::RegClassVector &CriticalPathRCs)
    : AntiDepBreaker(), MF(MFi), MRI(MF.getRegInfo()),
      TII(MF.getSubtarget().getInstrInfo()),
      TRI(MF.getSubtarget().getRegisterInfo()), RegClassInfo(RCI),
      State(nullptr) {
  /* Collect a bitset of all registers that are only broken if they
     are on the critical path. */
  for (unsigned i = 0, e = CriticalPathRCs.size(); i < e; ++i) {
    BitVector CPSet = TRI->getAllocatableSet(MF, CriticalPathRCs[i]);
    if (CriticalPathSet.none())
      CriticalPathSet = CPSet;
    else
      CriticalPathSet |= CPSet;
  }

  DEBUG(dbgs() << "AntiDep Critical-Path Registers:");
  DEBUG(for (int r = CriticalPathSet.find_first(); r != -1;
             r = CriticalPathSet.find_next(r))
          dbgs() << " " << TRI->getName(r));
  DEBUG(dbgs() << '\n');
}

AggressiveAntiDepBreaker::~AggressiveAntiDepBreaker() {
  delete State;
}

// Set up liveness state for block BB. Registers that are live into any
// successor, plus live-out callee-saved registers, are placed in group 0
// (never renamed) and marked live through the end of the block.
void AggressiveAntiDepBreaker::StartBlock(MachineBasicBlock *BB) {
  assert(!State);
  State = new AggressiveAntiDepState(TRI->getNumRegs(), BB);

  bool IsReturnBlock = BB->isReturnBlock();
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();

  // Examine the live-in regs of all successors.
  for (MachineBasicBlock::succ_iterator SI = BB->succ_begin(),
         SE = BB->succ_end(); SI != SE; ++SI)
    for (const auto &LI : (*SI)->liveins()) {
      for (MCRegAliasIterator AI(LI.PhysReg, TRI, true); AI.isValid(); ++AI) {
        unsigned Reg = *AI;
        State->UnionGroups(Reg, 0);
        KillIndices[Reg] = BB->size();
        DefIndices[Reg] = ~0u;
      }
    }

  // Mark live-out callee-saved registers. In a return block this is
  // all callee-saved registers. In non-return this is any
  // callee-saved register that is not saved in the prolog.
  const MachineFrameInfo *MFI = MF.getFrameInfo();
  BitVector Pristine = MFI->getPristineRegs(MF);
  for (const MCPhysReg *I = TRI->getCalleeSavedRegs(&MF); *I; ++I) {
    unsigned Reg = *I;
    if (!IsReturnBlock && !Pristine.test(Reg)) continue;
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      unsigned AliasReg = *AI;
      State->UnionGroups(AliasReg, 0);
      KillIndices[AliasReg] = BB->size();
      DefIndices[AliasReg] = ~0u;
    }
  }
}

void AggressiveAntiDepBreaker::FinishBlock() {
  delete State;
  State = nullptr;
}

// Update liveness/group state for an already-scheduled instruction MI
// (one that BreakAntiDependencies will not revisit).
void AggressiveAntiDepBreaker::Observe(MachineInstr &MI, unsigned Count,
                                       unsigned InsertPosIndex) {
  assert(Count < InsertPosIndex && "Instruction index out of expected range!");

  std::set<unsigned> PassthruRegs;
  GetPassthruRegs(MI, PassthruRegs);
  PrescanInstruction(MI, Count, PassthruRegs);
  ScanInstruction(MI, Count);

  DEBUG(dbgs() << "Observe: ");
  DEBUG(MI.dump());
  DEBUG(dbgs() << "\tRegs:");

  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  for (unsigned Reg = 0; Reg != TRI->getNumRegs(); ++Reg) {
    // If Reg is currently live, then mark that it can't be renamed as
    // we don't know the extent of its live-range anymore (now that it
    // has been scheduled). If it is not live but was defined in the
    // previous schedule region, then set its def index to the most
    // conservative location (i.e. the beginning of the previous
    // schedule region).
    if (State->IsLive(Reg)) {
      DEBUG(if (State->GetGroup(Reg) != 0)
              dbgs() << " " << TRI->getName(Reg) << "=g" <<
                State->GetGroup(Reg) << "->g0(region live-out)");
      State->UnionGroups(Reg, 0);
    } else if ((DefIndices[Reg] < InsertPosIndex)
               && (DefIndices[Reg] >= Count)) {
      DefIndices[Reg] = Count;
    }
  }
  DEBUG(dbgs() << '\n');
}

// Return true if MO is an implicit operand of MI whose register is paired
// with a matching implicit operand of the opposite sense (an implicit def
// with an implicit (killed) use, or an implicit use with an implicit def),
// i.e. the register value "passes through" the instruction.
bool AggressiveAntiDepBreaker::IsImplicitDefUse(MachineInstr &MI,
                                                MachineOperand &MO) {
  if (!MO.isReg() || !MO.isImplicit())
    return false;

  unsigned Reg = MO.getReg();
  if (Reg == 0)
    return false;

  MachineOperand *Op = nullptr;
  if (MO.isDef())
    Op = MI.findRegisterUseOperand(Reg, true);
  else
    Op = MI.findRegisterDefOperand(Reg);

  return(Op && Op->isImplicit());
}

// Collect in PassthruRegs all registers (including their subregisters)
// whose value passes through MI unchanged: tied defs and implicit
// def/use pairs.
void AggressiveAntiDepBreaker::GetPassthruRegs(
    MachineInstr &MI, std::set<unsigned> &PassthruRegs) {
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    if (!MO.isReg()) continue;
    if ((MO.isDef() && MI.isRegTiedToUseOperand(i)) ||
        IsImplicitDefUse(MI, MO)) {
      const unsigned Reg = MO.getReg();
      for (MCSubRegIterator SubRegs(Reg, TRI, /*IncludeSelf=*/true);
           SubRegs.isValid(); ++SubRegs)
        PassthruRegs.insert(*SubRegs);
    }
  }
}

/// AntiDepEdges - Return in Edges the anti- and output- dependencies
/// in SU that we want to consider for breaking. At most one edge per
/// register is returned.
static void AntiDepEdges(const SUnit *SU, std::vector<const SDep*>& Edges) {
  SmallSet<unsigned, 4> RegSet;
  for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
       P != PE; ++P) {
    if ((P->getKind() == SDep::Anti) || (P->getKind() == SDep::Output)) {
      if (RegSet.insert(P->getReg()).second)
        Edges.push_back(&*P);
    }
  }
}

/// CriticalPathStep - Return the next SUnit after SU on the bottom-up
/// critical path.
static const SUnit *CriticalPathStep(const SUnit *SU) {
  const SDep *Next = nullptr;
  unsigned NextDepth = 0;
  // Find the predecessor edge with the greatest depth.
  if (SU) {
    for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
         P != PE; ++P) {
      const SUnit *PredSU = P->getSUnit();
      unsigned PredLatency = P->getLatency();
      unsigned PredTotalLatency = PredSU->getDepth() + PredLatency;
      // In the case of a latency tie, prefer an anti-dependency edge over
      // other types of edges.
      if (NextDepth < PredTotalLatency ||
          (NextDepth == PredTotalLatency && P->getKind() == SDep::Anti)) {
        NextDepth = PredTotalLatency;
        Next = &*P;
      }
    }
  }

  return (Next) ? Next->getSUnit() : nullptr;
}

// Handle the last use (bottom-up) of Reg and its subregisters: each
// not-yet-live one starts a fresh live-range (new singleton group) killed
// at KillIdx, and its old references are dropped. The tag/header/footer
// strings are only used for debug output.
void AggressiveAntiDepBreaker::HandleLastUse(unsigned Reg, unsigned KillIdx,
                                             const char *tag,
                                             const char *header,
                                             const char *footer) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // FIXME: We must leave subregisters of live super registers as live, so that
  // we don't clear out the register tracking information for subregisters of
  // super registers we're still tracking (and with which we're unioning
  // subregister definitions).
  for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI)
    if (TRI->isSuperRegister(Reg, *AI) && State->IsLive(*AI)) {
      DEBUG(if (!header && footer) dbgs() << footer);
      return;
    }

  if (!State->IsLive(Reg)) {
    KillIndices[Reg] = KillIdx;
    DefIndices[Reg] = ~0u;
    RegRefs.erase(Reg);
    State->LeaveGroup(Reg);
    DEBUG(if (header) {
        dbgs() << header << TRI->getName(Reg); header = nullptr; });
    DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << tag);
  }
  // Repeat for subregisters.
  for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs) {
    unsigned SubregReg = *SubRegs;
    if (!State->IsLive(SubregReg)) {
      KillIndices[SubregReg] = KillIdx;
      DefIndices[SubregReg] = ~0u;
      RegRefs.erase(SubregReg);
      State->LeaveGroup(SubregReg);
      DEBUG(if (header) {
          dbgs() << header << TRI->getName(Reg); header = nullptr; });
      DEBUG(dbgs() << " " << TRI->getName(SubregReg) << "->g" <<
            State->GetGroup(SubregReg) << tag);
    }
  }

  DEBUG(if (!header && footer) dbgs() << footer);
}

// Process the defs of MI (walking the block bottom-up): group defs that
// cannot be renamed with group 0, union aliased live registers into the
// same group, record register references, and update def indices.
void AggressiveAntiDepBreaker::PrescanInstruction(
    MachineInstr &MI, unsigned Count, std::set<unsigned> &PassthruRegs) {
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Handle dead defs by simulating a last-use of the register just
  // after the def. A dead def can occur because the def is truly
  // dead, or because only a subregister is live at the def. If we
  // don't do this the dead def will be incorrectly merged into the
  // previous def.
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    HandleLastUse(Reg, Count + 1, "", "\tDead Def: ", "\n");
  }

  DEBUG(dbgs() << "\tDef Groups:");
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" << State->GetGroup(Reg));

    // If MI's defs have a special allocation requirement, don't allow
    // any def registers to be changed. Also assume all registers
    // defined in a call must not be changed (ABI). Inline assembly may
    // reference either system calls or the register directly. Skip it until we
    // can tell user specified registers from compiler-specified.
    if (MI.isCall() || MI.hasExtraDefRegAllocReq() || TII->isPredicated(MI) ||
        MI.isInlineAsm()) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Any aliases that are live at this point are completely or
    // partially defined here, so group those aliases with Reg.
    for (MCRegAliasIterator AI(Reg, TRI, false); AI.isValid(); ++AI) {
      unsigned AliasReg = *AI;
      if (State->IsLive(AliasReg)) {
        State->UnionGroups(Reg, AliasReg);
        DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << "(via " <<
              TRI->getName(AliasReg) << ")");
      }
    }

    // Note register reference...
    const TargetRegisterClass *RC = nullptr;
    if (i < MI.getDesc().getNumOperands())
      RC = TII->getRegClass(MI.getDesc(), i, TRI, MF);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Scan the register defs for this instruction and update
  // live-ranges.
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;
    // Ignore KILLs and passthru registers for liveness...
    if (MI.isKill() || (PassthruRegs.count(Reg) != 0))
      continue;

    // Update def for Reg and aliases.
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      // We need to be careful here not to define already-live super registers.
      // If the super register is already live, then this definition is not
      // a definition of the whole super register (just a partial insertion
      // into it). Earlier subregister definitions (which we've not yet visited
      // because we're iterating bottom-up) need to be linked to the same group
      // as this definition.
      if (TRI->isSuperRegister(Reg, *AI) && State->IsLive(*AI))
        continue;

      DefIndices[*AI] = Count;
    }
  }
}

// Process the uses of MI (walking the block bottom-up): start new
// live-ranges for last-uses, pin un-renamable uses to group 0, record
// register references, and group together all operands of KILLs.
void AggressiveAntiDepBreaker::ScanInstruction(MachineInstr &MI,
                                               unsigned Count) {
  DEBUG(dbgs() << "\tUse Groups:");
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // If MI's uses have special allocation requirement, don't allow
  // any use registers to be changed. Also assume all registers
  // used in a call must not be changed (ABI).
  // Inline Assembly register uses also cannot be safely changed.
  // FIXME: The issue with predicated instruction is more complex. We are being
  // conservative here because the kill markers cannot be trusted after
  // if-conversion:
  // %R6<def> = LDR %SP, %reg0, 92, pred:14, pred:%reg0; mem:LD4[FixedStack14]
  // ...
  // STR %R0, %R6<kill>, %reg0, 0, pred:0, pred:%CPSR; mem:ST4[%395]
  // %R6<def> = LDR %SP, %reg0, 100, pred:0, pred:%CPSR; mem:LD4[FixedStack12]
  // STR %R0, %R6<kill>, %reg0, 0, pred:14, pred:%reg0; mem:ST4[%396](align=8)
  //
  // The first R6 kill is not really a kill since it's killed by a predicated
  // instruction which may not be executed. The second R6 def may or may not
  // re-define R6 so it's not safe to change it since the last R6 use cannot be
  // changed.
  bool Special = MI.isCall() || MI.hasExtraSrcRegAllocReq() ||
                 TII->isPredicated(MI) || MI.isInlineAsm();

  // Scan the register uses for this instruction and update
  // live-ranges, groups and RegRefs.
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    if (!MO.isReg() || !MO.isUse()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" <<
          State->GetGroup(Reg));

    // It wasn't previously live but now it is, this is a kill. Forget
    // the previous live-range information and start a new live-range
    // for the register.
    HandleLastUse(Reg, Count, "(last-use)");

    if (Special) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Note register reference...
    const TargetRegisterClass *RC = nullptr;
    if (i < MI.getDesc().getNumOperands())
      RC = TII->getRegClass(MI.getDesc(), i, TRI, MF);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Form a group of all defs and uses of a KILL instruction to ensure
  // that all registers are renamed as a group.
  if (MI.isKill()) {
    DEBUG(dbgs() << "\tKill Group:");

    unsigned FirstReg = 0;
    for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI.getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (Reg == 0) continue;

      if (FirstReg != 0) {
        DEBUG(dbgs() << "=" << TRI->getName(Reg));
        State->UnionGroups(FirstReg, Reg);
      } else {
        DEBUG(dbgs() << " " << TRI->getName(Reg));
        FirstReg = Reg;
      }
    }

    DEBUG(dbgs() << "->g" << State->GetGroup(FirstReg) << '\n');
  }
}

// Return the set of registers that Reg may legally be renamed to: the
// intersection of the allocatable sets of the register classes of all
// of Reg's recorded references.
BitVector AggressiveAntiDepBreaker::GetRenameRegisters(unsigned Reg) {
  BitVector BV(TRI->getNumRegs(), false);
  bool first = true;

  // Check all references that need rewriting for Reg. For each, use
  // the corresponding register class to narrow the set of registers
  // that are appropriate for renaming.
  for (const auto &Q : make_range(State->GetRegRefs().equal_range(Reg))) {
    const TargetRegisterClass *RC = Q.second.RC;
    if (!RC) continue;

    BitVector RCBV = TRI->getAllocatableSet(MF, RC);
    if (first) {
      BV |= RCBV;
      first = false;
    } else {
      BV &= RCBV;
    }

    DEBUG(dbgs() << " " << TRI->getRegClassName(RC));
  }

  return BV;
}

// Try to find, in round-robin order, a free replacement register (and the
// matching subregisters) for every referenced register in the group
// AntiDepGroupIndex. On success, fill RenameMap with old->new register
// pairs and return true.
bool AggressiveAntiDepBreaker::FindSuitableFreeRegisters(
    unsigned AntiDepGroupIndex,
    RenameOrderType& RenameOrder,
    std::map<unsigned, unsigned> &RenameMap) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Collect all referenced registers in the same group as
  // AntiDepReg. These all need to be renamed together if we are to
  // break the anti-dependence.
  std::vector<unsigned> Regs;
  State->GetGroupRegs(AntiDepGroupIndex, Regs, &RegRefs);
  assert(Regs.size() > 0 && "Empty register group!");
  if (Regs.size() == 0)
    return false;

  // Find the "superest" register in the group. At the same time,
  // collect the BitVector of registers that can be used to rename
  // each register.
  DEBUG(dbgs() << "\tRename Candidates for Group g" << AntiDepGroupIndex
        << ":\n");
  std::map<unsigned, BitVector> RenameRegisterMap;
  unsigned SuperReg = 0;
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if ((SuperReg == 0) || TRI->isSuperRegister(SuperReg, Reg))
      SuperReg = Reg;

    // If Reg has any references, then collect possible rename regs
    if (RegRefs.count(Reg) > 0) {
      DEBUG(dbgs() << "\t\t" << TRI->getName(Reg) << ":");

      BitVector &BV = RenameRegisterMap[Reg];
      assert(BV.empty());
      BV = GetRenameRegisters(Reg);

      DEBUG({
        dbgs() << " ::";
        for (int r = BV.find_first(); r != -1; r = BV.find_next(r))
          dbgs() << " " << TRI->getName(r);
        dbgs() << "\n";
      });
    }
  }

  // All group registers should be a subreg of SuperReg.
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if (Reg == SuperReg) continue;
    bool IsSub = TRI->isSubRegister(SuperReg, Reg);
    // FIXME: remove this once PR18663 has been properly fixed. For now,
    // return a conservative answer:
    // assert(IsSub && "Expecting group subregister");
    if (!IsSub)
      return false;
  }

#ifndef NDEBUG
  // If DebugDiv > 0 then only rename (renamecnt % DebugDiv) == DebugMod
  if (DebugDiv > 0) {
    static int renamecnt = 0;
    if (renamecnt++ % DebugDiv != DebugMod)
      return false;

    dbgs() << "*** Performing rename " << TRI->getName(SuperReg) <<
      " for debug ***\n";
  }
#endif

  // Check each possible rename register for SuperReg in round-robin
  // order. If that register is available, and the corresponding
  // registers are available for the other group subregisters, then we
  // can use those registers to rename.

  // FIXME: Using getMinimalPhysRegClass is very conservative. We should
  // check every use of the register and find the largest register class
  // that can be used in all of them.
  const TargetRegisterClass *SuperRC =
    TRI->getMinimalPhysRegClass(SuperReg, MVT::Other);

  ArrayRef<MCPhysReg> Order = RegClassInfo.getOrder(SuperRC);
  if (Order.empty()) {
    DEBUG(dbgs() << "\tEmpty Super Regclass!!\n");
    return false;
  }

  DEBUG(dbgs() << "\tFind Registers:");

  RenameOrder.insert(RenameOrderType::value_type(SuperRC, Order.size()));

  unsigned OrigR = RenameOrder[SuperRC];
  unsigned EndR = ((OrigR == Order.size()) ? 0 : OrigR);
  unsigned R = OrigR;
  do {
    if (R == 0) R = Order.size();
    --R;
    const unsigned NewSuperReg = Order[R];
    // Don't consider non-allocatable registers
    if (!MRI.isAllocatable(NewSuperReg)) continue;
    // Don't replace a register with itself.
    if (NewSuperReg == SuperReg) continue;

    DEBUG(dbgs() << " [" << TRI->getName(NewSuperReg) << ':');
    RenameMap.clear();

    // For each referenced group register (which must be a SuperReg or
    // a subregister of SuperReg), find the corresponding subregister
    // of NewSuperReg and make sure it is free to be renamed.
    for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
      unsigned Reg = Regs[i];
      unsigned NewReg = 0;
      if (Reg == SuperReg) {
        NewReg = NewSuperReg;
      } else {
        unsigned NewSubRegIdx = TRI->getSubRegIndex(SuperReg, Reg);
        if (NewSubRegIdx != 0)
          NewReg = TRI->getSubReg(NewSuperReg, NewSubRegIdx);
      }

      DEBUG(dbgs() << " " << TRI->getName(NewReg));

      // Check if Reg can be renamed to NewReg.
      if (!RenameRegisterMap[Reg].test(NewReg)) {
        DEBUG(dbgs() << "(no rename)");
        goto next_super_reg;
      }

      // If NewReg is dead and NewReg's most recent def is not before
      // Regs's kill, it's safe to replace Reg with NewReg. We
      // must also check all aliases of NewReg, because we can't define a
      // register when any sub or super is already live.
      if (State->IsLive(NewReg) || (KillIndices[Reg] > DefIndices[NewReg])) {
        DEBUG(dbgs() << "(live)");
        goto next_super_reg;
      } else {
        bool found = false;
        for (MCRegAliasIterator AI(NewReg, TRI, false); AI.isValid(); ++AI) {
          unsigned AliasReg = *AI;
          if (State->IsLive(AliasReg) ||
              (KillIndices[Reg] > DefIndices[AliasReg])) {
            DEBUG(dbgs() << "(alias " << TRI->getName(AliasReg) << " live)");
            found = true;
            break;
          }
        }
        if (found)
          goto next_super_reg;
      }

      // We cannot rename 'Reg' to 'NewReg' if one of the uses of 'Reg' also
      // defines 'NewReg' via an early-clobber operand.
      for (const auto &Q : make_range(RegRefs.equal_range(Reg))) {
        MachineInstr *UseMI = Q.second.Operand->getParent();
        int Idx = UseMI->findRegisterDefOperandIdx(NewReg, false, true, TRI);
        if (Idx == -1)
          continue;

        if (UseMI->getOperand(Idx).isEarlyClobber()) {
          DEBUG(dbgs() << "(ec)");
          goto next_super_reg;
        }
      }

      // Also, we cannot rename 'Reg' to 'NewReg' if the instruction defining
      // 'Reg' is an early-clobber define and that instruction also uses
      // 'NewReg'.
      for (const auto &Q : make_range(RegRefs.equal_range(Reg))) {
        if (!Q.second.Operand->isDef() || !Q.second.Operand->isEarlyClobber())
          continue;

        MachineInstr *DefMI = Q.second.Operand->getParent();
        if (DefMI->readsRegister(NewReg, TRI)) {
          DEBUG(dbgs() << "(ec)");
          goto next_super_reg;
        }
      }

      // Record that 'Reg' can be renamed to 'NewReg'.
      RenameMap.insert(std::pair<unsigned, unsigned>(Reg, NewReg));
    }

    // If we fall-out here, then every register in the group can be
    // renamed, as recorded in RenameMap.
    RenameOrder.erase(SuperRC);
    RenameOrder.insert(RenameOrderType::value_type(SuperRC, R));
    DEBUG(dbgs() << "]\n");
    return true;

  next_super_reg:
    DEBUG(dbgs() << ']');
  } while (R != EndR);

  DEBUG(dbgs() << '\n');

  // No registers are free and available!
  return false;
}

/// BreakAntiDependencies - Identify anti-dependencies within the
/// ScheduleDAG and break them by renaming registers.
///
unsigned AggressiveAntiDepBreaker::BreakAntiDependencies(
                              const std::vector<SUnit>& SUnits,
                              MachineBasicBlock::iterator Begin,
                              MachineBasicBlock::iterator End,
                              unsigned InsertPosIndex,
                              DbgValueVector &DbgValues) {

  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // The code below assumes that there is at least one instruction,
  // so just duck out immediately if the block is empty.
  if (SUnits.empty()) return 0;

  // For each regclass the next register to use for renaming.
  RenameOrderType RenameOrder;

  // ...need a map from MI to SUnit.
  std::map<MachineInstr *, const SUnit *> MISUnitMap;
  for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
    const SUnit *SU = &SUnits[i];
    MISUnitMap.insert(std::pair<MachineInstr *, const SUnit *>(SU->getInstr(),
                                                               SU));
  }

  // Track progress along the critical path through the SUnit graph as
  // we walk the instructions. This is needed for regclasses that only
  // break critical-path anti-dependencies.
  const SUnit *CriticalPathSU = nullptr;
  MachineInstr *CriticalPathMI = nullptr;
  if (CriticalPathSet.any()) {
    for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
      const SUnit *SU = &SUnits[i];
      if (!CriticalPathSU ||
          ((SU->getDepth() + SU->Latency) >
           (CriticalPathSU->getDepth() + CriticalPathSU->Latency))) {
        CriticalPathSU = SU;
      }
    }

    CriticalPathMI = CriticalPathSU->getInstr();
  }

#ifndef NDEBUG
  DEBUG(dbgs() << "\n===== Aggressive anti-dependency breaking\n");
  DEBUG(dbgs() << "Available regs:");
  for (unsigned Reg = 0; Reg < TRI->getNumRegs(); ++Reg) {
    if (!State->IsLive(Reg))
      DEBUG(dbgs() << " " << TRI->getName(Reg));
  }
  DEBUG(dbgs() << '\n');
#endif

  // Attempt to break anti-dependence edges. Walk the instructions
  // from the bottom up, tracking information about liveness as we go
  // to help determine which registers are available.
  unsigned Broken = 0;
  unsigned Count = InsertPosIndex - 1;
  for (MachineBasicBlock::iterator I = End, E = Begin;
       I != E; --Count) {
    MachineInstr &MI = *--I;

    if (MI.isDebugValue())
      continue;

    DEBUG(dbgs() << "Anti: ");
    DEBUG(MI.dump());

    std::set<unsigned> PassthruRegs;
    GetPassthruRegs(MI, PassthruRegs);

    // Process the defs in MI...
    PrescanInstruction(MI, Count, PassthruRegs);

    // The dependence edges that represent anti- and output-
    // dependencies that are candidates for breaking.
    std::vector<const SDep *> Edges;
    const SUnit *PathSU = MISUnitMap[&MI];
    AntiDepEdges(PathSU, Edges);

    // If MI is not on the critical path, then we don't rename
    // registers in the CriticalPathSet.
    BitVector *ExcludeRegs = nullptr;
    if (&MI == CriticalPathMI) {
      CriticalPathSU = CriticalPathStep(CriticalPathSU);
      CriticalPathMI = (CriticalPathSU) ? CriticalPathSU->getInstr() : nullptr;
    } else if (CriticalPathSet.any()) {
      ExcludeRegs = &CriticalPathSet;
    }

    // Ignore KILL instructions (they form a group in ScanInstruction
    // but don't cause any anti-dependence breaking themselves)
    if (!MI.isKill()) {
      // Attempt to break each anti-dependency...
      for (unsigned i = 0, e = Edges.size(); i != e; ++i) {
        const SDep *Edge = Edges[i];
        SUnit *NextSU = Edge->getSUnit();

        if ((Edge->getKind() != SDep::Anti) &&
            (Edge->getKind() != SDep::Output)) continue;

        unsigned AntiDepReg = Edge->getReg();
        DEBUG(dbgs() << "\tAntidep reg: " << TRI->getName(AntiDepReg));
        assert(AntiDepReg != 0 && "Anti-dependence on reg0?");

        if (!MRI.isAllocatable(AntiDepReg)) {
          // Don't break anti-dependencies on non-allocatable registers.
          DEBUG(dbgs() << " (non-allocatable)\n");
          continue;
        } else if (ExcludeRegs && ExcludeRegs->test(AntiDepReg)) {
          // Don't break anti-dependencies for critical path registers
          // if not on the critical path
          DEBUG(dbgs() << " (not critical-path)\n");
          continue;
        } else if (PassthruRegs.count(AntiDepReg) != 0) {
          // If the anti-dep register liveness "passes-thru", then
          // don't try to change it. It will be changed along with
          // the use if required to break an earlier antidep.
          DEBUG(dbgs() << " (passthru)\n");
          continue;
        } else {
          // No anti-dep breaking for implicit deps
          MachineOperand *AntiDepOp = MI.findRegisterDefOperand(AntiDepReg);
          assert(AntiDepOp && "Can't find index for defined register operand");
          if (!AntiDepOp || AntiDepOp->isImplicit()) {
            DEBUG(dbgs() << " (implicit)\n");
            continue;
          }

          // If the SUnit has other dependencies on the SUnit that
          // it anti-depends on, don't bother breaking the
          // anti-dependency since those edges would prevent such
          // units from being scheduled past each other
          // regardless.
          //
          // Also, if there are dependencies on other SUnits with the
          // same register as the anti-dependency, don't attempt to
          // break it.
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
                 PE = PathSU->Preds.end(); P != PE; ++P) {
            if (P->getSUnit() == NextSU ?
                (P->getKind() != SDep::Anti || P->getReg() != AntiDepReg) :
                (P->getKind() == SDep::Data && P->getReg() == AntiDepReg)) {
              AntiDepReg = 0;
              break;
            }
          }
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
                 PE = PathSU->Preds.end(); P != PE; ++P) {
            if ((P->getSUnit() == NextSU) && (P->getKind() != SDep::Anti) &&
                (P->getKind() != SDep::Output)) {
              DEBUG(dbgs() << " (real dependency)\n");
              AntiDepReg = 0;
              break;
            } else if ((P->getSUnit() != NextSU) &&
                       (P->getKind() == SDep::Data) &&
                       (P->getReg() == AntiDepReg)) {
              DEBUG(dbgs() << " (other dependency)\n");
              AntiDepReg = 0;
              break;
            }
          }

          if (AntiDepReg == 0) continue;
        }

        assert(AntiDepReg != 0);
        if (AntiDepReg == 0) continue;

        // Determine AntiDepReg's register group.
        const unsigned GroupIndex = State->GetGroup(AntiDepReg);
        if (GroupIndex == 0) {
          DEBUG(dbgs() << " (zero group)\n");
          continue;
        }

        DEBUG(dbgs() << '\n');

        // Look for a suitable register to use to break the anti-dependence.
        std::map<unsigned, unsigned> RenameMap;
        if (FindSuitableFreeRegisters(GroupIndex, RenameOrder, RenameMap)) {
          DEBUG(dbgs() << "\tBreaking anti-dependence edge on "
                << TRI->getName(AntiDepReg) << ":");

          // Handle each group register...
          for (std::map<unsigned, unsigned>::iterator
                 S = RenameMap.begin(), E = RenameMap.end(); S != E; ++S) {
            unsigned CurrReg = S->first;
            unsigned NewReg = S->second;

            DEBUG(dbgs() << " " << TRI->getName(CurrReg) << "->" <<
                  TRI->getName(NewReg) << "(" <<
                  RegRefs.count(CurrReg) << " refs)");

            // Update the references to the old register CurrReg to
            // refer to the new register NewReg.
            for (const auto &Q : make_range(RegRefs.equal_range(CurrReg))) {
              Q.second.Operand->setReg(NewReg);
              // If the SU for the instruction being updated has debug
              // information related to the anti-dependency register, make
              // sure to update that as well.
              const SUnit *SU = MISUnitMap[Q.second.Operand->getParent()];
              if (!SU) continue;
              for (DbgValueVector::iterator DVI = DbgValues.begin(),
                     DVE = DbgValues.end(); DVI != DVE; ++DVI)
                if (DVI->second == Q.second.Operand->getParent())
                  UpdateDbgValue(*DVI->first, AntiDepReg, NewReg);
            }

            // We just went back in time and modified history; the
            // liveness information for CurrReg is now inconsistent. Set
            // the state as if it were dead.
            State->UnionGroups(NewReg, 0);
            RegRefs.erase(NewReg);
            DefIndices[NewReg] = DefIndices[CurrReg];
            KillIndices[NewReg] = KillIndices[CurrReg];

            State->UnionGroups(CurrReg, 0);
            RegRefs.erase(CurrReg);
            DefIndices[CurrReg] = KillIndices[CurrReg];
            KillIndices[CurrReg] = ~0u;
            assert(((KillIndices[CurrReg] == ~0u) !=
                    (DefIndices[CurrReg] == ~0u)) &&
                   "Kill and Def maps aren't consistent for AntiDepReg!");
          }

          ++Broken;
          DEBUG(dbgs() << '\n');
        }
      }
    }

    ScanInstruction(MI, Count);
  }

  return Broken;
}