//===----- AggressiveAntiDepBreaker.cpp - Anti-dep breaker ----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the AggressiveAntiDepBreaker class, which
// implements register anti-dependence breaking during post-RA
// scheduling. It attempts to break all anti-dependencies within a
// block.
//
//===----------------------------------------------------------------------===//

#include "AggressiveAntiDepBreaker.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/RegisterClassInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetRegisterInfo.h"
using namespace llvm;

#define DEBUG_TYPE "post-RA-sched"

// If DebugDiv > 0 then only break antidep with (ID % DebugDiv) == DebugMod
static cl::opt<int>
DebugDiv("agg-antidep-debugdiv",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);
static cl::opt<int>
DebugMod("agg-antidep-debugmod",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);

AggressiveAntiDepState::AggressiveAntiDepState(const unsigned TargetRegs,
                                               MachineBasicBlock *BB) :
  NumTargetRegs(TargetRegs), GroupNodes(TargetRegs, 0),
  GroupNodeIndices(TargetRegs, 0),
  KillIndices(TargetRegs, 0),
  DefIndices(TargetRegs, 0)
{
  const unsigned BBSize = BB->size();
  for (unsigned i = 0; i < NumTargetRegs; ++i) {
    // Initialize all registers to be in their own group. Initially we
    // assign the register to the same-indexed GroupNode.
    GroupNodeIndices[i] = i;
    // Initialize the indices to indicate that no registers are live.
    KillIndices[i] = ~0u;
    DefIndices[i] = BBSize;
  }
}

unsigned AggressiveAntiDepState::GetGroup(unsigned Reg) {
  unsigned Node = GroupNodeIndices[Reg];
  while (GroupNodes[Node] != Node)
    Node = GroupNodes[Node];

  return Node;
}

void AggressiveAntiDepState::GetGroupRegs(
  unsigned Group,
  std::vector<unsigned> &Regs,
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference> *RegRefs)
{
  for (unsigned Reg = 0; Reg != NumTargetRegs; ++Reg) {
    if ((GetGroup(Reg) == Group) && (RegRefs->count(Reg) > 0))
      Regs.push_back(Reg);
  }
}

unsigned AggressiveAntiDepState::UnionGroups(unsigned Reg1, unsigned Reg2)
{
  assert(GroupNodes[0] == 0 && "GroupNode 0 not parent!");
  assert(GroupNodeIndices[0] == 0 && "Reg 0 not in Group 0!");

  // find group for each register
  unsigned Group1 = GetGroup(Reg1);
  unsigned Group2 = GetGroup(Reg2);

  // if either group is 0, then that must become the parent
  unsigned Parent = (Group1 == 0) ? Group1 : Group2;
  unsigned Other = (Parent == Group1) ? Group2 : Group1;
  GroupNodes.at(Other) = Parent;
  return Parent;
}

unsigned AggressiveAntiDepState::LeaveGroup(unsigned Reg)
{
  // Create a new GroupNode for Reg. Reg's existing GroupNode must
  // stay as is because there could be other GroupNodes referring to
  // it.
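  // Append a fresh node that is its own parent, so from this point on Reg
  // forms a singleton group independent of its old group.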
  unsigned idx = GroupNodes.size();
  GroupNodes.push_back(idx);
  GroupNodeIndices[Reg] = idx;
  return idx;
}

bool AggressiveAntiDepState::IsLive(unsigned Reg)
{
  // KillIndex must be defined and DefIndex not defined for a register
  // to be live.
  return((KillIndices[Reg] != ~0u) && (DefIndices[Reg] == ~0u));
}

AggressiveAntiDepBreaker::AggressiveAntiDepBreaker(
    MachineFunction &MFi, const RegisterClassInfo &RCI,
    TargetSubtargetInfo::RegClassVector &CriticalPathRCs)
    : AntiDepBreaker(), MF(MFi), MRI(MF.getRegInfo()),
      TII(MF.getSubtarget().getInstrInfo()),
      TRI(MF.getSubtarget().getRegisterInfo()), RegClassInfo(RCI),
      State(nullptr) {
  /* Collect a bitset of all registers that are only broken if they
     are on the critical path. */
  for (unsigned i = 0, e = CriticalPathRCs.size(); i < e; ++i) {
    BitVector CPSet = TRI->getAllocatableSet(MF, CriticalPathRCs[i]);
    if (CriticalPathSet.none())
      CriticalPathSet = CPSet;
    else
      CriticalPathSet |= CPSet;
  }

  DEBUG(dbgs() << "AntiDep Critical-Path Registers:");
  DEBUG(for (unsigned r : CriticalPathSet.set_bits())
          dbgs() << " " << TRI->getName(r));
  DEBUG(dbgs() << '\n');
}

AggressiveAntiDepBreaker::~AggressiveAntiDepBreaker() {
  delete State;
}

void AggressiveAntiDepBreaker::StartBlock(MachineBasicBlock *BB) {
  assert(!State);
  State = new AggressiveAntiDepState(TRI->getNumRegs(), BB);

  bool IsReturnBlock = BB->isReturnBlock();
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();

  // Examine the live-in regs of all successors.
  for (MachineBasicBlock::succ_iterator SI = BB->succ_begin(),
         SE = BB->succ_end(); SI != SE; ++SI)
    for (const auto &LI : (*SI)->liveins()) {
      for (MCRegAliasIterator AI(LI.PhysReg, TRI, true); AI.isValid(); ++AI) {
        unsigned Reg = *AI;
        State->UnionGroups(Reg, 0);
        KillIndices[Reg] = BB->size();
        DefIndices[Reg] = ~0u;
      }
    }

  // Mark live-out callee-saved registers. In a return block this is
  // all callee-saved registers. In a non-return block this is any
  // callee-saved register that is not saved in the prologue.
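  // Pristine registers are callee-saved registers that the prologue has not
  // saved, so their values must survive the block; marking them live here
  // keeps them in group 0 and out of the set of rename candidates.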
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  BitVector Pristine = MFI.getPristineRegs(MF);
  for (const MCPhysReg *I = MF.getRegInfo().getCalleeSavedRegs(); *I;
       ++I) {
    unsigned Reg = *I;
    if (!IsReturnBlock && !Pristine.test(Reg))
      continue;
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      unsigned AliasReg = *AI;
      State->UnionGroups(AliasReg, 0);
      KillIndices[AliasReg] = BB->size();
      DefIndices[AliasReg] = ~0u;
    }
  }
}

void AggressiveAntiDepBreaker::FinishBlock() {
  delete State;
  State = nullptr;
}

void AggressiveAntiDepBreaker::Observe(MachineInstr &MI, unsigned Count,
                                       unsigned InsertPosIndex) {
  assert(Count < InsertPosIndex && "Instruction index out of expected range!");

  std::set<unsigned> PassthruRegs;
  GetPassthruRegs(MI, PassthruRegs);
  PrescanInstruction(MI, Count, PassthruRegs);
  ScanInstruction(MI, Count);

  DEBUG(dbgs() << "Observe: ");
  DEBUG(MI.dump());
  DEBUG(dbgs() << "\tRegs:");

  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  for (unsigned Reg = 0; Reg != TRI->getNumRegs(); ++Reg) {
    // If Reg is currently live, then mark that it can't be renamed as
    // we don't know the extent of its live-range anymore (now that it
    // has been scheduled). If it is not live but was defined in the
    // previous schedule region, then set its def index to the most
    // conservative location (i.e. the beginning of the previous
    // schedule region).
    if (State->IsLive(Reg)) {
      DEBUG(if (State->GetGroup(Reg) != 0)
              dbgs() << " " << TRI->getName(Reg) << "=g" <<
                State->GetGroup(Reg) << "->g0(region live-out)");
      State->UnionGroups(Reg, 0);
    } else if ((DefIndices[Reg] < InsertPosIndex)
               && (DefIndices[Reg] >= Count)) {
      DefIndices[Reg] = Count;
    }
  }
  DEBUG(dbgs() << '\n');
}

bool AggressiveAntiDepBreaker::IsImplicitDefUse(MachineInstr &MI,
                                                MachineOperand &MO) {
  if (!MO.isReg() || !MO.isImplicit())
    return false;

  unsigned Reg = MO.getReg();
  if (Reg == 0)
    return false;

  MachineOperand *Op = nullptr;
  if (MO.isDef())
    Op = MI.findRegisterUseOperand(Reg, true);
  else
    Op = MI.findRegisterDefOperand(Reg);

  return(Op && Op->isImplicit());
}

void AggressiveAntiDepBreaker::GetPassthruRegs(
    MachineInstr &MI, std::set<unsigned> &PassthruRegs) {
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    if (!MO.isReg()) continue;
    if ((MO.isDef() && MI.isRegTiedToUseOperand(i)) ||
        IsImplicitDefUse(MI, MO)) {
      const unsigned Reg = MO.getReg();
      for (MCSubRegIterator SubRegs(Reg, TRI, /*IncludeSelf=*/true);
           SubRegs.isValid(); ++SubRegs)
        PassthruRegs.insert(*SubRegs);
    }
  }
}

/// AntiDepEdges - Return in Edges the anti- and output- dependencies
/// in SU that we want to consider for breaking.
static void AntiDepEdges(const SUnit *SU, std::vector<const SDep*>& Edges) {
  SmallSet<unsigned, 4> RegSet;
  for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
       P != PE; ++P) {
    if ((P->getKind() == SDep::Anti) || (P->getKind() == SDep::Output)) {
      if (RegSet.insert(P->getReg()).second)
        Edges.push_back(&*P);
    }
  }
}

/// CriticalPathStep - Return the next SUnit after SU on the bottom-up
/// critical path.
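/// "Next" is the predecessor whose depth plus edge latency is largest, i.e.
/// the edge along which the critical path continues; latency ties prefer
/// anti-dependence edges.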
static const SUnit *CriticalPathStep(const SUnit *SU) {
  const SDep *Next = nullptr;
  unsigned NextDepth = 0;
  // Find the predecessor edge with the greatest depth.
  if (SU) {
    for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
         P != PE; ++P) {
      const SUnit *PredSU = P->getSUnit();
      unsigned PredLatency = P->getLatency();
      unsigned PredTotalLatency = PredSU->getDepth() + PredLatency;
      // In the case of a latency tie, prefer an anti-dependency edge over
      // other types of edges.
      if (NextDepth < PredTotalLatency ||
          (NextDepth == PredTotalLatency && P->getKind() == SDep::Anti)) {
        NextDepth = PredTotalLatency;
        Next = &*P;
      }
    }
  }

  return (Next) ? Next->getSUnit() : nullptr;
}

void AggressiveAntiDepBreaker::HandleLastUse(unsigned Reg, unsigned KillIdx,
                                             const char *tag,
                                             const char *header,
                                             const char *footer) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // FIXME: We must leave subregisters of live super registers as live, so that
  // we don't clear out the register tracking information for subregisters of
  // super registers we're still tracking (and with which we're unioning
  // subregister definitions).
  for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI)
    if (TRI->isSuperRegister(Reg, *AI) && State->IsLive(*AI)) {
      DEBUG(if (!header && footer) dbgs() << footer);
      return;
    }

  if (!State->IsLive(Reg)) {
    KillIndices[Reg] = KillIdx;
    DefIndices[Reg] = ~0u;
    RegRefs.erase(Reg);
    State->LeaveGroup(Reg);
    DEBUG(if (header) {
        dbgs() << header << TRI->getName(Reg); header = nullptr; });
    DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << tag);
    // Repeat for subregisters. Note that we only do this if the superregister
    // was not live because otherwise, regardless of whether we have an
    // explicit use of the subregister, the subregister's contents are needed
    // for the uses of the superregister.
    for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs) {
      unsigned SubregReg = *SubRegs;
      if (!State->IsLive(SubregReg)) {
        KillIndices[SubregReg] = KillIdx;
        DefIndices[SubregReg] = ~0u;
        RegRefs.erase(SubregReg);
        State->LeaveGroup(SubregReg);
        DEBUG(if (header) {
            dbgs() << header << TRI->getName(Reg); header = nullptr; });
        DEBUG(dbgs() << " " << TRI->getName(SubregReg) << "->g" <<
              State->GetGroup(SubregReg) << tag);
      }
    }
  }

  DEBUG(if (!header && footer) dbgs() << footer);
}

void AggressiveAntiDepBreaker::PrescanInstruction(
    MachineInstr &MI, unsigned Count, std::set<unsigned> &PassthruRegs) {
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Handle dead defs by simulating a last-use of the register just
  // after the def. A dead def can occur because the def is truly
  // dead, or because only a subregister is live at the def. If we
  // don't do this the dead def will be incorrectly merged into the
  // previous def.
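  // Using Count + 1 as the kill index gives a dead def a minimal live range
  // of its own, ending immediately after the def, rather than letting it be
  // folded into the previous def's range.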
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    HandleLastUse(Reg, Count + 1, "", "\tDead Def: ", "\n");
  }

  DEBUG(dbgs() << "\tDef Groups:");
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" << State->GetGroup(Reg));

    // If MI's defs have a special allocation requirement, don't allow
    // any def registers to be changed. Also assume all registers
    // defined in a call must not be changed (ABI). Inline assembly may
    // reference either system calls or the register directly. Skip it until we
    // can tell user specified registers from compiler-specified.
    if (MI.isCall() || MI.hasExtraDefRegAllocReq() || TII->isPredicated(MI) ||
        MI.isInlineAsm()) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Any aliases that are live at this point are completely or
    // partially defined here, so group those aliases with Reg.
    for (MCRegAliasIterator AI(Reg, TRI, false); AI.isValid(); ++AI) {
      unsigned AliasReg = *AI;
      if (State->IsLive(AliasReg)) {
        State->UnionGroups(Reg, AliasReg);
        DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << "(via " <<
              TRI->getName(AliasReg) << ")");
      }
    }

    // Note register reference...
    const TargetRegisterClass *RC = nullptr;
    if (i < MI.getDesc().getNumOperands())
      RC = TII->getRegClass(MI.getDesc(), i, TRI, MF);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Scan the register defs for this instruction and update
  // live-ranges.
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;
    // Ignore KILLs and passthru registers for liveness...
    if (MI.isKill() || (PassthruRegs.count(Reg) != 0))
      continue;

    // Update def for Reg and aliases.
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      // We need to be careful here not to define already-live super registers.
      // If the super register is already live, then this definition is not
      // a definition of the whole super register (just a partial insertion
      // into it). Earlier subregister definitions (which we've not yet visited
      // because we're iterating bottom-up) need to be linked to the same group
      // as this definition.
      if (TRI->isSuperRegister(Reg, *AI) && State->IsLive(*AI))
        continue;

      DefIndices[*AI] = Count;
    }
  }
}

void AggressiveAntiDepBreaker::ScanInstruction(MachineInstr &MI,
                                               unsigned Count) {
  DEBUG(dbgs() << "\tUse Groups:");
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // If MI's uses have a special allocation requirement, don't allow
  // any use registers to be changed. Also assume all registers
  // used in a call must not be changed (ABI).
  // Inline Assembly register uses also cannot be safely changed.
  // FIXME: The issue with predicated instructions is more complex. We are
  // being conservative here because the kill markers cannot be trusted after
  // if-conversion:
  // %R6<def> = LDR %SP, %reg0, 92, pred:14, pred:%reg0; mem:LD4[FixedStack14]
  // ...
  // STR %R0, %R6<kill>, %reg0, 0, pred:0, pred:%CPSR; mem:ST4[%395]
  // %R6<def> = LDR %SP, %reg0, 100, pred:0, pred:%CPSR; mem:LD4[FixedStack12]
  // STR %R0, %R6<kill>, %reg0, 0, pred:14, pred:%reg0; mem:ST4[%396](align=8)
  //
  // The first R6 kill is not really a kill since it's killed by a predicated
  // instruction which may not be executed. The second R6 def may or may not
  // re-define R6 so it's not safe to change it since the last R6 use cannot be
  // changed.
  bool Special = MI.isCall() || MI.hasExtraSrcRegAllocReq() ||
                 TII->isPredicated(MI) || MI.isInlineAsm();

  // Scan the register uses for this instruction and update
  // live-ranges, groups and RegRefs.
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    if (!MO.isReg() || !MO.isUse()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" <<
          State->GetGroup(Reg));

    // If the register wasn't previously live but now it is, this is a kill.
    // Forget the previous live-range information and start a new live-range
    // for the register.
    HandleLastUse(Reg, Count, "(last-use)");

    if (Special) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Note register reference...
    const TargetRegisterClass *RC = nullptr;
    if (i < MI.getDesc().getNumOperands())
      RC = TII->getRegClass(MI.getDesc(), i, TRI, MF);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Form a group of all defs and uses of a KILL instruction to ensure
  // that all registers are renamed as a group.
  if (MI.isKill()) {
    DEBUG(dbgs() << "\tKill Group:");

    unsigned FirstReg = 0;
    for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI.getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (Reg == 0) continue;

      if (FirstReg != 0) {
        DEBUG(dbgs() << "=" << TRI->getName(Reg));
        State->UnionGroups(FirstReg, Reg);
      } else {
        DEBUG(dbgs() << " " << TRI->getName(Reg));
        FirstReg = Reg;
      }
    }

    DEBUG(dbgs() << "->g" << State->GetGroup(FirstReg) << '\n');
  }
}

BitVector AggressiveAntiDepBreaker::GetRenameRegisters(unsigned Reg) {
  BitVector BV(TRI->getNumRegs(), false);
  bool first = true;

  // Check all references that need rewriting for Reg. For each, use
  // the corresponding register class to narrow the set of registers
  // that are appropriate for renaming.
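  // The first reference seeds BV with its class's allocatable set; every
  // later reference intersects its set in, so BV ends up containing only
  // registers legal for all recorded uses and defs of Reg.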
  for (const auto &Q : make_range(State->GetRegRefs().equal_range(Reg))) {
    const TargetRegisterClass *RC = Q.second.RC;
    if (!RC) continue;

    BitVector RCBV = TRI->getAllocatableSet(MF, RC);
    if (first) {
      BV |= RCBV;
      first = false;
    } else {
      BV &= RCBV;
    }

    DEBUG(dbgs() << " " << TRI->getRegClassName(RC));
  }

  return BV;
}

bool AggressiveAntiDepBreaker::FindSuitableFreeRegisters(
    unsigned AntiDepGroupIndex,
    RenameOrderType& RenameOrder,
    std::map<unsigned, unsigned> &RenameMap) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Collect all referenced registers in the same group as
  // AntiDepReg. These all need to be renamed together if we are to
  // break the anti-dependence.
  std::vector<unsigned> Regs;
  State->GetGroupRegs(AntiDepGroupIndex, Regs, &RegRefs);
  assert(Regs.size() > 0 && "Empty register group!");
  if (Regs.size() == 0)
    return false;

  // Find the "superest" register in the group. At the same time,
  // collect the BitVector of registers that can be used to rename
  // each register.
  DEBUG(dbgs() << "\tRename Candidates for Group g" << AntiDepGroupIndex
        << ":\n");
  std::map<unsigned, BitVector> RenameRegisterMap;
  unsigned SuperReg = 0;
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if ((SuperReg == 0) || TRI->isSuperRegister(SuperReg, Reg))
      SuperReg = Reg;

    // If Reg has any references, then collect possible rename regs
    if (RegRefs.count(Reg) > 0) {
      DEBUG(dbgs() << "\t\t" << TRI->getName(Reg) << ":");

      BitVector &BV = RenameRegisterMap[Reg];
      assert(BV.empty());
      BV = GetRenameRegisters(Reg);

      DEBUG({
        dbgs() << " ::";
        for (unsigned r : BV.set_bits())
          dbgs() << " " << TRI->getName(r);
        dbgs() << "\n";
      });
    }
  }

  // All group registers should be a subreg of SuperReg.
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if (Reg == SuperReg) continue;
    bool IsSub = TRI->isSubRegister(SuperReg, Reg);
    // FIXME: remove this once PR18663 has been properly fixed. For now,
    // return a conservative answer:
    // assert(IsSub && "Expecting group subregister");
    if (!IsSub)
      return false;
  }

#ifndef NDEBUG
  // If DebugDiv > 0 then only rename (renamecnt % DebugDiv) == DebugMod
  if (DebugDiv > 0) {
    static int renamecnt = 0;
    if (renamecnt++ % DebugDiv != DebugMod)
      return false;

    dbgs() << "*** Performing rename " << TRI->getName(SuperReg) <<
      " for debug ***\n";
  }
#endif

  // Check each possible rename register for SuperReg in round-robin
  // order. If that register is available, and the corresponding
  // registers are available for the other group subregisters, then we
  // can use those registers to rename.

  // FIXME: Using getMinimalPhysRegClass is very conservative. We should
  // check every use of the register and find the largest register class
  // that can be used in all of them.
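  // RenameOrder remembers, per register class, where the previous search
  // stopped, so successive renames cycle round-robin through the allocation
  // order rather than always reusing the same few registers.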
  const TargetRegisterClass *SuperRC =
    TRI->getMinimalPhysRegClass(SuperReg, MVT::Other);

  ArrayRef<MCPhysReg> Order = RegClassInfo.getOrder(SuperRC);
  if (Order.empty()) {
    DEBUG(dbgs() << "\tEmpty Super Regclass!!\n");
    return false;
  }

  DEBUG(dbgs() << "\tFind Registers:");

  RenameOrder.insert(RenameOrderType::value_type(SuperRC, Order.size()));

  unsigned OrigR = RenameOrder[SuperRC];
  unsigned EndR = ((OrigR == Order.size()) ? 0 : OrigR);
  unsigned R = OrigR;
  do {
    if (R == 0) R = Order.size();
    --R;
    const unsigned NewSuperReg = Order[R];
    // Don't consider non-allocatable registers
    if (!MRI.isAllocatable(NewSuperReg)) continue;
    // Don't replace a register with itself.
    if (NewSuperReg == SuperReg) continue;

    DEBUG(dbgs() << " [" << TRI->getName(NewSuperReg) << ':');
    RenameMap.clear();

    // For each referenced group register (which must be a SuperReg or
    // a subregister of SuperReg), find the corresponding subregister
    // of NewSuperReg and make sure it is free to be renamed.
    for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
      unsigned Reg = Regs[i];
      unsigned NewReg = 0;
      if (Reg == SuperReg) {
        NewReg = NewSuperReg;
      } else {
        unsigned NewSubRegIdx = TRI->getSubRegIndex(SuperReg, Reg);
        if (NewSubRegIdx != 0)
          NewReg = TRI->getSubReg(NewSuperReg, NewSubRegIdx);
      }

      DEBUG(dbgs() << " " << TRI->getName(NewReg));

      // Check if Reg can be renamed to NewReg.
      if (!RenameRegisterMap[Reg].test(NewReg)) {
        DEBUG(dbgs() << "(no rename)");
        goto next_super_reg;
      }

      // If NewReg is dead and NewReg's most recent def is not before
      // Reg's kill, it's safe to replace Reg with NewReg. We
      // must also check all aliases of NewReg, because we can't define a
      // register when any sub or super is already live.
      if (State->IsLive(NewReg) || (KillIndices[Reg] > DefIndices[NewReg])) {
        DEBUG(dbgs() << "(live)");
        goto next_super_reg;
      } else {
        bool found = false;
        for (MCRegAliasIterator AI(NewReg, TRI, false); AI.isValid(); ++AI) {
          unsigned AliasReg = *AI;
          if (State->IsLive(AliasReg) ||
              (KillIndices[Reg] > DefIndices[AliasReg])) {
            DEBUG(dbgs() << "(alias " << TRI->getName(AliasReg) << " live)");
            found = true;
            break;
          }
        }
        if (found)
          goto next_super_reg;
      }

      // We cannot rename 'Reg' to 'NewReg' if one of the uses of 'Reg' also
      // defines 'NewReg' via an early-clobber operand.
      for (const auto &Q : make_range(RegRefs.equal_range(Reg))) {
        MachineInstr *UseMI = Q.second.Operand->getParent();
        int Idx = UseMI->findRegisterDefOperandIdx(NewReg, false, true, TRI);
        if (Idx == -1)
          continue;

        if (UseMI->getOperand(Idx).isEarlyClobber()) {
          DEBUG(dbgs() << "(ec)");
          goto next_super_reg;
        }
      }

      // Also, we cannot rename 'Reg' to 'NewReg' if the instruction defining
      // 'Reg' is an early-clobber define and that instruction also uses
      // 'NewReg'.
      for (const auto &Q : make_range(RegRefs.equal_range(Reg))) {
        if (!Q.second.Operand->isDef() || !Q.second.Operand->isEarlyClobber())
          continue;

        MachineInstr *DefMI = Q.second.Operand->getParent();
        if (DefMI->readsRegister(NewReg, TRI)) {
          DEBUG(dbgs() << "(ec)");
          goto next_super_reg;
        }
      }

      // Record that 'Reg' can be renamed to 'NewReg'.
      RenameMap.insert(std::pair<unsigned, unsigned>(Reg, NewReg));
    }

    // If we fall-out here, then every register in the group can be
    // renamed, as recorded in RenameMap.
    RenameOrder.erase(SuperRC);
    RenameOrder.insert(RenameOrderType::value_type(SuperRC, R));
    DEBUG(dbgs() << "]\n");
    return true;

  next_super_reg:
    DEBUG(dbgs() << ']');
  } while (R != EndR);

  DEBUG(dbgs() << '\n');

  // No registers are free and available!
  return false;
}

/// BreakAntiDependencies - Identify anti-dependencies within the
/// ScheduleDAG and break them by renaming registers.
///
unsigned AggressiveAntiDepBreaker::BreakAntiDependencies(
                              const std::vector<SUnit>& SUnits,
                              MachineBasicBlock::iterator Begin,
                              MachineBasicBlock::iterator End,
                              unsigned InsertPosIndex,
                              DbgValueVector &DbgValues) {

  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // The code below assumes that there is at least one instruction,
  // so just duck out immediately if the block is empty.
  if (SUnits.empty()) return 0;

  // For each regclass the next register to use for renaming.
  RenameOrderType RenameOrder;

  // ...need a map from MI to SUnit.
  std::map<MachineInstr *, const SUnit *> MISUnitMap;
  for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
    const SUnit *SU = &SUnits[i];
    MISUnitMap.insert(std::pair<MachineInstr *, const SUnit *>(SU->getInstr(),
                                                               SU));
  }

  // Track progress along the critical path through the SUnit graph as
  // we walk the instructions. This is needed for regclasses that only
  // break critical-path anti-dependencies.
  const SUnit *CriticalPathSU = nullptr;
  MachineInstr *CriticalPathMI = nullptr;
  if (CriticalPathSet.any()) {
    for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
      const SUnit *SU = &SUnits[i];
      if (!CriticalPathSU ||
          ((SU->getDepth() + SU->Latency) >
           (CriticalPathSU->getDepth() + CriticalPathSU->Latency))) {
        CriticalPathSU = SU;
      }
    }

    CriticalPathMI = CriticalPathSU->getInstr();
  }

#ifndef NDEBUG
  DEBUG(dbgs() << "\n===== Aggressive anti-dependency breaking\n");
  DEBUG(dbgs() << "Available regs:");
  for (unsigned Reg = 0; Reg < TRI->getNumRegs(); ++Reg) {
    if (!State->IsLive(Reg))
      DEBUG(dbgs() << " " << TRI->getName(Reg));
  }
  DEBUG(dbgs() << '\n');
#endif

  BitVector RegAliases(TRI->getNumRegs());

  // Attempt to break anti-dependence edges. Walk the instructions
  // from the bottom up, tracking information about liveness as we go
  // to help determine which registers are available.
  unsigned Broken = 0;
  unsigned Count = InsertPosIndex - 1;
  for (MachineBasicBlock::iterator I = End, E = Begin;
       I != E; --Count) {
    MachineInstr &MI = *--I;

    if (MI.isDebugValue())
      continue;

    DEBUG(dbgs() << "Anti: ");
    DEBUG(MI.dump());

    std::set<unsigned> PassthruRegs;
    GetPassthruRegs(MI, PassthruRegs);

    // Process the defs in MI...
    PrescanInstruction(MI, Count, PassthruRegs);

    // The dependence edges that represent anti- and output-
    // dependencies that are candidates for breaking.
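    // (AntiDepEdges records at most one candidate edge per register.)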
    std::vector<const SDep *> Edges;
    const SUnit *PathSU = MISUnitMap[&MI];
    AntiDepEdges(PathSU, Edges);

    // If MI is not on the critical path, then we don't rename
    // registers in the CriticalPathSet.
    BitVector *ExcludeRegs = nullptr;
    if (&MI == CriticalPathMI) {
      CriticalPathSU = CriticalPathStep(CriticalPathSU);
      CriticalPathMI = (CriticalPathSU) ? CriticalPathSU->getInstr() : nullptr;
    } else if (CriticalPathSet.any()) {
      ExcludeRegs = &CriticalPathSet;
    }

    // Ignore KILL instructions (they form a group in ScanInstruction
    // but don't cause any anti-dependence breaking themselves)
    if (!MI.isKill()) {
      // Attempt to break each anti-dependency...
      for (unsigned i = 0, e = Edges.size(); i != e; ++i) {
        const SDep *Edge = Edges[i];
        SUnit *NextSU = Edge->getSUnit();

        if ((Edge->getKind() != SDep::Anti) &&
            (Edge->getKind() != SDep::Output)) continue;

        unsigned AntiDepReg = Edge->getReg();
        DEBUG(dbgs() << "\tAntidep reg: " << TRI->getName(AntiDepReg));
        assert(AntiDepReg != 0 && "Anti-dependence on reg0?");

        if (!MRI.isAllocatable(AntiDepReg)) {
          // Don't break anti-dependencies on non-allocatable registers.
          DEBUG(dbgs() << " (non-allocatable)\n");
          continue;
        } else if (ExcludeRegs && ExcludeRegs->test(AntiDepReg)) {
          // Don't break anti-dependencies for critical path registers
          // if not on the critical path
          DEBUG(dbgs() << " (not critical-path)\n");
          continue;
        } else if (PassthruRegs.count(AntiDepReg) != 0) {
          // If the anti-dep register liveness "passes-thru", then
          // don't try to change it. It will be changed along with
          // the use if required to break an earlier antidep.
          DEBUG(dbgs() << " (passthru)\n");
          continue;
        } else {
          // No anti-dep breaking for implicit deps
          MachineOperand *AntiDepOp = MI.findRegisterDefOperand(AntiDepReg);
          assert(AntiDepOp && "Can't find index for defined register operand");
          if (!AntiDepOp || AntiDepOp->isImplicit()) {
            DEBUG(dbgs() << " (implicit)\n");
            continue;
          }

          // If the SUnit has other dependencies on the SUnit that
          // it anti-depends on, don't bother breaking the
          // anti-dependency since those edges would prevent such
          // units from being scheduled past each other
          // regardless.
          //
          // Also, if there are dependencies on other SUnits with the
          // same register as the anti-dependency, don't attempt to
          // break it.
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
                 PE = PathSU->Preds.end(); P != PE; ++P) {
            if (P->getSUnit() == NextSU ?
                (P->getKind() != SDep::Anti || P->getReg() != AntiDepReg) :
                (P->getKind() == SDep::Data && P->getReg() == AntiDepReg)) {
              AntiDepReg = 0;
              break;
            }
          }
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
                 PE = PathSU->Preds.end(); P != PE; ++P) {
            if ((P->getSUnit() == NextSU) && (P->getKind() != SDep::Anti) &&
                (P->getKind() != SDep::Output)) {
              DEBUG(dbgs() << " (real dependency)\n");
              AntiDepReg = 0;
              break;
            } else if ((P->getSUnit() != NextSU) &&
                       (P->getKind() == SDep::Data) &&
                       (P->getReg() == AntiDepReg)) {
              DEBUG(dbgs() << " (other dependency)\n");
              AntiDepReg = 0;
              break;
            }
          }

          if (AntiDepReg == 0) continue;

          // If the definition of the anti-dependency register does not start
          // a new live range, bail out. This can happen if the anti-dep
          // register is a sub-register of another register whose live range
          // spans over PathSU. In such a case, PathSU defines only a part of
          // the larger register.
          RegAliases.reset();
          for (MCRegAliasIterator AI(AntiDepReg, TRI, true); AI.isValid(); ++AI)
            RegAliases.set(*AI);
          for (SDep S : PathSU->Succs) {
            SDep::Kind K = S.getKind();
            if (K != SDep::Data && K != SDep::Output && K != SDep::Anti)
              continue;
            unsigned R = S.getReg();
            if (!RegAliases[R])
              continue;
            if (R == AntiDepReg || TRI->isSubRegister(AntiDepReg, R))
              continue;
            AntiDepReg = 0;
            break;
          }

          if (AntiDepReg == 0) continue;
        }

        assert(AntiDepReg != 0);
        if (AntiDepReg == 0) continue;

        // Determine AntiDepReg's register group.
        const unsigned GroupIndex = State->GetGroup(AntiDepReg);
        if (GroupIndex == 0) {
          DEBUG(dbgs() << " (zero group)\n");
          continue;
        }

        DEBUG(dbgs() << '\n');

        // Look for a suitable register to use to break the anti-dependence.
        std::map<unsigned, unsigned> RenameMap;
        if (FindSuitableFreeRegisters(GroupIndex, RenameOrder, RenameMap)) {
          DEBUG(dbgs() << "\tBreaking anti-dependence edge on "
                << TRI->getName(AntiDepReg) << ":");

          // Handle each group register...
          for (std::map<unsigned, unsigned>::iterator
                 S = RenameMap.begin(), E = RenameMap.end(); S != E; ++S) {
            unsigned CurrReg = S->first;
            unsigned NewReg = S->second;

            DEBUG(dbgs() << " " << TRI->getName(CurrReg) << "->" <<
                  TRI->getName(NewReg) << "(" <<
                  RegRefs.count(CurrReg) << " refs)");

            // Update the references to the old register CurrReg to
            // refer to the new register NewReg.
            for (const auto &Q : make_range(RegRefs.equal_range(CurrReg))) {
              Q.second.Operand->setReg(NewReg);
              // If the SU for the instruction being updated has debug
              // information related to the anti-dependency register, make
              // sure to update that as well.
              const SUnit *SU = MISUnitMap[Q.second.Operand->getParent()];
              if (!SU) continue;
              UpdateDbgValues(DbgValues, Q.second.Operand->getParent(),
                              AntiDepReg, NewReg);
            }

            // We just went back in time and modified history; the
            // liveness information for CurrReg is now inconsistent. Set
            // the state as if it were dead.
            State->UnionGroups(NewReg, 0);
            RegRefs.erase(NewReg);
            DefIndices[NewReg] = DefIndices[CurrReg];
            KillIndices[NewReg] = KillIndices[CurrReg];

            State->UnionGroups(CurrReg, 0);
            RegRefs.erase(CurrReg);
            DefIndices[CurrReg] = KillIndices[CurrReg];
            KillIndices[CurrReg] = ~0u;
            assert(((KillIndices[CurrReg] == ~0u) !=
                    (DefIndices[CurrReg] == ~0u)) &&
                   "Kill and Def maps aren't consistent for AntiDepReg!");
          }

          ++Broken;
          DEBUG(dbgs() << '\n');
        }
      }
    }

    ScanInstruction(MI, Count);
  }

  return Broken;
}