//===----- AggressiveAntiDepBreaker.cpp - Anti-dep breaker ----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the AggressiveAntiDepBreaker class, which
// implements register anti-dependence breaking during post-RA
// scheduling. It attempts to break all anti-dependencies within a
// block.
//
//===----------------------------------------------------------------------===//

#include "AggressiveAntiDepBreaker.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/RegisterClassInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetRegisterInfo.h"
using namespace llvm;

#define DEBUG_TYPE "post-RA-sched"

// If DebugDiv > 0 then only break antidep with (ID % DebugDiv) == DebugMod
static cl::opt<int>
DebugDiv("agg-antidep-debugdiv",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);
static cl::opt<int>
DebugMod("agg-antidep-debugmod",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);

// Per-block renaming state: a union-find forest of register "groups"
// (registers that must be renamed together) plus kill/def indices that
// track liveness as the block is scanned bottom-up.
AggressiveAntiDepState::AggressiveAntiDepState(const unsigned TargetRegs,
                                               MachineBasicBlock *BB) :
  NumTargetRegs(TargetRegs), GroupNodes(TargetRegs, 0),
  GroupNodeIndices(TargetRegs, 0),
  KillIndices(TargetRegs, 0),
  DefIndices(TargetRegs, 0)
{
  const unsigned BBSize = BB->size();
  for (unsigned i = 0; i < NumTargetRegs; ++i) {
    // Initialize all registers to be in their own group. Initially we
    // assign the register to the same-indexed GroupNode.
    GroupNodeIndices[i] = i;
    // Initialize the indices to indicate that no registers are live.
    // (KillIndices[i] == ~0u && DefIndices[i] != ~0u) means "not live";
    // see IsLive() below.
    KillIndices[i] = ~0u;
    DefIndices[i] = BBSize;
  }
}

// Return the group that Reg currently belongs to by chasing parent
// links in the union-find forest until reaching a root (a node that is
// its own parent). No path compression is performed.
unsigned AggressiveAntiDepState::GetGroup(unsigned Reg) {
  unsigned Node = GroupNodeIndices[Reg];
  while (GroupNodes[Node] != Node)
    Node = GroupNodes[Node];

  return Node;
}

// Collect into Regs every register that is in group Group and has at
// least one recorded reference in RegRefs.
void AggressiveAntiDepState::GetGroupRegs(
  unsigned Group,
  std::vector<unsigned> &Regs,
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference> *RegRefs)
{
  for (unsigned Reg = 0; Reg != NumTargetRegs; ++Reg) {
    if ((GetGroup(Reg) == Group) && (RegRefs->count(Reg) > 0))
      Regs.push_back(Reg);
  }
}

// Merge the groups of Reg1 and Reg2 and return the surviving group.
// Group 0 is special (the "do not rename" group) and always wins the
// merge, so a register unioned with 0 becomes unrenameable.
unsigned AggressiveAntiDepState::UnionGroups(unsigned Reg1, unsigned Reg2)
{
  assert(GroupNodes[0] == 0 && "GroupNode 0 not parent!");
  assert(GroupNodeIndices[0] == 0 && "Reg 0 not in Group 0!");

  // find group for each register
  unsigned Group1 = GetGroup(Reg1);
  unsigned Group2 = GetGroup(Reg2);

  // if either group is 0, then that must become the parent
  unsigned Parent = (Group1 == 0) ? Group1 : Group2;
  unsigned Other = (Parent == Group1) ? Group2 : Group1;
  GroupNodes.at(Other) = Parent;
  return Parent;
}

// Move Reg into a brand-new singleton group and return its index.
unsigned AggressiveAntiDepState::LeaveGroup(unsigned Reg)
{
  // Create a new GroupNode for Reg. Reg's existing GroupNode must
  // stay as is because there could be other GroupNodes referring to
  // it.
  unsigned idx = GroupNodes.size();
  GroupNodes.push_back(idx);
  GroupNodeIndices[Reg] = idx;
  return idx;
}

// A register is live exactly when its kill index is defined and its
// def index is not (the two indices are kept mutually exclusive).
bool AggressiveAntiDepState::IsLive(unsigned Reg)
{
  // KillIndex must be defined and DefIndex not defined for a register
  // to be live.
  return((KillIndices[Reg] != ~0u) && (DefIndices[Reg] == ~0u));
}

AggressiveAntiDepBreaker::AggressiveAntiDepBreaker(
    MachineFunction &MFi, const RegisterClassInfo &RCI,
    TargetSubtargetInfo::RegClassVector &CriticalPathRCs)
    : AntiDepBreaker(), MF(MFi), MRI(MF.getRegInfo()),
      TII(MF.getSubtarget().getInstrInfo()),
      TRI(MF.getSubtarget().getRegisterInfo()), RegClassInfo(RCI),
      State(nullptr) {
  /* Collect a bitset of all registers that are only broken if they
     are on the critical path. */
  for (unsigned i = 0, e = CriticalPathRCs.size(); i < e; ++i) {
    BitVector CPSet = TRI->getAllocatableSet(MF, CriticalPathRCs[i]);
    if (CriticalPathSet.none())
      CriticalPathSet = CPSet;
    else
      CriticalPathSet |= CPSet;
  }

  DEBUG(dbgs() << "AntiDep Critical-Path Registers:");
  DEBUG(for (int r = CriticalPathSet.find_first(); r != -1;
             r = CriticalPathSet.find_next(r))
          dbgs() << " " << TRI->getName(r));
  DEBUG(dbgs() << '\n');
}

AggressiveAntiDepBreaker::~AggressiveAntiDepBreaker() {
  delete State;
}

// Initialize per-block state: everything live into a successor block or
// live-out in a callee-saved register is placed in group 0 so it is
// never renamed.
void AggressiveAntiDepBreaker::StartBlock(MachineBasicBlock *BB) {
  assert(!State);
  State = new AggressiveAntiDepState(TRI->getNumRegs(), BB);

  bool IsReturnBlock = (!BB->empty() && BB->back().isReturn());
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();

  // Examine the live-in regs of all successors.
  for (MachineBasicBlock::succ_iterator SI = BB->succ_begin(),
         SE = BB->succ_end(); SI != SE; ++SI)
    for (MachineBasicBlock::livein_iterator I = (*SI)->livein_begin(),
           E = (*SI)->livein_end(); I != E; ++I) {
      for (MCRegAliasIterator AI(*I, TRI, true); AI.isValid(); ++AI) {
        unsigned Reg = *AI;
        State->UnionGroups(Reg, 0);
        // Mark live at the block end (index == BB->size()).
        KillIndices[Reg] = BB->size();
        DefIndices[Reg] = ~0u;
      }
    }

  // Mark live-out callee-saved registers. In a return block this is
  // all callee-saved registers. In non-return this is any
  // callee-saved register that is not saved in the prolog.
  const MachineFrameInfo *MFI = MF.getFrameInfo();
  BitVector Pristine = MFI->getPristineRegs(BB);
  for (const MCPhysReg *I = TRI->getCalleeSavedRegs(&MF); *I; ++I) {
    unsigned Reg = *I;
    if (!IsReturnBlock && !Pristine.test(Reg)) continue;
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      unsigned AliasReg = *AI;
      State->UnionGroups(AliasReg, 0);
      KillIndices[AliasReg] = BB->size();
      DefIndices[AliasReg] = ~0u;
    }
  }
}

void AggressiveAntiDepBreaker::FinishBlock() {
  delete State;
  State = nullptr;
}

// Update liveness for an instruction that has already been scheduled
// (i.e. is outside the current scheduling region).
void AggressiveAntiDepBreaker::Observe(MachineInstr *MI, unsigned Count,
                                       unsigned InsertPosIndex) {
  assert(Count < InsertPosIndex && "Instruction index out of expected range!");

  std::set<unsigned> PassthruRegs;
  GetPassthruRegs(MI, PassthruRegs);
  PrescanInstruction(MI, Count, PassthruRegs);
  ScanInstruction(MI, Count);

  DEBUG(dbgs() << "Observe: ");
  DEBUG(MI->dump());
  DEBUG(dbgs() << "\tRegs:");

  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  for (unsigned Reg = 0; Reg != TRI->getNumRegs(); ++Reg) {
    // If Reg is current live, then mark that it can't be renamed as
    // we don't know the extent of its live-range anymore (now that it
    // has been scheduled). If it is not live but was defined in the
    // previous schedule region, then set its def index to the most
    // conservative location (i.e. the beginning of the previous
    // schedule region).
    if (State->IsLive(Reg)) {
      DEBUG(if (State->GetGroup(Reg) != 0)
              dbgs() << " " << TRI->getName(Reg) << "=g" <<
                State->GetGroup(Reg) << "->g0(region live-out)");
      State->UnionGroups(Reg, 0);
    } else if ((DefIndices[Reg] < InsertPosIndex)
               && (DefIndices[Reg] >= Count)) {
      DefIndices[Reg] = Count;
    }
  }
  DEBUG(dbgs() << '\n');
}

// Return true if MO is an implicit operand that is paired with an
// explicit operand of the opposite def/use sense on the same register
// (i.e. the implicit half of a def-use passthru).
bool AggressiveAntiDepBreaker::IsImplicitDefUse(MachineInstr *MI,
                                                MachineOperand& MO)
{
  if (!MO.isReg() || !MO.isImplicit())
    return false;

  unsigned Reg = MO.getReg();
  if (Reg == 0)
    return false;

  MachineOperand *Op = nullptr;
  if (MO.isDef())
    Op = MI->findRegisterUseOperand(Reg, true);
  else
    Op = MI->findRegisterDefOperand(Reg);

  return(Op && Op->isImplicit());
}

// Collect registers whose liveness "passes through" MI: tied defs and
// implicit def-use pairs. All subregisters (and the register itself)
// are included.
void AggressiveAntiDepBreaker::GetPassthruRegs(MachineInstr *MI,
                                               std::set<unsigned>& PassthruRegs) {
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    if ((MO.isDef() && MI->isRegTiedToUseOperand(i)) ||
        IsImplicitDefUse(MI, MO)) {
      const unsigned Reg = MO.getReg();
      for (MCSubRegIterator SubRegs(Reg, TRI, /*IncludeSelf=*/true);
           SubRegs.isValid(); ++SubRegs)
        PassthruRegs.insert(*SubRegs);
    }
  }
}

/// AntiDepEdges - Return in Edges the anti- and output- dependencies
/// in SU that we want to consider for breaking. Only the first edge
/// seen for each register is kept.
static void AntiDepEdges(const SUnit *SU, std::vector<const SDep*>& Edges) {
  SmallSet<unsigned, 4> RegSet;
  for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
       P != PE; ++P) {
    if ((P->getKind() == SDep::Anti) || (P->getKind() == SDep::Output)) {
      if (RegSet.insert(P->getReg()))
        Edges.push_back(&*P);
    }
  }
}

/// CriticalPathStep - Return the next SUnit after SU on the bottom-up
/// critical path.
static const SUnit *CriticalPathStep(const SUnit *SU) {
  const SDep *Next = nullptr;
  unsigned NextDepth = 0;
  // Find the predecessor edge with the greatest depth.
  if (SU) {
    for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
         P != PE; ++P) {
      const SUnit *PredSU = P->getSUnit();
      unsigned PredLatency = P->getLatency();
      unsigned PredTotalLatency = PredSU->getDepth() + PredLatency;
      // In the case of a latency tie, prefer an anti-dependency edge over
      // other types of edges.
      if (NextDepth < PredTotalLatency ||
          (NextDepth == PredTotalLatency && P->getKind() == SDep::Anti)) {
        NextDepth = PredTotalLatency;
        Next = &*P;
      }
    }
  }

  return (Next) ? Next->getSUnit() : nullptr;
}

// Mark Reg (and its subregisters) as killed at KillIdx if it is not
// already live: reset its def index, drop its recorded references, and
// move it into a fresh singleton group so it becomes a rename candidate.
// The tag/header/footer strings are only used for debug output.
void AggressiveAntiDepBreaker::HandleLastUse(unsigned Reg, unsigned KillIdx,
                                             const char *tag,
                                             const char *header,
                                             const char *footer) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  if (!State->IsLive(Reg)) {
    KillIndices[Reg] = KillIdx;
    DefIndices[Reg] = ~0u;
    RegRefs.erase(Reg);
    State->LeaveGroup(Reg);
    DEBUG(if (header) {
        dbgs() << header << TRI->getName(Reg); header = nullptr; });
    DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << tag);
  }
  // Repeat for subregisters.
  for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs) {
    unsigned SubregReg = *SubRegs;
    if (!State->IsLive(SubregReg)) {
      KillIndices[SubregReg] = KillIdx;
      DefIndices[SubregReg] = ~0u;
      RegRefs.erase(SubregReg);
      State->LeaveGroup(SubregReg);
      DEBUG(if (header) {
          dbgs() << header << TRI->getName(Reg); header = nullptr; });
      DEBUG(dbgs() << " " << TRI->getName(SubregReg) << "->g" <<
            State->GetGroup(SubregReg) << tag);
    }
  }

  DEBUG(if (!header && footer) dbgs() << footer);
}

// Process the defs of MI (bottom-up scan): merge groups for aliased
// live registers, record register references for later renaming, and
// update def indices.
void AggressiveAntiDepBreaker::PrescanInstruction(MachineInstr *MI,
                                                  unsigned Count,
                                                  std::set<unsigned>& PassthruRegs) {
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Handle dead defs by simulating a last-use of the register just
  // after the def. A dead def can occur because the def is truly
  // dead, or because only a subregister is live at the def. If we
  // don't do this the dead def will be incorrectly merged into the
  // previous def.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    HandleLastUse(Reg, Count + 1, "", "\tDead Def: ", "\n");
  }

  DEBUG(dbgs() << "\tDef Groups:");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" << State->GetGroup(Reg));

    // If MI's defs have a special allocation requirement, don't allow
    // any def registers to be changed. Also assume all registers
    // defined in a call must not be changed (ABI).
    if (MI->isCall() || MI->hasExtraDefRegAllocReq() ||
        TII->isPredicated(MI)) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Any aliased that are live at this point are completely or
    // partially defined here, so group those aliases with Reg.
    for (MCRegAliasIterator AI(Reg, TRI, false); AI.isValid(); ++AI) {
      unsigned AliasReg = *AI;
      if (State->IsLive(AliasReg)) {
        State->UnionGroups(Reg, AliasReg);
        DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << "(via " <<
              TRI->getName(AliasReg) << ")");
      }
    }

    // Note register reference...
    const TargetRegisterClass *RC = nullptr;
    if (i < MI->getDesc().getNumOperands())
      RC = TII->getRegClass(MI->getDesc(), i, TRI, MF);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Scan the register defs for this instruction and update
  // live-ranges.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;
    // Ignore KILLs and passthru registers for liveness...
    if (MI->isKill() || (PassthruRegs.count(Reg) != 0))
      continue;

    // Update def for Reg and aliases.
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      // We need to be careful here not to define already-live super registers.
      // If the super register is already live, then this definition is not
      // a definition of the whole super register (just a partial insertion
      // into it). Earlier subregister definitions (which we've not yet visited
      // because we're iterating bottom-up) need to be linked to the same group
      // as this definition.
      if (TRI->isSuperRegister(Reg, *AI) && State->IsLive(*AI))
        continue;

      DefIndices[*AI] = Count;
    }
  }
}

// Process the uses of MI (bottom-up scan): start new live-ranges for
// first-seen uses, record references, and group all operands of KILL
// instructions so they rename together.
void AggressiveAntiDepBreaker::ScanInstruction(MachineInstr *MI,
                                               unsigned Count) {
  DEBUG(dbgs() << "\tUse Groups:");
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // If MI's uses have special allocation requirement, don't allow
  // any use registers to be changed. Also assume all registers
  // used in a call must not be changed (ABI).
  // FIXME: The issue with predicated instruction is more complex. We are being
  // conservatively here because the kill markers cannot be trusted after
  // if-conversion:
  // %R6<def> = LDR %SP, %reg0, 92, pred:14, pred:%reg0; mem:LD4[FixedStack14]
  // ...
  // STR %R0, %R6<kill>, %reg0, 0, pred:0, pred:%CPSR; mem:ST4[%395]
  // %R6<def> = LDR %SP, %reg0, 100, pred:0, pred:%CPSR; mem:LD4[FixedStack12]
  // STR %R0, %R6<kill>, %reg0, 0, pred:14, pred:%reg0; mem:ST4[%396](align=8)
  //
  // The first R6 kill is not really a kill since it's killed by a predicated
  // instruction which may not be executed. The second R6 def may or may not
  // re-define R6 so it's not safe to change it since the last R6 use cannot be
  // changed.
  bool Special = MI->isCall() ||
    MI->hasExtraSrcRegAllocReq() ||
    TII->isPredicated(MI);

  // Scan the register uses for this instruction and update
  // live-ranges, groups and RegRefs.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isUse()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" <<
          State->GetGroup(Reg));

    // It wasn't previously live but now it is, this is a kill. Forget
    // the previous live-range information and start a new live-range
    // for the register.
    HandleLastUse(Reg, Count, "(last-use)");

    if (Special) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Note register reference...
    const TargetRegisterClass *RC = nullptr;
    if (i < MI->getDesc().getNumOperands())
      RC = TII->getRegClass(MI->getDesc(), i, TRI, MF);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Form a group of all defs and uses of a KILL instruction to ensure
  // that all registers are renamed as a group.
  if (MI->isKill()) {
    DEBUG(dbgs() << "\tKill Group:");

    unsigned FirstReg = 0;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (Reg == 0) continue;

      if (FirstReg != 0) {
        DEBUG(dbgs() << "=" << TRI->getName(Reg));
        State->UnionGroups(FirstReg, Reg);
      } else {
        DEBUG(dbgs() << " " << TRI->getName(Reg));
        FirstReg = Reg;
      }
    }

    DEBUG(dbgs() << "->g" << State->GetGroup(FirstReg) << '\n');
  }
}

// Compute the set of registers that Reg could legally be renamed to by
// intersecting the allocatable sets of the register classes of all of
// Reg's recorded references.
BitVector AggressiveAntiDepBreaker::GetRenameRegisters(unsigned Reg) {
  BitVector BV(TRI->getNumRegs(), false);
  bool first = true;

  // Check all references that need rewriting for Reg. For each, use
  // the corresponding register class to narrow the set of registers
  // that are appropriate for renaming.
  std::pair<std::multimap<unsigned,
                     AggressiveAntiDepState::RegisterReference>::iterator,
            std::multimap<unsigned,
                     AggressiveAntiDepState::RegisterReference>::iterator>
    Range = State->GetRegRefs().equal_range(Reg);
  for (std::multimap<unsigned,
       AggressiveAntiDepState::RegisterReference>::iterator Q = Range.first,
       QE = Range.second; Q != QE; ++Q) {
    const TargetRegisterClass *RC = Q->second.RC;
    // References with no register-class constraint don't narrow the set.
    if (!RC) continue;

    BitVector RCBV = TRI->getAllocatableSet(MF, RC);
    if (first) {
      BV |= RCBV;
      first = false;
    } else {
      BV &= RCBV;
    }

    DEBUG(dbgs() << " " << RC->getName());
  }

  return BV;
}

// Try to find, for every register in the group AntiDepGroupIndex, a
// free register it can be renamed to. On success fill RenameMap with
// (old-reg -> new-reg) pairs and return true. RenameOrder remembers,
// per register class, where the round-robin search last stopped.
bool AggressiveAntiDepBreaker::FindSuitableFreeRegisters(
                                unsigned AntiDepGroupIndex,
                                RenameOrderType& RenameOrder,
                                std::map<unsigned, unsigned> &RenameMap) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Collect all referenced registers in the same group as
  // AntiDepReg. These all need to be renamed together if we are to
  // break the anti-dependence.
  std::vector<unsigned> Regs;
  State->GetGroupRegs(AntiDepGroupIndex, Regs, &RegRefs);
  assert(Regs.size() > 0 && "Empty register group!");
  if (Regs.size() == 0)
    return false;

  // Find the "superest" register in the group. At the same time,
  // collect the BitVector of registers that can be used to rename
  // each register.
  DEBUG(dbgs() << "\tRename Candidates for Group g" << AntiDepGroupIndex
        << ":\n");
  std::map<unsigned, BitVector> RenameRegisterMap;
  unsigned SuperReg = 0;
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if ((SuperReg == 0) || TRI->isSuperRegister(SuperReg, Reg))
      SuperReg = Reg;

    // If Reg has any references, then collect possible rename regs
    if (RegRefs.count(Reg) > 0) {
      DEBUG(dbgs() << "\t\t" << TRI->getName(Reg) << ":");

      BitVector BV = GetRenameRegisters(Reg);
      RenameRegisterMap.insert(std::pair<unsigned, BitVector>(Reg, BV));

      DEBUG(dbgs() << " ::");
      DEBUG(for (int r = BV.find_first(); r != -1; r = BV.find_next(r))
              dbgs() << " " << TRI->getName(r));
      DEBUG(dbgs() << "\n");
    }
  }

  // All group registers should be a subreg of SuperReg.
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if (Reg == SuperReg) continue;
    bool IsSub = TRI->isSubRegister(SuperReg, Reg);
    // FIXME: remove this once PR18663 has been properly fixed. For now,
    // return a conservative answer:
    // assert(IsSub && "Expecting group subregister");
    if (!IsSub)
      return false;
  }

#ifndef NDEBUG
  // If DebugDiv > 0 then only rename (renamecnt % DebugDiv) == DebugMod
  if (DebugDiv > 0) {
    static int renamecnt = 0;
    if (renamecnt++ % DebugDiv != DebugMod)
      return false;

    dbgs() << "*** Performing rename " << TRI->getName(SuperReg) <<
      " for debug ***\n";
  }
#endif

  // Check each possible rename register for SuperReg in round-robin
  // order. If that register is available, and the corresponding
  // registers are available for the other group subregisters, then we
  // can use those registers to rename.

  // FIXME: Using getMinimalPhysRegClass is very conservative. We should
  // check every use of the register and find the largest register class
  // that can be used in all of them.
  const TargetRegisterClass *SuperRC =
    TRI->getMinimalPhysRegClass(SuperReg, MVT::Other);

  ArrayRef<MCPhysReg> Order = RegClassInfo.getOrder(SuperRC);
  if (Order.empty()) {
    DEBUG(dbgs() << "\tEmpty Super Regclass!!\n");
    return false;
  }

  DEBUG(dbgs() << "\tFind Registers:");

  // Seed the round-robin position for this class on first use only
  // (map::insert does not overwrite an existing entry).
  RenameOrder.insert(RenameOrderType::value_type(SuperRC, Order.size()));

  unsigned OrigR = RenameOrder[SuperRC];
  unsigned EndR = ((OrigR == Order.size()) ? 0 : OrigR);
  unsigned R = OrigR;
  do {
    if (R == 0) R = Order.size();
    --R;
    const unsigned NewSuperReg = Order[R];
    // Don't consider non-allocatable registers
    if (!MRI.isAllocatable(NewSuperReg)) continue;
    // Don't replace a register with itself.
    if (NewSuperReg == SuperReg) continue;

    DEBUG(dbgs() << " [" << TRI->getName(NewSuperReg) << ':');
    RenameMap.clear();

    // For each referenced group register (which must be a SuperReg or
    // a subregister of SuperReg), find the corresponding subregister
    // of NewSuperReg and make sure it is free to be renamed.
    for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
      unsigned Reg = Regs[i];
      unsigned NewReg = 0;
      if (Reg == SuperReg) {
        NewReg = NewSuperReg;
      } else {
        unsigned NewSubRegIdx = TRI->getSubRegIndex(SuperReg, Reg);
        if (NewSubRegIdx != 0)
          NewReg = TRI->getSubReg(NewSuperReg, NewSubRegIdx);
      }

      DEBUG(dbgs() << " " << TRI->getName(NewReg));

      // Check if Reg can be renamed to NewReg.
      BitVector BV = RenameRegisterMap[Reg];
      if (!BV.test(NewReg)) {
        DEBUG(dbgs() << "(no rename)");
        goto next_super_reg;
      }

      // If NewReg is dead and NewReg's most recent def is not before
      // Regs's kill, it's safe to replace Reg with NewReg. We
      // must also check all aliases of NewReg, because we can't define a
      // register when any sub or super is already live.
      if (State->IsLive(NewReg) || (KillIndices[Reg] > DefIndices[NewReg])) {
        DEBUG(dbgs() << "(live)");
        goto next_super_reg;
      } else {
        bool found = false;
        for (MCRegAliasIterator AI(NewReg, TRI, false); AI.isValid(); ++AI) {
          unsigned AliasReg = *AI;
          if (State->IsLive(AliasReg) ||
              (KillIndices[Reg] > DefIndices[AliasReg])) {
            DEBUG(dbgs() << "(alias " << TRI->getName(AliasReg) << " live)");
            found = true;
            break;
          }
        }
        if (found)
          goto next_super_reg;
      }

      // Record that 'Reg' can be renamed to 'NewReg'.
      RenameMap.insert(std::pair<unsigned, unsigned>(Reg, NewReg));
    }

    // If we fall-out here, then every register in the group can be
    // renamed, as recorded in RenameMap.
    RenameOrder.erase(SuperRC);
    RenameOrder.insert(RenameOrderType::value_type(SuperRC, R));
    DEBUG(dbgs() << "]\n");
    return true;

  next_super_reg:
    DEBUG(dbgs() << ']');
  } while (R != EndR);

  DEBUG(dbgs() << '\n');

  // No registers are free and available!
  return false;
}

/// BreakAntiDependencies - Identifiy anti-dependencies within the
/// ScheduleDAG and break them by renaming registers.
///
unsigned AggressiveAntiDepBreaker::BreakAntiDependencies(
                              const std::vector<SUnit>& SUnits,
                              MachineBasicBlock::iterator Begin,
                              MachineBasicBlock::iterator End,
                              unsigned InsertPosIndex,
                              DbgValueVector &DbgValues) {

  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // The code below assumes that there is at least one instruction,
  // so just duck out immediately if the block is empty.
  if (SUnits.empty()) return 0;

  // For each regclass the next register to use for renaming.
  RenameOrderType RenameOrder;

  // ...need a map from MI to SUnit.
  std::map<MachineInstr *, const SUnit *> MISUnitMap;
  for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
    const SUnit *SU = &SUnits[i];
    MISUnitMap.insert(std::pair<MachineInstr *, const SUnit *>(SU->getInstr(),
                                                               SU));
  }

  // Track progress along the critical path through the SUnit graph as
  // we walk the instructions. This is needed for regclasses that only
  // break critical-path anti-dependencies.
  const SUnit *CriticalPathSU = nullptr;
  MachineInstr *CriticalPathMI = nullptr;
  if (CriticalPathSet.any()) {
    for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
      const SUnit *SU = &SUnits[i];
      if (!CriticalPathSU ||
          ((SU->getDepth() + SU->Latency) >
           (CriticalPathSU->getDepth() + CriticalPathSU->Latency))) {
        CriticalPathSU = SU;
      }
    }

    CriticalPathMI = CriticalPathSU->getInstr();
  }

#ifndef NDEBUG
  DEBUG(dbgs() << "\n===== Aggressive anti-dependency breaking\n");
  DEBUG(dbgs() << "Available regs:");
  for (unsigned Reg = 0; Reg < TRI->getNumRegs(); ++Reg) {
    if (!State->IsLive(Reg))
      DEBUG(dbgs() << " " << TRI->getName(Reg));
  }
  DEBUG(dbgs() << '\n');
#endif

  // Attempt to break anti-dependence edges. Walk the instructions
  // from the bottom up, tracking information about liveness as we go
  // to help determine which registers are available.
  unsigned Broken = 0;
  unsigned Count = InsertPosIndex - 1;
  for (MachineBasicBlock::iterator I = End, E = Begin;
       I != E; --Count) {
    MachineInstr *MI = --I;

    if (MI->isDebugValue())
      continue;

    DEBUG(dbgs() << "Anti: ");
    DEBUG(MI->dump());

    std::set<unsigned> PassthruRegs;
    GetPassthruRegs(MI, PassthruRegs);

    // Process the defs in MI...
    PrescanInstruction(MI, Count, PassthruRegs);

    // The dependence edges that represent anti- and output-
    // dependencies that are candidates for breaking.
    std::vector<const SDep *> Edges;
    const SUnit *PathSU = MISUnitMap[MI];
    AntiDepEdges(PathSU, Edges);

    // If MI is not on the critical path, then we don't rename
    // registers in the CriticalPathSet.
    BitVector *ExcludeRegs = nullptr;
    if (MI == CriticalPathMI) {
      CriticalPathSU = CriticalPathStep(CriticalPathSU);
      CriticalPathMI = (CriticalPathSU) ? CriticalPathSU->getInstr() : nullptr;
    } else if (CriticalPathSet.any()) {
      ExcludeRegs = &CriticalPathSet;
    }

    // Ignore KILL instructions (they form a group in ScanInstruction
    // but don't cause any anti-dependence breaking themselves)
    if (!MI->isKill()) {
      // Attempt to break each anti-dependency...
      for (unsigned i = 0, e = Edges.size(); i != e; ++i) {
        const SDep *Edge = Edges[i];
        SUnit *NextSU = Edge->getSUnit();

        if ((Edge->getKind() != SDep::Anti) &&
            (Edge->getKind() != SDep::Output)) continue;

        unsigned AntiDepReg = Edge->getReg();
        DEBUG(dbgs() << "\tAntidep reg: " << TRI->getName(AntiDepReg));
        assert(AntiDepReg != 0 && "Anti-dependence on reg0?");

        if (!MRI.isAllocatable(AntiDepReg)) {
          // Don't break anti-dependencies on non-allocatable registers.
          DEBUG(dbgs() << " (non-allocatable)\n");
          continue;
        } else if (ExcludeRegs && ExcludeRegs->test(AntiDepReg)) {
          // Don't break anti-dependencies for critical path registers
          // if not on the critical path
          DEBUG(dbgs() << " (not critical-path)\n");
          continue;
        } else if (PassthruRegs.count(AntiDepReg) != 0) {
          // If the anti-dep register liveness "passes-thru", then
          // don't try to change it. It will be changed along with
          // the use if required to break an earlier antidep.
          DEBUG(dbgs() << " (passthru)\n");
          continue;
        } else {
          // No anti-dep breaking for implicit deps
          MachineOperand *AntiDepOp = MI->findRegisterDefOperand(AntiDepReg);
          assert(AntiDepOp && "Can't find index for defined register operand");
          if (!AntiDepOp || AntiDepOp->isImplicit()) {
            DEBUG(dbgs() << " (implicit)\n");
            continue;
          }

          // If the SUnit has other dependencies on the SUnit that
          // it anti-depends on, don't bother breaking the
          // anti-dependency since those edges would prevent such
          // units from being scheduled past each other
          // regardless.
          //
          // Also, if there are dependencies on other SUnits with the
          // same register as the anti-dependency, don't attempt to
          // break it.
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
                 PE = PathSU->Preds.end(); P != PE; ++P) {
            if (P->getSUnit() == NextSU ?
                (P->getKind() != SDep::Anti || P->getReg() != AntiDepReg) :
                (P->getKind() == SDep::Data && P->getReg() == AntiDepReg)) {
              AntiDepReg = 0;
              break;
            }
          }
          // NOTE(review): this second scan over PathSU->Preds repeats
          // the conditions above with debug output — confirm whether
          // both loops are intentionally kept.
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
                 PE = PathSU->Preds.end(); P != PE; ++P) {
            if ((P->getSUnit() == NextSU) && (P->getKind() != SDep::Anti) &&
                (P->getKind() != SDep::Output)) {
              DEBUG(dbgs() << " (real dependency)\n");
              AntiDepReg = 0;
              break;
            } else if ((P->getSUnit() != NextSU) &&
                       (P->getKind() == SDep::Data) &&
                       (P->getReg() == AntiDepReg)) {
              DEBUG(dbgs() << " (other dependency)\n");
              AntiDepReg = 0;
              break;
            }
          }

          if (AntiDepReg == 0) continue;
        }

        assert(AntiDepReg != 0);
        if (AntiDepReg == 0) continue;

        // Determine AntiDepReg's register group.
        const unsigned GroupIndex = State->GetGroup(AntiDepReg);
        if (GroupIndex == 0) {
          DEBUG(dbgs() << " (zero group)\n");
          continue;
        }

        DEBUG(dbgs() << '\n');

        // Look for a suitable register to use to break the anti-dependence.
        std::map<unsigned, unsigned> RenameMap;
        if (FindSuitableFreeRegisters(GroupIndex, RenameOrder, RenameMap)) {
          DEBUG(dbgs() << "\tBreaking anti-dependence edge on "
                << TRI->getName(AntiDepReg) << ":");

          // Handle each group register...
          for (std::map<unsigned, unsigned>::iterator
                 S = RenameMap.begin(), E = RenameMap.end(); S != E; ++S) {
            unsigned CurrReg = S->first;
            unsigned NewReg = S->second;

            DEBUG(dbgs() << " " << TRI->getName(CurrReg) << "->" <<
                  TRI->getName(NewReg) << "(" <<
                  RegRefs.count(CurrReg) << " refs)");

            // Update the references to the old register CurrReg to
            // refer to the new register NewReg.
            std::pair<std::multimap<unsigned,
                           AggressiveAntiDepState::RegisterReference>::iterator,
                      std::multimap<unsigned,
                           AggressiveAntiDepState::RegisterReference>::iterator>
              Range = RegRefs.equal_range(CurrReg);
            for (std::multimap<unsigned,
                 AggressiveAntiDepState::RegisterReference>::iterator
                   Q = Range.first, QE = Range.second; Q != QE; ++Q) {
              Q->second.Operand->setReg(NewReg);
              // If the SU for the instruction being updated has debug
              // information related to the anti-dependency register, make
              // sure to update that as well.
              const SUnit *SU = MISUnitMap[Q->second.Operand->getParent()];
              if (!SU) continue;
              for (DbgValueVector::iterator DVI = DbgValues.begin(),
                     DVE = DbgValues.end(); DVI != DVE; ++DVI)
                if (DVI->second == Q->second.Operand->getParent())
                  // NOTE(review): the rename performed here is
                  // CurrReg->NewReg, but AntiDepReg is passed as the old
                  // register — confirm this is intended when the group
                  // contains registers other than AntiDepReg.
                  UpdateDbgValue(DVI->first, AntiDepReg, NewReg);
            }

            // We just went back in time and modified history; the
            // liveness information for CurrReg is now inconsistent. Set
            // the state as if it were dead.
            State->UnionGroups(NewReg, 0);
            RegRefs.erase(NewReg);
            DefIndices[NewReg] = DefIndices[CurrReg];
            KillIndices[NewReg] = KillIndices[CurrReg];

            State->UnionGroups(CurrReg, 0);
            RegRefs.erase(CurrReg);
            DefIndices[CurrReg] = KillIndices[CurrReg];
            KillIndices[CurrReg] = ~0u;
            assert(((KillIndices[CurrReg] == ~0u) !=
                    (DefIndices[CurrReg] == ~0u)) &&
                   "Kill and Def maps aren't consistent for AntiDepReg!");
          }

          ++Broken;
          DEBUG(dbgs() << '\n');
        }
      }
    }

    ScanInstruction(MI, Count);
  }

  return Broken;
}