//===----- AggressiveAntiDepBreaker.cpp - Anti-dep breaker ----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the AggressiveAntiDepBreaker class, which
// implements register anti-dependence breaking during post-RA
// scheduling. It attempts to break all anti-dependencies within a
// block.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "post-RA-sched"
#include "AggressiveAntiDepBreaker.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/RegisterClassInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetRegisterInfo.h"
using namespace llvm;

// If DebugDiv > 0 then only break antidep with (ID % DebugDiv) == DebugMod
static cl::opt<int>
DebugDiv("agg-antidep-debugdiv",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);
static cl::opt<int>
DebugMod("agg-antidep-debugmod",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);

// Construct the per-block renaming state. Each register starts in its
// own rename group with no live-range information: no kill index (~0u)
// and a def index at the end of the block (BB->size()).
AggressiveAntiDepState::AggressiveAntiDepState(const unsigned TargetRegs,
                                               MachineBasicBlock *BB) :
  NumTargetRegs(TargetRegs), GroupNodes(TargetRegs, 0),
  GroupNodeIndices(TargetRegs, 0),
  KillIndices(TargetRegs, 0),
  DefIndices(TargetRegs, 0)
{
  const unsigned BBSize = BB->size();
  for (unsigned i = 0; i < NumTargetRegs; ++i) {
    // Initialize all registers to be in their own group. Initially we
    // assign the register to the same-indexed GroupNode.
    GroupNodeIndices[i] = i;
    // Initialize the indices to indicate that no registers are live.
    KillIndices[i] = ~0u;
    DefIndices[i] = BBSize;
  }
}

// Return the group that Reg belongs to by chasing the union-find
// parent links until a self-parented root node is reached. Note this
// does no path compression.
unsigned AggressiveAntiDepState::GetGroup(unsigned Reg) {
  unsigned Node = GroupNodeIndices[Reg];
  while (GroupNodes[Node] != Node)
    Node = GroupNodes[Node];

  return Node;
}

// Collect in Regs every register that belongs to Group and that also
// has at least one reference recorded in RegRefs.
void AggressiveAntiDepState::GetGroupRegs(
  unsigned Group,
  std::vector<unsigned> &Regs,
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference> *RegRefs)
{
  for (unsigned Reg = 0; Reg != NumTargetRegs; ++Reg) {
    if ((GetGroup(Reg) == Group) && (RegRefs->count(Reg) > 0))
      Regs.push_back(Reg);
  }
}

// Merge the groups of Reg1 and Reg2 and return the merged group.
// Group 0 is special (registers that must not be renamed), so when
// either register is in group 0 the result must be group 0.
unsigned AggressiveAntiDepState::UnionGroups(unsigned Reg1, unsigned Reg2)
{
  assert(GroupNodes[0] == 0 && "GroupNode 0 not parent!");
  assert(GroupNodeIndices[0] == 0 && "Reg 0 not in Group 0!");

  // find group for each register
  unsigned Group1 = GetGroup(Reg1);
  unsigned Group2 = GetGroup(Reg2);

  // if either group is 0, then that must become the parent
  unsigned Parent = (Group1 == 0) ? Group1 : Group2;
  unsigned Other = (Parent == Group1) ? Group2 : Group1;
  GroupNodes.at(Other) = Parent;
  return Parent;
}

// Remove Reg from its current group by pointing it at a brand-new
// self-parented node, and return the new group index.
unsigned AggressiveAntiDepState::LeaveGroup(unsigned Reg)
{
  // Create a new GroupNode for Reg. Reg's existing GroupNode must
  // stay as is because there could be other GroupNodes referring to
  // it.
  unsigned idx = GroupNodes.size();
  GroupNodes.push_back(idx);
  GroupNodeIndices[Reg] = idx;
  return idx;
}

bool AggressiveAntiDepState::IsLive(unsigned Reg)
{
  // KillIndex must be defined and DefIndex not defined for a register
  // to be live.
  return((KillIndices[Reg] != ~0u) && (DefIndices[Reg] == ~0u));
}



AggressiveAntiDepBreaker::
AggressiveAntiDepBreaker(MachineFunction& MFi,
                         const RegisterClassInfo &RCI,
                         TargetSubtargetInfo::RegClassVector& CriticalPathRCs) :
  AntiDepBreaker(), MF(MFi),
  MRI(MF.getRegInfo()),
  TII(MF.getTarget().getInstrInfo()),
  TRI(MF.getTarget().getRegisterInfo()),
  RegClassInfo(RCI),
  State(NULL) {
  /* Collect a bitset of all registers that are only broken if they
     are on the critical path. */
  for (unsigned i = 0, e = CriticalPathRCs.size(); i < e; ++i) {
    BitVector CPSet = TRI->getAllocatableSet(MF, CriticalPathRCs[i]);
    // CriticalPathSet starts empty (size 0), so the first set must be
    // assigned rather than OR'ed in.
    if (CriticalPathSet.none())
      CriticalPathSet = CPSet;
    else
      CriticalPathSet |= CPSet;
  }

  DEBUG(dbgs() << "AntiDep Critical-Path Registers:");
  DEBUG(for (int r = CriticalPathSet.find_first(); r != -1;
             r = CriticalPathSet.find_next(r))
          dbgs() << " " << TRI->getName(r));
  DEBUG(dbgs() << '\n');
}

AggressiveAntiDepBreaker::~AggressiveAntiDepBreaker() {
  delete State;
}

// Initialize anti-dep breaking state for a new basic block: mark every
// live-out register (and all of its aliases) as live and unrenamable
// (group 0).
void AggressiveAntiDepBreaker::StartBlock(MachineBasicBlock *BB) {
  assert(State == NULL);
  State = new AggressiveAntiDepState(TRI->getNumRegs(), BB);

  bool IsReturnBlock = (!BB->empty() && BB->back().isReturn());
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();

  // Determine the live-out physregs for this block.
  if (IsReturnBlock) {
    // In a return block, examine the function live-out regs.
    for (MachineRegisterInfo::liveout_iterator I = MRI.liveout_begin(),
         E = MRI.liveout_end(); I != E; ++I) {
      for (MCRegAliasIterator AI(*I, TRI, true); AI.isValid(); ++AI) {
        unsigned Reg = *AI;
        State->UnionGroups(Reg, 0);
        KillIndices[Reg] = BB->size();
        DefIndices[Reg] = ~0u;
      }
    }
  }

  // In a non-return block, examine the live-in regs of all successors.
  // Note a return block can have successors if the return instruction is
  // predicated.
  for (MachineBasicBlock::succ_iterator SI = BB->succ_begin(),
       SE = BB->succ_end(); SI != SE; ++SI)
    for (MachineBasicBlock::livein_iterator I = (*SI)->livein_begin(),
         E = (*SI)->livein_end(); I != E; ++I) {
      for (MCRegAliasIterator AI(*I, TRI, true); AI.isValid(); ++AI) {
        unsigned Reg = *AI;
        State->UnionGroups(Reg, 0);
        KillIndices[Reg] = BB->size();
        DefIndices[Reg] = ~0u;
      }
    }

  // Mark live-out callee-saved registers. In a return block this is
  // all callee-saved registers. In non-return this is any
  // callee-saved register that is not saved in the prolog.
  const MachineFrameInfo *MFI = MF.getFrameInfo();
  BitVector Pristine = MFI->getPristineRegs(BB);
  for (const uint16_t *I = TRI->getCalleeSavedRegs(&MF); *I; ++I) {
    unsigned Reg = *I;
    if (!IsReturnBlock && !Pristine.test(Reg)) continue;
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      unsigned AliasReg = *AI;
      State->UnionGroups(AliasReg, 0);
      KillIndices[AliasReg] = BB->size();
      DefIndices[AliasReg] = ~0u;
    }
  }
}

// Discard all per-block state.
void AggressiveAntiDepBreaker::FinishBlock() {
  delete State;
  State = NULL;
}

// Update liveness information to account for the current instruction,
// which will not be scheduled (i.e. it was observed outside the region
// currently being anti-dep broken).
void AggressiveAntiDepBreaker::Observe(MachineInstr *MI, unsigned Count,
                                       unsigned InsertPosIndex) {
  assert(Count < InsertPosIndex && "Instruction index out of expected range!");

  std::set<unsigned> PassthruRegs;
  GetPassthruRegs(MI, PassthruRegs);
  PrescanInstruction(MI, Count, PassthruRegs);
  ScanInstruction(MI, Count);

  DEBUG(dbgs() << "Observe: ");
  DEBUG(MI->dump());
  DEBUG(dbgs() << "\tRegs:");

  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  for (unsigned Reg = 0; Reg != TRI->getNumRegs(); ++Reg) {
    // If Reg is current live, then mark that it can't be renamed as
    // we don't know the extent of its live-range anymore (now that it
    // has been scheduled). If it is not live but was defined in the
    // previous schedule region, then set its def index to the most
    // conservative location (i.e. the beginning of the previous
    // schedule region).
    if (State->IsLive(Reg)) {
      DEBUG(if (State->GetGroup(Reg) != 0)
              dbgs() << " " << TRI->getName(Reg) << "=g" <<
                State->GetGroup(Reg) << "->g0(region live-out)");
      State->UnionGroups(Reg, 0);
    } else if ((DefIndices[Reg] < InsertPosIndex)
               && (DefIndices[Reg] >= Count)) {
      DefIndices[Reg] = Count;
    }
  }
  DEBUG(dbgs() << '\n');
}

// Return true if MO is an implicit operand that is paired with a
// matching implicit operand of the opposite kind (def paired with use,
// or use paired with def) on the same instruction, meaning the
// register's value "passes through" MI.
bool AggressiveAntiDepBreaker::IsImplicitDefUse(MachineInstr *MI,
                                                MachineOperand& MO)
{
  if (!MO.isReg() || !MO.isImplicit())
    return false;

  unsigned Reg = MO.getReg();
  if (Reg == 0)
    return false;

  MachineOperand *Op = NULL;
  if (MO.isDef())
    Op = MI->findRegisterUseOperand(Reg, true);
  else
    Op = MI->findRegisterDefOperand(Reg);

  return((Op != NULL) && Op->isImplicit());
}

// Collect into PassthruRegs the registers whose liveness passes
// through MI (tied def/use operands and implicit def/use pairs),
// together with all of their subregisters.
void AggressiveAntiDepBreaker::GetPassthruRegs(MachineInstr *MI,
                                               std::set<unsigned>& PassthruRegs) {
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    if ((MO.isDef() && MI->isRegTiedToUseOperand(i)) ||
        IsImplicitDefUse(MI, MO)) {
      const unsigned Reg = MO.getReg();
      PassthruRegs.insert(Reg);
      for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs)
        PassthruRegs.insert(*SubRegs);
    }
  }
}

/// AntiDepEdges - Return in Edges the anti- and output- dependencies
/// in SU that we want to consider for breaking.
static void AntiDepEdges(const SUnit *SU, std::vector<const SDep*>& Edges) {
  SmallSet<unsigned, 4> RegSet;
  for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
       P != PE; ++P) {
    if ((P->getKind() == SDep::Anti) || (P->getKind() == SDep::Output)) {
      unsigned Reg = P->getReg();
      // Only keep the first anti-/output-edge seen for each register.
      if (RegSet.count(Reg) == 0) {
        Edges.push_back(&*P);
        RegSet.insert(Reg);
      }
    }
  }
}

/// CriticalPathStep - Return the next SUnit after SU on the bottom-up
/// critical path.
static const SUnit *CriticalPathStep(const SUnit *SU) {
  const SDep *Next = 0;
  unsigned NextDepth = 0;
  // Find the predecessor edge with the greatest depth.
  if (SU != 0) {
    for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
         P != PE; ++P) {
      const SUnit *PredSU = P->getSUnit();
      unsigned PredLatency = P->getLatency();
      unsigned PredTotalLatency = PredSU->getDepth() + PredLatency;
      // In the case of a latency tie, prefer an anti-dependency edge over
      // other types of edges.
      if (NextDepth < PredTotalLatency ||
          (NextDepth == PredTotalLatency && P->getKind() == SDep::Anti)) {
        NextDepth = PredTotalLatency;
        Next = &*P;
      }
    }
  }

  return (Next) ? Next->getSUnit() : 0;
}

// HandleLastUse - If Reg (or any of its subregisters) is not currently
// live, start a new live range for it ending at KillIdx and move it
// into a fresh rename group. tag/header/footer are debug-output
// decorations only.
void AggressiveAntiDepBreaker::HandleLastUse(unsigned Reg, unsigned KillIdx,
                                             const char *tag,
                                             const char *header,
                                             const char *footer) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  if (!State->IsLive(Reg)) {
    KillIndices[Reg] = KillIdx;
    DefIndices[Reg] = ~0u;
    RegRefs.erase(Reg);
    State->LeaveGroup(Reg);
    DEBUG(if (header != NULL) {
        dbgs() << header << TRI->getName(Reg); header = NULL; });
    DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << tag);
  }
  // Repeat for subregisters.
  for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs) {
    unsigned SubregReg = *SubRegs;
    if (!State->IsLive(SubregReg)) {
      KillIndices[SubregReg] = KillIdx;
      DefIndices[SubregReg] = ~0u;
      RegRefs.erase(SubregReg);
      State->LeaveGroup(SubregReg);
      DEBUG(if (header != NULL) {
          dbgs() << header << TRI->getName(Reg); header = NULL; });
      DEBUG(dbgs() << " " << TRI->getName(SubregReg) << "->g" <<
            State->GetGroup(SubregReg) << tag);
    }
  }

  DEBUG(if ((header == NULL) && (footer != NULL)) dbgs() << footer);
}

// PrescanInstruction - Process the defs of MI: update groups,
// record register references (with their constraining register
// classes), and update def indices for liveness tracking.
void AggressiveAntiDepBreaker::PrescanInstruction(MachineInstr *MI,
                                                  unsigned Count,
                                                  std::set<unsigned>& PassthruRegs) {
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Handle dead defs by simulating a last-use of the register just
  // after the def. A dead def can occur because the def is truly
  // dead, or because only a subregister is live at the def. If we
  // don't do this the dead def will be incorrectly merged into the
  // previous def.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    HandleLastUse(Reg, Count + 1, "", "\tDead Def: ", "\n");
  }

  DEBUG(dbgs() << "\tDef Groups:");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" << State->GetGroup(Reg));

    // If MI's defs have a special allocation requirement, don't allow
    // any def registers to be changed. Also assume all registers
    // defined in a call must not be changed (ABI).
    if (MI->isCall() || MI->hasExtraDefRegAllocReq() ||
        TII->isPredicated(MI)) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Any aliased that are live at this point are completely or
    // partially defined here, so group those aliases with Reg.
    for (MCRegAliasIterator AI(Reg, TRI, false); AI.isValid(); ++AI) {
      unsigned AliasReg = *AI;
      if (State->IsLive(AliasReg)) {
        State->UnionGroups(Reg, AliasReg);
        DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << "(via " <<
              TRI->getName(AliasReg) << ")");
      }
    }

    // Note register reference... A NULL register class means the
    // operand places no class constraint on renaming.
    const TargetRegisterClass *RC = NULL;
    if (i < MI->getDesc().getNumOperands())
      RC = TII->getRegClass(MI->getDesc(), i, TRI, MF);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Scan the register defs for this instruction and update
  // live-ranges.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;
    // Ignore KILLs and passthru registers for liveness...
    if (MI->isKill() || (PassthruRegs.count(Reg) != 0))
      continue;

    // Update def for Reg and aliases.
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI)
      DefIndices[*AI] = Count;
  }
}

// ScanInstruction - Process the uses of MI: update groups and
// live-ranges, record register references, and group together the
// operands of KILL instructions.
void AggressiveAntiDepBreaker::ScanInstruction(MachineInstr *MI,
                                               unsigned Count) {
  DEBUG(dbgs() << "\tUse Groups:");
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // If MI's uses have special allocation requirement, don't allow
  // any use registers to be changed. Also assume all registers
  // used in a call must not be changed (ABI).
  // FIXME: The issue with predicated instruction is more complex. We are being
  // conservatively here because the kill markers cannot be trusted after
  // if-conversion:
  // %R6<def> = LDR %SP, %reg0, 92, pred:14, pred:%reg0; mem:LD4[FixedStack14]
  // ...
  // STR %R0, %R6<kill>, %reg0, 0, pred:0, pred:%CPSR; mem:ST4[%395]
  // %R6<def> = LDR %SP, %reg0, 100, pred:0, pred:%CPSR; mem:LD4[FixedStack12]
  // STR %R0, %R6<kill>, %reg0, 0, pred:14, pred:%reg0; mem:ST4[%396](align=8)
  //
  // The first R6 kill is not really a kill since it's killed by a predicated
  // instruction which may not be executed. The second R6 def may or may not
  // re-define R6 so it's not safe to change it since the last R6 use cannot be
  // changed.
  bool Special = MI->isCall() ||
    MI->hasExtraSrcRegAllocReq() ||
    TII->isPredicated(MI);

  // Scan the register uses for this instruction and update
  // live-ranges, groups and RegRefs.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isUse()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" <<
          State->GetGroup(Reg));

    // It wasn't previously live but now it is, this is a kill. Forget
    // the previous live-range information and start a new live-range
    // for the register.
    HandleLastUse(Reg, Count, "(last-use)");

    if (Special) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Note register reference...
    const TargetRegisterClass *RC = NULL;
    if (i < MI->getDesc().getNumOperands())
      RC = TII->getRegClass(MI->getDesc(), i, TRI, MF);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Form a group of all defs and uses of a KILL instruction to ensure
  // that all registers are renamed as a group.
  if (MI->isKill()) {
    DEBUG(dbgs() << "\tKill Group:");

    unsigned FirstReg = 0;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (Reg == 0) continue;

      if (FirstReg != 0) {
        DEBUG(dbgs() << "=" << TRI->getName(Reg));
        State->UnionGroups(FirstReg, Reg);
      } else {
        DEBUG(dbgs() << " " << TRI->getName(Reg));
        FirstReg = Reg;
      }
    }

    DEBUG(dbgs() << "->g" << State->GetGroup(FirstReg) << '\n');
  }
}

// GetRenameRegisters - Return the set of registers that Reg could be
// renamed to: the intersection of the allocatable sets of every
// register class constraining a reference to Reg.
BitVector AggressiveAntiDepBreaker::GetRenameRegisters(unsigned Reg) {
  BitVector BV(TRI->getNumRegs(), false);
  bool first = true;

  // Check all references that need rewriting for Reg. For each, use
  // the corresponding register class to narrow the set of registers
  // that are appropriate for renaming.
  std::pair<std::multimap<unsigned,
                     AggressiveAntiDepState::RegisterReference>::iterator,
            std::multimap<unsigned,
                     AggressiveAntiDepState::RegisterReference>::iterator>
    Range = State->GetRegRefs().equal_range(Reg);
  for (std::multimap<unsigned,
       AggressiveAntiDepState::RegisterReference>::iterator Q = Range.first,
       QE = Range.second; Q != QE; ++Q) {
    const TargetRegisterClass *RC = Q->second.RC;
    // References with no register class don't constrain the rename.
    if (RC == NULL) continue;

    BitVector RCBV = TRI->getAllocatableSet(MF, RC);
    if (first) {
      BV |= RCBV;
      first = false;
    } else {
      BV &= RCBV;
    }

    DEBUG(dbgs() << " " << RC->getName());
  }

  return BV;
}

// FindSuitableFreeRegisters - Try to find free registers to rename the
// whole group AntiDepGroupIndex at once. On success fills RenameMap
// with an old-register -> new-register mapping for every referenced
// register in the group and returns true.
bool AggressiveAntiDepBreaker::FindSuitableFreeRegisters(
                                unsigned AntiDepGroupIndex,
                                RenameOrderType& RenameOrder,
                                std::map<unsigned, unsigned> &RenameMap) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Collect all referenced registers in the same group as
  // AntiDepReg. These all need to be renamed together if we are to
  // break the anti-dependence.
  std::vector<unsigned> Regs;
  State->GetGroupRegs(AntiDepGroupIndex, Regs, &RegRefs);
  assert(Regs.size() > 0 && "Empty register group!");
  if (Regs.size() == 0)
    return false;

  // Find the "superest" register in the group. At the same time,
  // collect the BitVector of registers that can be used to rename
  // each register.
  DEBUG(dbgs() << "\tRename Candidates for Group g" << AntiDepGroupIndex
        << ":\n");
  std::map<unsigned, BitVector> RenameRegisterMap;
  unsigned SuperReg = 0;
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if ((SuperReg == 0) || TRI->isSuperRegister(SuperReg, Reg))
      SuperReg = Reg;

    // If Reg has any references, then collect possible rename regs
    if (RegRefs.count(Reg) > 0) {
      DEBUG(dbgs() << "\t\t" << TRI->getName(Reg) << ":");

      BitVector BV = GetRenameRegisters(Reg);
      RenameRegisterMap.insert(std::pair<unsigned, BitVector>(Reg, BV));

      DEBUG(dbgs() << " ::");
      DEBUG(for (int r = BV.find_first(); r != -1; r = BV.find_next(r))
              dbgs() << " " << TRI->getName(r));
      DEBUG(dbgs() << "\n");
    }
  }

  // All group registers should be a subreg of SuperReg.
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if (Reg == SuperReg) continue;
    bool IsSub = TRI->isSubRegister(SuperReg, Reg);
    assert(IsSub && "Expecting group subregister");
    if (!IsSub)
      return false;
  }

#ifndef NDEBUG
  // If DebugDiv > 0 then only rename (renamecnt % DebugDiv) == DebugMod
  if (DebugDiv > 0) {
    static int renamecnt = 0;
    if (renamecnt++ % DebugDiv != DebugMod)
      return false;

    dbgs() << "*** Performing rename " << TRI->getName(SuperReg) <<
      " for debug ***\n";
  }
#endif

  // Check each possible rename register for SuperReg in round-robin
  // order. If that register is available, and the corresponding
  // registers are available for the other group subregisters, then we
  // can use those registers to rename.

  // FIXME: Using getMinimalPhysRegClass is very conservative. We should
  // check every use of the register and find the largest register class
  // that can be used in all of them.
  const TargetRegisterClass *SuperRC =
    TRI->getMinimalPhysRegClass(SuperReg, MVT::Other);

  ArrayRef<MCPhysReg> Order = RegClassInfo.getOrder(SuperRC);
  if (Order.empty()) {
    DEBUG(dbgs() << "\tEmpty Super Regclass!!\n");
    return false;
  }

  DEBUG(dbgs() << "\tFind Registers:");

  // Seed the round-robin position for this class on first use.
  if (RenameOrder.count(SuperRC) == 0)
    RenameOrder.insert(RenameOrderType::value_type(SuperRC, Order.size()));

  unsigned OrigR = RenameOrder[SuperRC];
  unsigned EndR = ((OrigR == Order.size()) ? 0 : OrigR);
  unsigned R = OrigR;
  do {
    if (R == 0) R = Order.size();
    --R;
    const unsigned NewSuperReg = Order[R];
    // Don't consider non-allocatable registers
    if (!MRI.isAllocatable(NewSuperReg)) continue;
    // Don't replace a register with itself.
    if (NewSuperReg == SuperReg) continue;

    DEBUG(dbgs() << " [" << TRI->getName(NewSuperReg) << ':');
    RenameMap.clear();

    // For each referenced group register (which must be a SuperReg or
    // a subregister of SuperReg), find the corresponding subregister
    // of NewSuperReg and make sure it is free to be renamed.
    for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
      unsigned Reg = Regs[i];
      unsigned NewReg = 0;
      if (Reg == SuperReg) {
        NewReg = NewSuperReg;
      } else {
        unsigned NewSubRegIdx = TRI->getSubRegIndex(SuperReg, Reg);
        if (NewSubRegIdx != 0)
          NewReg = TRI->getSubReg(NewSuperReg, NewSubRegIdx);
      }

      DEBUG(dbgs() << " " << TRI->getName(NewReg));

      // Check if Reg can be renamed to NewReg.
      BitVector BV = RenameRegisterMap[Reg];
      if (!BV.test(NewReg)) {
        DEBUG(dbgs() << "(no rename)");
        goto next_super_reg;
      }

      // If NewReg is dead and NewReg's most recent def is not before
      // Regs's kill, it's safe to replace Reg with NewReg. We
      // must also check all aliases of NewReg, because we can't define a
      // register when any sub or super is already live.
      if (State->IsLive(NewReg) || (KillIndices[Reg] > DefIndices[NewReg])) {
        DEBUG(dbgs() << "(live)");
        goto next_super_reg;
      } else {
        bool found = false;
        for (MCRegAliasIterator AI(NewReg, TRI, false); AI.isValid(); ++AI) {
          unsigned AliasReg = *AI;
          if (State->IsLive(AliasReg) ||
              (KillIndices[Reg] > DefIndices[AliasReg])) {
            DEBUG(dbgs() << "(alias " << TRI->getName(AliasReg) << " live)");
            found = true;
            break;
          }
        }
        if (found)
          goto next_super_reg;
      }

      // Record that 'Reg' can be renamed to 'NewReg'.
      RenameMap.insert(std::pair<unsigned, unsigned>(Reg, NewReg));
    }

    // If we fall-out here, then every register in the group can be
    // renamed, as recorded in RenameMap. Remember where we stopped
    // so the next rename for this class resumes the round-robin.
    RenameOrder.erase(SuperRC);
    RenameOrder.insert(RenameOrderType::value_type(SuperRC, R));
    DEBUG(dbgs() << "]\n");
    return true;

  next_super_reg:
    DEBUG(dbgs() << ']');
  } while (R != EndR);

  DEBUG(dbgs() << '\n');

  // No registers are free and available!
  return false;
}

/// BreakAntiDependencies - Identifiy anti-dependencies within the
/// ScheduleDAG and break them by renaming registers.
///
unsigned AggressiveAntiDepBreaker::BreakAntiDependencies(
                              const std::vector<SUnit>& SUnits,
                              MachineBasicBlock::iterator Begin,
                              MachineBasicBlock::iterator End,
                              unsigned InsertPosIndex,
                              DbgValueVector &DbgValues) {

  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // The code below assumes that there is at least one instruction,
  // so just duck out immediately if the block is empty.
  if (SUnits.empty()) return 0;

  // For each regclass the next register to use for renaming.
  RenameOrderType RenameOrder;

  // ...need a map from MI to SUnit.
  std::map<MachineInstr *, const SUnit *> MISUnitMap;
  for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
    const SUnit *SU = &SUnits[i];
    MISUnitMap.insert(std::pair<MachineInstr *, const SUnit *>(SU->getInstr(),
                                                               SU));
  }

  // Track progress along the critical path through the SUnit graph as
  // we walk the instructions. This is needed for regclasses that only
  // break critical-path anti-dependencies.
  const SUnit *CriticalPathSU = 0;
  MachineInstr *CriticalPathMI = 0;
  if (CriticalPathSet.any()) {
    for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
      const SUnit *SU = &SUnits[i];
      if (!CriticalPathSU ||
          ((SU->getDepth() + SU->Latency) >
           (CriticalPathSU->getDepth() + CriticalPathSU->Latency))) {
        CriticalPathSU = SU;
      }
    }

    CriticalPathMI = CriticalPathSU->getInstr();
  }

#ifndef NDEBUG
  DEBUG(dbgs() << "\n===== Aggressive anti-dependency breaking\n");
  DEBUG(dbgs() << "Available regs:");
  for (unsigned Reg = 0; Reg < TRI->getNumRegs(); ++Reg) {
    if (!State->IsLive(Reg))
      DEBUG(dbgs() << " " << TRI->getName(Reg));
  }
  DEBUG(dbgs() << '\n');
#endif

  // Attempt to break anti-dependence edges. Walk the instructions
  // from the bottom up, tracking information about liveness as we go
  // to help determine which registers are available.
  unsigned Broken = 0;
  unsigned Count = InsertPosIndex - 1;
  for (MachineBasicBlock::iterator I = End, E = Begin;
       I != E; --Count) {
    MachineInstr *MI = --I;

    if (MI->isDebugValue())
      continue;

    DEBUG(dbgs() << "Anti: ");
    DEBUG(MI->dump());

    std::set<unsigned> PassthruRegs;
    GetPassthruRegs(MI, PassthruRegs);

    // Process the defs in MI...
    PrescanInstruction(MI, Count, PassthruRegs);

    // The dependence edges that represent anti- and output-
    // dependencies that are candidates for breaking.
    std::vector<const SDep *> Edges;
    const SUnit *PathSU = MISUnitMap[MI];
    AntiDepEdges(PathSU, Edges);

    // If MI is not on the critical path, then we don't rename
    // registers in the CriticalPathSet.
    BitVector *ExcludeRegs = NULL;
    if (MI == CriticalPathMI) {
      CriticalPathSU = CriticalPathStep(CriticalPathSU);
      CriticalPathMI = (CriticalPathSU) ? CriticalPathSU->getInstr() : 0;
    } else {
      ExcludeRegs = &CriticalPathSet;
    }

    // Ignore KILL instructions (they form a group in ScanInstruction
    // but don't cause any anti-dependence breaking themselves)
    if (!MI->isKill()) {
      // Attempt to break each anti-dependency...
      for (unsigned i = 0, e = Edges.size(); i != e; ++i) {
        const SDep *Edge = Edges[i];
        SUnit *NextSU = Edge->getSUnit();

        if ((Edge->getKind() != SDep::Anti) &&
            (Edge->getKind() != SDep::Output)) continue;

        unsigned AntiDepReg = Edge->getReg();
        DEBUG(dbgs() << "\tAntidep reg: " << TRI->getName(AntiDepReg));
        assert(AntiDepReg != 0 && "Anti-dependence on reg0?");

        if (!MRI.isAllocatable(AntiDepReg)) {
          // Don't break anti-dependencies on non-allocatable registers.
          DEBUG(dbgs() << " (non-allocatable)\n");
          continue;
        } else if ((ExcludeRegs != NULL) && ExcludeRegs->test(AntiDepReg)) {
          // Don't break anti-dependencies for critical path registers
          // if not on the critical path
          DEBUG(dbgs() << " (not critical-path)\n");
          continue;
        } else if (PassthruRegs.count(AntiDepReg) != 0) {
          // If the anti-dep register liveness "passes-thru", then
          // don't try to change it. It will be changed along with
          // the use if required to break an earlier antidep.
          DEBUG(dbgs() << " (passthru)\n");
          continue;
        } else {
          // No anti-dep breaking for implicit deps
          MachineOperand *AntiDepOp = MI->findRegisterDefOperand(AntiDepReg);
          assert(AntiDepOp != NULL &&
                 "Can't find index for defined register operand");
          if ((AntiDepOp == NULL) || AntiDepOp->isImplicit()) {
            DEBUG(dbgs() << " (implicit)\n");
            continue;
          }

          // If the SUnit has other dependencies on the SUnit that
          // it anti-depends on, don't bother breaking the
          // anti-dependency since those edges would prevent such
          // units from being scheduled past each other
          // regardless.
          //
          // Also, if there are dependencies on other SUnits with the
          // same register as the anti-dependency, don't attempt to
          // break it.
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
                 PE = PathSU->Preds.end(); P != PE; ++P) {
            if (P->getSUnit() == NextSU ?
                (P->getKind() != SDep::Anti || P->getReg() != AntiDepReg) :
                (P->getKind() == SDep::Data && P->getReg() == AntiDepReg)) {
              AntiDepReg = 0;
              break;
            }
          }
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
                 PE = PathSU->Preds.end(); P != PE; ++P) {
            if ((P->getSUnit() == NextSU) && (P->getKind() != SDep::Anti) &&
                (P->getKind() != SDep::Output)) {
              DEBUG(dbgs() << " (real dependency)\n");
              AntiDepReg = 0;
              break;
            } else if ((P->getSUnit() != NextSU) &&
                       (P->getKind() == SDep::Data) &&
                       (P->getReg() == AntiDepReg)) {
              DEBUG(dbgs() << " (other dependency)\n");
              AntiDepReg = 0;
              break;
            }
          }

          if (AntiDepReg == 0) continue;
        }

        assert(AntiDepReg != 0);
        if (AntiDepReg == 0) continue;

        // Determine AntiDepReg's register group.
        const unsigned GroupIndex = State->GetGroup(AntiDepReg);
        if (GroupIndex == 0) {
          // Group 0 registers must not be renamed.
          DEBUG(dbgs() << " (zero group)\n");
          continue;
        }

        DEBUG(dbgs() << '\n');

        // Look for a suitable register to use to break the anti-dependence.
        std::map<unsigned, unsigned> RenameMap;
        if (FindSuitableFreeRegisters(GroupIndex, RenameOrder, RenameMap)) {
          DEBUG(dbgs() << "\tBreaking anti-dependence edge on "
                << TRI->getName(AntiDepReg) << ":");

          // Handle each group register...
          for (std::map<unsigned, unsigned>::iterator
                 S = RenameMap.begin(), E = RenameMap.end(); S != E; ++S) {
            unsigned CurrReg = S->first;
            unsigned NewReg = S->second;

            DEBUG(dbgs() << " " << TRI->getName(CurrReg) << "->" <<
                  TRI->getName(NewReg) << "(" <<
                  RegRefs.count(CurrReg) << " refs)");

            // Update the references to the old register CurrReg to
            // refer to the new register NewReg.
            std::pair<std::multimap<unsigned,
                           AggressiveAntiDepState::RegisterReference>::iterator,
                      std::multimap<unsigned,
                           AggressiveAntiDepState::RegisterReference>::iterator>
              Range = RegRefs.equal_range(CurrReg);
            for (std::multimap<unsigned,
                 AggressiveAntiDepState::RegisterReference>::iterator
                   Q = Range.first, QE = Range.second; Q != QE; ++Q) {
              Q->second.Operand->setReg(NewReg);
              // If the SU for the instruction being updated has debug
              // information related to the anti-dependency register, make
              // sure to update that as well.
              const SUnit *SU = MISUnitMap[Q->second.Operand->getParent()];
              if (!SU) continue;
              for (DbgValueVector::iterator DVI = DbgValues.begin(),
                     DVE = DbgValues.end(); DVI != DVE; ++DVI)
                if (DVI->second == Q->second.Operand->getParent())
                  UpdateDbgValue(DVI->first, AntiDepReg, NewReg);
            }

            // We just went back in time and modified history; the
            // liveness information for CurrReg is now inconsistent. Set
            // the state as if it were dead.
            State->UnionGroups(NewReg, 0);
            RegRefs.erase(NewReg);
            DefIndices[NewReg] = DefIndices[CurrReg];
            KillIndices[NewReg] = KillIndices[CurrReg];

            State->UnionGroups(CurrReg, 0);
            RegRefs.erase(CurrReg);
            DefIndices[CurrReg] = KillIndices[CurrReg];
            KillIndices[CurrReg] = ~0u;
            assert(((KillIndices[CurrReg] == ~0u) !=
                    (DefIndices[CurrReg] == ~0u)) &&
                   "Kill and Def maps aren't consistent for AntiDepReg!");
          }

          ++Broken;
          DEBUG(dbgs() << '\n');
        }
      }
    }

    ScanInstruction(MI, Count);
  }

  return Broken;
}