//===- MachineCopyPropagation.cpp - Machine Copy Propagation Pass ---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This is an extremely simple MachineInstr-level copy propagation pass.
//
// This pass forwards the source of COPYs to the users of their destinations
// when doing so is legal. For example:
//
//   %reg1 = COPY %reg0
//   ...
//   ... = OP %reg1
//
// If
//   - %reg0 has not been clobbered by the time of the use of %reg1
//   - the register class constraints are satisfied
//   - the COPY def is the only value that reaches OP
// then this pass replaces the above with:
//
//   %reg1 = COPY %reg0
//   ...
//   ... = OP %reg0
//
// This pass also removes some redundant COPYs. For example:
//
//   %R1 = COPY %R0
//   ...              // No clobber of %R1
//   %R0 = COPY %R1   <<< Removed
//
// or
//
//   %R1 = COPY %R0
//   ...              // No clobber of %R0
//   %R1 = COPY %R0   <<< Removed
//
// or
//
//   $R0 = OP ...
//   ...              // No read/clobber of $R0 and $R1
//   $R1 = COPY $R0   // $R0 is killed
//   Replace $R0 with $R1 and remove the COPY
//   $R1 = OP ...
//   ...
//
//===----------------------------------------------------------------------===//

#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/InitializePasses.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/DebugCounter.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <iterator>

using namespace llvm;

#define DEBUG_TYPE "machine-cp"

STATISTIC(NumDeletes, "Number of dead copies deleted");
STATISTIC(NumCopyForwards, "Number of copy uses forwarded");
STATISTIC(NumCopyBackwardPropagated, "Number of copy defs backward propagated");
DEBUG_COUNTER(FwdCounter, "machine-cp-fwd",
              "Controls which register COPYs are forwarded");

namespace {

class CopyTracker {
  struct CopyInfo {
    MachineInstr *MI;
    SmallVector<unsigned, 4> DefRegs;
    bool Avail;
  };

  DenseMap<unsigned, CopyInfo> Copies;

public:
  /// Mark all of the given registers and their subregisters as unavailable for
  /// copying.
  void markRegsUnavailable(ArrayRef<unsigned> Regs,
                           const TargetRegisterInfo &TRI) {
    for (unsigned Reg : Regs) {
      // Source of copy is no longer available for propagation.
      for (MCRegUnitIterator RUI(Reg, &TRI); RUI.isValid(); ++RUI) {
        auto CI = Copies.find(*RUI);
        if (CI != Copies.end())
          CI->second.Avail = false;
      }
    }
  }

  /// Remove register from copy maps.
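  ///
  /// For example (register names are illustrative only): if the tracker
  /// currently holds "$eax = COPY $ebx" and $ax is invalidated, the entries
  /// for all register units of both $eax and $ebx are erased, not just the
  /// units of $ax.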
  void invalidateRegister(unsigned Reg, const TargetRegisterInfo &TRI) {
    // Since Reg might be a subreg of some registers, invalidating Reg alone is
    // not enough. We have to find both the COPY that defines Reg and the
    // registers that have been defined as copies of Reg, and invalidate all of
    // them.
    SmallSet<unsigned, 8> RegsToInvalidate;
    RegsToInvalidate.insert(Reg);
    for (MCRegUnitIterator RUI(Reg, &TRI); RUI.isValid(); ++RUI) {
      auto I = Copies.find(*RUI);
      if (I != Copies.end()) {
        if (MachineInstr *MI = I->second.MI) {
          RegsToInvalidate.insert(MI->getOperand(0).getReg());
          RegsToInvalidate.insert(MI->getOperand(1).getReg());
        }
        RegsToInvalidate.insert(I->second.DefRegs.begin(),
                                I->second.DefRegs.end());
      }
    }
    for (unsigned InvalidReg : RegsToInvalidate)
      for (MCRegUnitIterator RUI(InvalidReg, &TRI); RUI.isValid(); ++RUI)
        Copies.erase(*RUI);
  }

  /// Clobber a single register, removing it from the tracker's copy maps.
  void clobberRegister(unsigned Reg, const TargetRegisterInfo &TRI) {
    for (MCRegUnitIterator RUI(Reg, &TRI); RUI.isValid(); ++RUI) {
      auto I = Copies.find(*RUI);
      if (I != Copies.end()) {
        // When we clobber the source of a copy, we need to clobber everything
        // it defined.
        markRegsUnavailable(I->second.DefRegs, TRI);
        // When we clobber the destination of a copy, we need to clobber the
        // whole register it defined.
        if (MachineInstr *MI = I->second.MI)
          markRegsUnavailable({MI->getOperand(0).getReg()}, TRI);
        // Now we can erase the copy.
        Copies.erase(I);
      }
    }
  }

  /// Add this copy's registers into the tracker's copy maps.
  void trackCopy(MachineInstr *MI, const TargetRegisterInfo &TRI) {
    assert(MI->isCopy() && "Tracking non-copy?");

    Register Def = MI->getOperand(0).getReg();
    Register Src = MI->getOperand(1).getReg();

    // Remember Def is defined by the copy.
    for (MCRegUnitIterator RUI(Def, &TRI); RUI.isValid(); ++RUI)
      Copies[*RUI] = {MI, {}, true};

    // Remember the source that is copied to Def. Once the source is clobbered,
    // it is no longer available for copy propagation.
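    // For example (illustrative registers): after tracking "$eax = COPY $ebx",
    // the units of $ebx map to an entry whose DefRegs contains $eax, so a
    // later clobber of $ebx marks $eax unavailable via markRegsUnavailable().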
    for (MCRegUnitIterator RUI(Src, &TRI); RUI.isValid(); ++RUI) {
      auto I = Copies.insert({*RUI, {nullptr, {}, false}});
      auto &Copy = I.first->second;
      if (!is_contained(Copy.DefRegs, Def))
        Copy.DefRegs.push_back(Def);
    }
  }

  bool hasAnyCopies() {
    return !Copies.empty();
  }

  MachineInstr *findCopyForUnit(unsigned RegUnit, const TargetRegisterInfo &TRI,
                                bool MustBeAvailable = false) {
    auto CI = Copies.find(RegUnit);
    if (CI == Copies.end())
      return nullptr;
    if (MustBeAvailable && !CI->second.Avail)
      return nullptr;
    return CI->second.MI;
  }

  MachineInstr *findCopyDefViaUnit(unsigned RegUnit,
                                   const TargetRegisterInfo &TRI) {
    auto CI = Copies.find(RegUnit);
    if (CI == Copies.end())
      return nullptr;
    if (CI->second.DefRegs.size() != 1)
      return nullptr;
    MCRegUnitIterator RUI(CI->second.DefRegs[0], &TRI);
    return findCopyForUnit(*RUI, TRI, true);
  }

  MachineInstr *findAvailBackwardCopy(MachineInstr &I, unsigned Reg,
                                      const TargetRegisterInfo &TRI) {
    MCRegUnitIterator RUI(Reg, &TRI);
    MachineInstr *AvailCopy = findCopyDefViaUnit(*RUI, TRI);
    if (!AvailCopy ||
        !TRI.isSubRegisterEq(AvailCopy->getOperand(1).getReg(), Reg))
      return nullptr;

    Register AvailSrc = AvailCopy->getOperand(1).getReg();
    Register AvailDef = AvailCopy->getOperand(0).getReg();
    for (const MachineInstr &MI :
         make_range(AvailCopy->getReverseIterator(), I.getReverseIterator()))
      for (const MachineOperand &MO : MI.operands())
        if (MO.isRegMask())
          // FIXME: Shall we simultaneously invalidate AvailSrc or AvailDef?
          if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
            return nullptr;

    return AvailCopy;
  }

  MachineInstr *findAvailCopy(MachineInstr &DestCopy, unsigned Reg,
                              const TargetRegisterInfo &TRI) {
    // We check the first RegUnit here, since we'll only be interested in the
    // copy if it copies the entire register anyway.
    MCRegUnitIterator RUI(Reg, &TRI);
    MachineInstr *AvailCopy =
        findCopyForUnit(*RUI, TRI, /*MustBeAvailable=*/true);
    if (!AvailCopy ||
        !TRI.isSubRegisterEq(AvailCopy->getOperand(0).getReg(), Reg))
      return nullptr;

    // Check that the available copy isn't clobbered by any regmasks between
    // itself and the destination.
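    // (E.g. a call between the copy and DestCopy typically carries a regmask
    // that clobbers most registers without listing them as explicit defs.)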
    Register AvailSrc = AvailCopy->getOperand(1).getReg();
    Register AvailDef = AvailCopy->getOperand(0).getReg();
    for (const MachineInstr &MI :
         make_range(AvailCopy->getIterator(), DestCopy.getIterator()))
      for (const MachineOperand &MO : MI.operands())
        if (MO.isRegMask())
          if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
            return nullptr;

    return AvailCopy;
  }

  void clear() {
    Copies.clear();
  }
};

class MachineCopyPropagation : public MachineFunctionPass {
  const TargetRegisterInfo *TRI;
  const TargetInstrInfo *TII;
  const MachineRegisterInfo *MRI;

public:
  static char ID; // Pass identification, replacement for typeid

  MachineCopyPropagation() : MachineFunctionPass(ID) {
    initializeMachineCopyPropagationPass(*PassRegistry::getPassRegistry());
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

  MachineFunctionProperties getRequiredProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoVRegs);
  }

private:
  typedef enum { DebugUse = false, RegularUse = true } DebugType;

  void ClobberRegister(unsigned Reg);
  void ReadRegister(unsigned Reg, MachineInstr &Reader,
                    DebugType DT);
  void ForwardCopyPropagateBlock(MachineBasicBlock &MBB);
  void BackwardCopyPropagateBlock(MachineBasicBlock &MBB);
  bool eraseIfRedundant(MachineInstr &Copy, unsigned Src, unsigned Def);
  void forwardUses(MachineInstr &MI);
  void propagateDefs(MachineInstr &MI);
  bool isForwardableRegClassCopy(const MachineInstr &Copy,
                                 const MachineInstr &UseI, unsigned UseIdx);
  bool isBackwardPropagatableRegClassCopy(const MachineInstr &Copy,
                                          const MachineInstr &UseI,
                                          unsigned UseIdx);
  bool hasImplicitOverlap(const MachineInstr &MI, const MachineOperand &Use);
  bool hasOverlappingMultipleDef(const MachineInstr &MI,
                                 const MachineOperand &MODef, Register Def);

  /// Candidates for deletion.
  SmallSetVector<MachineInstr *, 8> MaybeDeadCopies;

  /// Multimap tracking debug users in current BB
  DenseMap<MachineInstr*, SmallVector<MachineInstr*, 2>> CopyDbgUsers;

  CopyTracker Tracker;

  bool Changed;
};

} // end anonymous namespace

char MachineCopyPropagation::ID = 0;

char &llvm::MachineCopyPropagationID = MachineCopyPropagation::ID;

INITIALIZE_PASS(MachineCopyPropagation, DEBUG_TYPE,
                "Machine Copy Propagation Pass", false, false)

void MachineCopyPropagation::ReadRegister(unsigned Reg, MachineInstr &Reader,
                                          DebugType DT) {
  // If 'Reg' is defined by a copy, the copy is no longer a candidate
  // for elimination. If a copy is "read" by a debug user, record the user
  // for propagation.
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI) {
    if (MachineInstr *Copy = Tracker.findCopyForUnit(*RUI, *TRI)) {
      if (DT == RegularUse) {
        LLVM_DEBUG(dbgs() << "MCP: Copy is used - not dead: "; Copy->dump());
        MaybeDeadCopies.remove(Copy);
      } else {
        CopyDbgUsers[Copy].push_back(&Reader);
      }
    }
  }
}

/// Return true if \p PreviousCopy did copy register \p Src to register \p Def.
/// This fact may have been obscured by sub register usage or may not be true at
/// all even though Src and Def are subregisters of the registers used in
/// PreviousCopy. e.g.
/// isNopCopy("ecx = COPY eax", AX, CX) == true
/// isNopCopy("ecx = COPY eax", AH, CL) == false
static bool isNopCopy(const MachineInstr &PreviousCopy, unsigned Src,
                      unsigned Def, const TargetRegisterInfo *TRI) {
  Register PreviousSrc = PreviousCopy.getOperand(1).getReg();
  Register PreviousDef = PreviousCopy.getOperand(0).getReg();
  if (Src == PreviousSrc && Def == PreviousDef)
    return true;
  if (!TRI->isSubRegister(PreviousSrc, Src))
    return false;
  unsigned SubIdx = TRI->getSubRegIndex(PreviousSrc, Src);
  return SubIdx == TRI->getSubRegIndex(PreviousDef, Def);
}

/// Remove instruction \p Copy if there exists a previous copy that copies the
/// register \p Src to the register \p Def; this may happen indirectly via
/// copies of the super registers.
bool MachineCopyPropagation::eraseIfRedundant(MachineInstr &Copy, unsigned Src,
                                              unsigned Def) {
  // Avoid eliminating a copy from/to a reserved register as we cannot predict
  // the value (Example: The sparc zero register is writable but stays zero).
  if (MRI->isReserved(Src) || MRI->isReserved(Def))
    return false;

  // Search for an existing copy.
  MachineInstr *PrevCopy = Tracker.findAvailCopy(Copy, Def, *TRI);
  if (!PrevCopy)
    return false;

  // Check that the existing copy uses the correct sub registers.
  if (PrevCopy->getOperand(0).isDead())
    return false;
  if (!isNopCopy(*PrevCopy, Src, Def, TRI))
    return false;

  LLVM_DEBUG(dbgs() << "MCP: copy is a NOP, removing: "; Copy.dump());

  // Copy was redundantly redefining either Src or Def. Remove earlier kill
  // flags between Copy and PrevCopy because the value will be reused now.
  assert(Copy.isCopy());
  Register CopyDef = Copy.getOperand(0).getReg();
  assert(CopyDef == Src || CopyDef == Def);
  for (MachineInstr &MI :
       make_range(PrevCopy->getIterator(), Copy.getIterator()))
    MI.clearRegisterKills(CopyDef, TRI);

  Copy.eraseFromParent();
  Changed = true;
  ++NumDeletes;
  return true;
}

bool MachineCopyPropagation::isBackwardPropagatableRegClassCopy(
    const MachineInstr &Copy, const MachineInstr &UseI, unsigned UseIdx) {
  Register Def = Copy.getOperand(0).getReg();

  if (const TargetRegisterClass *URC =
          UseI.getRegClassConstraint(UseIdx, TII, TRI))
    return URC->contains(Def);

  // We don't process further if UseI is a COPY, since forward copy propagation
  // should handle that.
  return false;
}

/// Decide whether we should forward the source of \param Copy to its use in
/// \param UseI based on the physical register class constraints of the opcode
/// and avoiding introducing more cross-class COPYs.
bool MachineCopyPropagation::isForwardableRegClassCopy(const MachineInstr &Copy,
                                                       const MachineInstr &UseI,
                                                       unsigned UseIdx) {

  Register CopySrcReg = Copy.getOperand(1).getReg();

  // If the new register meets the opcode register constraints, then allow
  // forwarding.
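  // (E.g. if the use operand is constrained to GR32 and the copy source is
  // another GR32 register, forwarding cannot break the constraint; the class
  // name is illustrative.)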
  if (const TargetRegisterClass *URC =
          UseI.getRegClassConstraint(UseIdx, TII, TRI))
    return URC->contains(CopySrcReg);

  if (!UseI.isCopy())
    return false;

  /// COPYs don't have register class constraints, so if the user instruction
  /// is a COPY, we just try to avoid introducing additional cross-class
  /// COPYs. For example:
  ///
  ///   RegClassA = COPY RegClassB  // Copy parameter
  ///   ...
  ///   RegClassB = COPY RegClassA  // UseI parameter
  ///
  /// which after forwarding becomes
  ///
  ///   RegClassA = COPY RegClassB
  ///   ...
  ///   RegClassB = COPY RegClassB
  ///
  /// so we have reduced the number of cross-class COPYs and potentially
  /// introduced a nop COPY that can be removed.
  const TargetRegisterClass *UseDstRC =
      TRI->getMinimalPhysRegClass(UseI.getOperand(0).getReg());

  const TargetRegisterClass *SuperRC = UseDstRC;
  for (TargetRegisterClass::sc_iterator SuperRCI = UseDstRC->getSuperClasses();
       SuperRC; SuperRC = *SuperRCI++)
    if (SuperRC->contains(CopySrcReg))
      return true;

  return false;
}

/// Check that \p MI does not have implicit uses that overlap with its \p Use
/// operand (the register being replaced), since these can sometimes be
/// implicitly tied to other operands. For example, on AMDGPU:
///
/// V_MOVRELS_B32_e32 %VGPR2, %M0<imp-use>, %EXEC<imp-use>, %VGPR2_VGPR3_VGPR4_VGPR5<imp-use>
///
/// the %VGPR2 is implicitly tied to the larger reg operand, but we have no
/// way of knowing we need to update the latter when updating the former.
bool MachineCopyPropagation::hasImplicitOverlap(const MachineInstr &MI,
                                                const MachineOperand &Use) {
  for (const MachineOperand &MIUse : MI.uses())
    if (&MIUse != &Use && MIUse.isReg() && MIUse.isImplicit() &&
        MIUse.isUse() && TRI->regsOverlap(Use.getReg(), MIUse.getReg()))
      return true;

  return false;
}

/// For an MI that has multiple definitions, check whether \p MI has
/// a definition that overlaps with another of its definitions.
/// For example, on ARM: umull r9, r9, lr, r0
/// The umull instruction is unpredictable unless RdHi and RdLo are different.
bool MachineCopyPropagation::hasOverlappingMultipleDef(
    const MachineInstr &MI, const MachineOperand &MODef, Register Def) {
  for (const MachineOperand &MIDef : MI.defs()) {
    if ((&MIDef != &MODef) && MIDef.isReg() &&
        TRI->regsOverlap(Def, MIDef.getReg()))
      return true;
  }

  return false;
}

/// Look for available copies whose destination register is used by \p MI and
/// replace the use in \p MI with the copy's source register.
void MachineCopyPropagation::forwardUses(MachineInstr &MI) {
  if (!Tracker.hasAnyCopies())
    return;

  // Look for non-tied explicit uses of registers that are defined by an
  // available COPY and replace them with the COPY's source register.
  for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx < OpEnd;
       ++OpIdx) {
    MachineOperand &MOUse = MI.getOperand(OpIdx);
    // Don't forward into undef use operands. The machine verifier does not
    // treat undef reads as reads, so forwarding could leave a live range that
    // ends on an undef read, which the verifier reports as a live range that
    // does not end on a read of the live-range register.
    if (!MOUse.isReg() || MOUse.isTied() || MOUse.isUndef() || MOUse.isDef() ||
        MOUse.isImplicit())
      continue;

    if (!MOUse.getReg())
      continue;

    // Check that the register is marked 'renamable' so we know it is safe to
    // rename it without violating any constraints that aren't expressed in the
    // IR (e.g. ABI or opcode requirements).
    if (!MOUse.isRenamable())
      continue;

    MachineInstr *Copy = Tracker.findAvailCopy(MI, MOUse.getReg(), *TRI);
    if (!Copy)
      continue;

    Register CopyDstReg = Copy->getOperand(0).getReg();
    const MachineOperand &CopySrc = Copy->getOperand(1);
    Register CopySrcReg = CopySrc.getReg();

    // FIXME: Don't handle partial uses of wider COPYs yet.
    if (MOUse.getReg() != CopyDstReg) {
      LLVM_DEBUG(
          dbgs() << "MCP: FIXME! Not forwarding COPY to sub-register use:\n "
                 << MI);
      continue;
    }

    // Don't forward COPYs of reserved regs unless they are constant.
    if (MRI->isReserved(CopySrcReg) && !MRI->isConstantPhysReg(CopySrcReg))
      continue;

    if (!isForwardableRegClassCopy(*Copy, MI, OpIdx))
      continue;

    if (hasImplicitOverlap(MI, MOUse))
      continue;

    // Check that the instruction is not a copy that partially overwrites the
    // original copy source that we are about to use. The tracker mechanism
    // cannot cope with that.
    if (MI.isCopy() && MI.modifiesRegister(CopySrcReg, TRI) &&
        !MI.definesRegister(CopySrcReg)) {
      LLVM_DEBUG(dbgs() << "MCP: Copy source overlap with dest in " << MI);
      continue;
    }

    if (!DebugCounter::shouldExecute(FwdCounter)) {
      LLVM_DEBUG(dbgs() << "MCP: Skipping forwarding due to debug counter:\n "
                        << MI);
      continue;
    }

    LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MOUse.getReg(), TRI)
                      << "\n with " << printReg(CopySrcReg, TRI)
                      << "\n in " << MI << " from " << *Copy);

    MOUse.setReg(CopySrcReg);
    if (!CopySrc.isRenamable())
      MOUse.setIsRenamable(false);

    LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");

    // Clear kill markers that may have been invalidated.
    for (MachineInstr &KMI :
         make_range(Copy->getIterator(), std::next(MI.getIterator())))
      KMI.clearRegisterKills(CopySrcReg, TRI);

    ++NumCopyForwards;
    Changed = true;
  }
}

void MachineCopyPropagation::ForwardCopyPropagateBlock(MachineBasicBlock &MBB) {
  LLVM_DEBUG(dbgs() << "MCP: ForwardCopyPropagateBlock " << MBB.getName()
                    << "\n");

  for (MachineBasicBlock::iterator I = MBB.begin(), E = MBB.end(); I != E; ) {
    MachineInstr *MI = &*I;
    ++I;

    // Analyze copies (which don't overlap themselves).
    if (MI->isCopy() && !TRI->regsOverlap(MI->getOperand(0).getReg(),
                                          MI->getOperand(1).getReg())) {
      Register Def = MI->getOperand(0).getReg();
      Register Src = MI->getOperand(1).getReg();

      assert(!Register::isVirtualRegister(Def) &&
             !Register::isVirtualRegister(Src) &&
             "MachineCopyPropagation should be run after register allocation!");

      // If the two copies cancel out and the source of the first copy
      // hasn't been overridden, eliminate the second one. e.g.
      //   %ecx = COPY %eax
      //   ... nothing clobbered eax.
      //   %ecx = COPY %eax
      // =>
      //   %ecx = COPY %eax
      if (eraseIfRedundant(*MI, Def, Src) || eraseIfRedundant(*MI, Src, Def))
        continue;

      forwardUses(*MI);

      // Src may have been changed by forwardUses().
      Src = MI->getOperand(1).getReg();

      // If Src is defined by a previous copy, the previous copy cannot be
      // eliminated.
      ReadRegister(Src, *MI, RegularUse);
      for (const MachineOperand &MO : MI->implicit_operands()) {
        if (!MO.isReg() || !MO.readsReg())
          continue;
        Register Reg = MO.getReg();
        if (!Reg)
          continue;
        ReadRegister(Reg, *MI, RegularUse);
      }

      LLVM_DEBUG(dbgs() << "MCP: Copy is a deletion candidate: "; MI->dump());

      // Copy is now a candidate for deletion.
      if (!MRI->isReserved(Def))
        MaybeDeadCopies.insert(MI);

      // If 'Def' was previously the source of another copy, then that earlier
      // copy's source is no longer available. e.g.
      //   %xmm9 = copy %xmm2
      //   ...
      //   %xmm2 = copy %xmm0
      //   ...
      //   %xmm2 = copy %xmm9
      Tracker.clobberRegister(Def, *TRI);
      for (const MachineOperand &MO : MI->implicit_operands()) {
        if (!MO.isReg() || !MO.isDef())
          continue;
        Register Reg = MO.getReg();
        if (!Reg)
          continue;
        Tracker.clobberRegister(Reg, *TRI);
      }

      Tracker.trackCopy(MI, *TRI);

      continue;
    }

    // Clobber any earlyclobber regs first.
    for (const MachineOperand &MO : MI->operands())
      if (MO.isReg() && MO.isEarlyClobber()) {
        Register Reg = MO.getReg();
        // If we have a tied earlyclobber, that means it is also read by this
        // instruction, so we need to make sure we don't remove it as dead
        // later.
        if (MO.isTied())
          ReadRegister(Reg, *MI, RegularUse);
        Tracker.clobberRegister(Reg, *TRI);
      }

    forwardUses(*MI);

    // Not a copy.
    SmallVector<unsigned, 2> Defs;
    const MachineOperand *RegMask = nullptr;
    for (const MachineOperand &MO : MI->operands()) {
      if (MO.isRegMask())
        RegMask = &MO;
      if (!MO.isReg())
        continue;
      Register Reg = MO.getReg();
      if (!Reg)
        continue;

      assert(!Register::isVirtualRegister(Reg) &&
             "MachineCopyPropagation should be run after register allocation!");

      if (MO.isDef() && !MO.isEarlyClobber()) {
        Defs.push_back(Reg);
        continue;
      } else if (MO.readsReg())
        ReadRegister(Reg, *MI, MO.isDebug() ? DebugUse : RegularUse);
    }

    // The instruction has a register mask operand which means that it clobbers
    // a large set of registers. Treat clobbered registers the same way as
    // defined registers.
    if (RegMask) {
      // Erase any MaybeDeadCopies whose destination register is clobbered.
      for (SmallSetVector<MachineInstr *, 8>::iterator DI =
               MaybeDeadCopies.begin();
           DI != MaybeDeadCopies.end();) {
        MachineInstr *MaybeDead = *DI;
        Register Reg = MaybeDead->getOperand(0).getReg();
        assert(!MRI->isReserved(Reg));

        if (!RegMask->clobbersPhysReg(Reg)) {
          ++DI;
          continue;
        }

        LLVM_DEBUG(dbgs() << "MCP: Removing copy due to regmask clobbering: ";
                   MaybeDead->dump());

        // Make sure we invalidate any entries in the copy maps before erasing
        // the instruction.
        Tracker.clobberRegister(Reg, *TRI);

        // erase() will return the next valid iterator pointing to the next
        // element after the erased one.
        DI = MaybeDeadCopies.erase(DI);
        MaybeDead->eraseFromParent();
        Changed = true;
        ++NumDeletes;
      }
    }

    // Any previous copy that defines or reads one of the Defs is no longer
    // available.
    for (unsigned Reg : Defs)
      Tracker.clobberRegister(Reg, *TRI);
  }

  // If MBB doesn't have successors, delete the copies whose defs are not used.
  // If MBB does have successors, then conservatively assume the defs are
  // live-out since we don't want to trust live-in lists.
  if (MBB.succ_empty()) {
    for (MachineInstr *MaybeDead : MaybeDeadCopies) {
      LLVM_DEBUG(dbgs() << "MCP: Removing copy due to no live-out succ: ";
                 MaybeDead->dump());
      assert(!MRI->isReserved(MaybeDead->getOperand(0).getReg()));

      // Update matching debug values, if any.
      assert(MaybeDead->isCopy());
      unsigned SrcReg = MaybeDead->getOperand(1).getReg();
      MRI->updateDbgUsersToReg(SrcReg, CopyDbgUsers[MaybeDead]);

      MaybeDead->eraseFromParent();
      Changed = true;
      ++NumDeletes;
    }
  }

  MaybeDeadCopies.clear();
  CopyDbgUsers.clear();
  Tracker.clear();
}

static bool isBackwardPropagatableCopy(MachineInstr &MI,
                                       const MachineRegisterInfo &MRI) {
  assert(MI.isCopy() && "MI is expected to be a COPY");
  Register Def = MI.getOperand(0).getReg();
  Register Src = MI.getOperand(1).getReg();

  if (!Def || !Src)
    return false;

  if (MRI.isReserved(Def) || MRI.isReserved(Src))
    return false;

  return MI.getOperand(1).isRenamable() && MI.getOperand(1).isKill();
}

void MachineCopyPropagation::propagateDefs(MachineInstr &MI) {
  if (!Tracker.hasAnyCopies())
    return;

  for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx != OpEnd;
       ++OpIdx) {
    MachineOperand &MODef = MI.getOperand(OpIdx);

    if (!MODef.isReg() || MODef.isUse())
      continue;

    // Ignore non-trivial cases.
    if (MODef.isTied() || MODef.isUndef() || MODef.isImplicit())
      continue;

    if (!MODef.getReg())
      continue;

    // Only process defs marked 'renamable', so we know it is safe to rename
    // them.
    if (!MODef.isRenamable())
      continue;

    MachineInstr *Copy =
        Tracker.findAvailBackwardCopy(MI, MODef.getReg(), *TRI);
    if (!Copy)
      continue;

    Register Def = Copy->getOperand(0).getReg();
    Register Src = Copy->getOperand(1).getReg();

    if (MODef.getReg() != Src)
      continue;

    if (!isBackwardPropagatableRegClassCopy(*Copy, MI, OpIdx))
      continue;

    if (hasImplicitOverlap(MI, MODef))
      continue;

    if (hasOverlappingMultipleDef(MI, MODef, Def))
      continue;

    LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MODef.getReg(), TRI)
                      << "\n with " << printReg(Def, TRI) << "\n in "
                      << MI << " from " << *Copy);

    MODef.setReg(Def);
    MODef.setIsRenamable(Copy->getOperand(0).isRenamable());

    LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");
    MaybeDeadCopies.insert(Copy);
    Changed = true;
    ++NumCopyBackwardPropagated;
  }
}

void MachineCopyPropagation::BackwardCopyPropagateBlock(
    MachineBasicBlock &MBB) {
  LLVM_DEBUG(dbgs() << "MCP: BackwardCopyPropagateBlock " << MBB.getName()
                    << "\n");

  for (MachineBasicBlock::reverse_iterator I = MBB.rbegin(), E = MBB.rend();
       I != E;) {
    MachineInstr *MI = &*I;
    ++I;

    // Ignore non-trivial COPYs.
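    // Only plain two-operand COPYs whose source and destination do not overlap
    // are tracked here; COPYs carrying extra implicit operands fall through to
    // the generic invalidation below.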
    if (MI->isCopy() && MI->getNumOperands() == 2 &&
        !TRI->regsOverlap(MI->getOperand(0).getReg(),
                          MI->getOperand(1).getReg())) {

      Register Def = MI->getOperand(0).getReg();
      Register Src = MI->getOperand(1).getReg();

      // Unlike forward cp, we don't invoke propagateDefs here,
      // just let forward cp do COPY-to-COPY propagation.
      if (isBackwardPropagatableCopy(*MI, *MRI)) {
        Tracker.invalidateRegister(Src, *TRI);
        Tracker.invalidateRegister(Def, *TRI);
        Tracker.trackCopy(MI, *TRI);
        continue;
      }
    }

    // Invalidate any earlyclobber regs first.
    for (const MachineOperand &MO : MI->operands())
      if (MO.isReg() && MO.isEarlyClobber()) {
        Register Reg = MO.getReg();
        if (!Reg)
          continue;
        Tracker.invalidateRegister(Reg, *TRI);
      }

    propagateDefs(*MI);
    for (const MachineOperand &MO : MI->operands()) {
      if (!MO.isReg())
        continue;

      if (!MO.getReg())
        continue;

      if (MO.isDef())
        Tracker.invalidateRegister(MO.getReg(), *TRI);

      if (MO.readsReg())
        Tracker.invalidateRegister(MO.getReg(), *TRI);
    }
  }

  for (auto *Copy : MaybeDeadCopies) {
    Copy->eraseFromParent();
    ++NumDeletes;
  }

  MaybeDeadCopies.clear();
  CopyDbgUsers.clear();
  Tracker.clear();
}

bool MachineCopyPropagation::runOnMachineFunction(MachineFunction &MF) {
  if (skipFunction(MF.getFunction()))
    return false;

  Changed = false;

  TRI = MF.getSubtarget().getRegisterInfo();
  TII = MF.getSubtarget().getInstrInfo();
  MRI = &MF.getRegInfo();

  for (MachineBasicBlock &MBB : MF) {
    BackwardCopyPropagateBlock(MBB);
    ForwardCopyPropagateBlock(MBB);
  }

  return Changed;
}
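
// A minimal way to exercise this pass in isolation is to run it on a MIR test
// case via its registered pass name "machine-cp" (the input file name below is
// just an example):
//
//   llc -mtriple=x86_64-- -run-pass=machine-cp -verify-machineinstrs \
//       -o - machine-copy-prop.mir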