//===-- WebAssemblyRegStackify.cpp - Register Stackification --------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// \brief This file implements a register stacking pass.
///
/// This pass reorders instructions to put register uses and defs in an order
/// such that they form single-use expression trees. Registers fitting this
/// form are then marked as "stackified", meaning references to them are
/// replaced by "push" and "pop" from the stack.
///
/// This is primarily a code size optimization, since temporary values on the
/// expression stack don't need to be named.
///
//===----------------------------------------------------------------------===//

#include "WebAssembly.h"
#include "MCTargetDesc/WebAssemblyMCTargetDesc.h" // for WebAssembly::ARGUMENT_*
#include "WebAssemblyMachineFunctionInfo.h"
#include "WebAssemblySubtarget.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/MachineBlockFrequencyInfo.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;

#define DEBUG_TYPE "wasm-reg-stackify"

namespace {
class WebAssemblyRegStackify final : public MachineFunctionPass {
  const char *getPassName() const override {
    return "WebAssembly Register Stackify";
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    AU.addRequired<AAResultsWrapperPass>();
    AU.addRequired<MachineDominatorTree>();
    AU.addRequired<LiveIntervals>();
    AU.addPreserved<MachineBlockFrequencyInfo>();
    AU.addPreserved<SlotIndexes>();
    AU.addPreserved<LiveIntervals>();
    AU.addPreservedID(LiveVariablesID);
    AU.addPreserved<MachineDominatorTree>();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

public:
  static char ID; // Pass identification, replacement for typeid
  WebAssemblyRegStackify() : MachineFunctionPass(ID) {}
};
} // end anonymous namespace

char WebAssemblyRegStackify::ID = 0;
FunctionPass *llvm::createWebAssemblyRegStackify() {
  return new WebAssemblyRegStackify();
}

// Decorate the given instruction with implicit operands that enforce the
// expression stack ordering constraints for an instruction which is on
// the expression stack.
static void ImposeStackOrdering(MachineInstr *MI) {
  // Write the opaque EXPR_STACK register.
  if (!MI->definesRegister(WebAssembly::EXPR_STACK))
    MI->addOperand(MachineOperand::CreateReg(WebAssembly::EXPR_STACK,
                                             /*isDef=*/true,
                                             /*isImp=*/true));

  // Also read the opaque EXPR_STACK register.
  if (!MI->readsRegister(WebAssembly::EXPR_STACK))
    MI->addOperand(MachineOperand::CreateReg(WebAssembly::EXPR_STACK,
                                             /*isDef=*/false,
                                             /*isImp=*/true));
}
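// As an illustration (a sketch, not taken from an actual dump; register
// numbers are made up): after ImposeStackOrdering, a stackified add carries
// operands along the lines of
//
//   %vreg0<def> = ADD_I32 %vreg1, %vreg2,
//                 %EXPR_STACK<imp-def>, %EXPR_STACK<imp-use>
//
// The paired implicit def and use of EXPR_STACK chain stackified
// instructions together so later passes can't reorder them out of
// expression-stack order.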
// Determine whether a call to the callee referenced by
// MI->getOperand(CalleeOpNo) reads memory, writes memory, and/or has side
// effects.
static void QueryCallee(const MachineInstr *MI, unsigned CalleeOpNo,
                        bool &Read, bool &Write, bool &Effects) {
  const MachineOperand &MO = MI->getOperand(CalleeOpNo);
  if (MO.isGlobal()) {
    const Constant *GV = MO.getGlobal();
    if (const GlobalAlias *GA = dyn_cast<GlobalAlias>(GV))
      if (!GA->isInterposable())
        GV = GA->getAliasee();

    if (const Function *F = dyn_cast<Function>(GV)) {
      if (!F->doesNotThrow())
        Effects = true;
      if (F->doesNotAccessMemory())
        return;
      if (F->onlyReadsMemory()) {
        Read = true;
        return;
      }
    }
  }

  // Assume the worst.
  Write = true;
  Read = true;
  Effects = true;
}
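// Typical usage (a sketch): callers initialize all three flags to false and
// let the query functions set them, e.g.
//
//   bool Read = false, Write = false, Effects = false;
//   Query(Def, AA, Read, Write, Effects);
//
// as IsSafeToMove does below; the flags only ever transition to true.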
// Determine whether MI reads memory, writes memory, and/or has side
// effects.
static void Query(const MachineInstr *MI, AliasAnalysis &AA,
                  bool &Read, bool &Write, bool &Effects) {
  assert(!MI->isPosition());
  assert(!MI->isTerminator());
  assert(!MI->isDebugValue());

  // Check for loads.
  if (MI->mayLoad() && !MI->isInvariantLoad(&AA))
    Read = true;

  // Check for stores.
  if (MI->mayStore())
    Write = true;
  else if (MI->hasOrderedMemoryRef()) {
    switch (MI->getOpcode()) {
    case WebAssembly::DIV_S_I32: case WebAssembly::DIV_S_I64:
    case WebAssembly::REM_S_I32: case WebAssembly::REM_S_I64:
    case WebAssembly::DIV_U_I32: case WebAssembly::DIV_U_I64:
    case WebAssembly::REM_U_I32: case WebAssembly::REM_U_I64:
    case WebAssembly::I32_TRUNC_S_F32: case WebAssembly::I64_TRUNC_S_F32:
    case WebAssembly::I32_TRUNC_S_F64: case WebAssembly::I64_TRUNC_S_F64:
    case WebAssembly::I32_TRUNC_U_F32: case WebAssembly::I64_TRUNC_U_F32:
    case WebAssembly::I32_TRUNC_U_F64: case WebAssembly::I64_TRUNC_U_F64:
      // These instructions have hasUnmodeledSideEffects() returning true
      // because they trap on overflow and invalid, so they can't be
      // arbitrarily moved; however, hasOrderedMemoryRef() interprets this
      // plus their lack of memoperands as having a potential unknown memory
      // reference.
      break;
    default:
      // Record potential stores, unless it's a call, as calls are handled
      // specially below.
      if (!MI->isCall())
        Write = true;
      break;
    }
  }

  // Check for side effects.
  if (MI->hasUnmodeledSideEffects()) {
    switch (MI->getOpcode()) {
    case WebAssembly::DIV_S_I32: case WebAssembly::DIV_S_I64:
    case WebAssembly::REM_S_I32: case WebAssembly::REM_S_I64:
    case WebAssembly::DIV_U_I32: case WebAssembly::DIV_U_I64:
    case WebAssembly::REM_U_I32: case WebAssembly::REM_U_I64:
    case WebAssembly::I32_TRUNC_S_F32: case WebAssembly::I64_TRUNC_S_F32:
    case WebAssembly::I32_TRUNC_S_F64: case WebAssembly::I64_TRUNC_S_F64:
    case WebAssembly::I32_TRUNC_U_F32: case WebAssembly::I64_TRUNC_U_F32:
    case WebAssembly::I32_TRUNC_U_F64: case WebAssembly::I64_TRUNC_U_F64:
      // These instructions have hasUnmodeledSideEffects() returning true
      // because they trap on overflow and invalid, so they can't be
      // arbitrarily moved; however, in the specific case of register
      // stackifying, it is safe to move them because overflow and invalid
      // are Undefined Behavior.
      break;
    default:
      Effects = true;
      break;
    }
  }

  // Analyze calls.
  if (MI->isCall()) {
    switch (MI->getOpcode()) {
    case WebAssembly::CALL_VOID:
      QueryCallee(MI, 0, Read, Write, Effects);
      break;
    case WebAssembly::CALL_I32:
    case WebAssembly::CALL_I64:
    case WebAssembly::CALL_F32:
    case WebAssembly::CALL_F64:
      QueryCallee(MI, 1, Read, Write, Effects);
      break;
    case WebAssembly::CALL_INDIRECT_VOID:
    case WebAssembly::CALL_INDIRECT_I32:
    case WebAssembly::CALL_INDIRECT_I64:
    case WebAssembly::CALL_INDIRECT_F32:
    case WebAssembly::CALL_INDIRECT_F64:
      Read = true;
      Write = true;
      Effects = true;
      break;
    default:
      llvm_unreachable("unexpected call opcode");
    }
  }
}

// Test whether Def is safe and profitable to rematerialize.
static bool ShouldRematerialize(const MachineInstr *Def, AliasAnalysis &AA,
                                const WebAssemblyInstrInfo *TII) {
  return Def->isAsCheapAsAMove() && TII->isTriviallyReMaterializable(Def, &AA);
}

/// Identify the definition for this register at this point.
static MachineInstr *GetVRegDef(unsigned Reg, const MachineInstr *Insert,
                                const MachineRegisterInfo &MRI,
                                const LiveIntervals &LIS) {
  // Most registers are in SSA form here so we try a quick MRI query first.
  if (MachineInstr *Def = MRI.getUniqueVRegDef(Reg))
    return Def;

  // MRI doesn't know what the Def is. Try asking LIS.
  if (const VNInfo *ValNo = LIS.getInterval(Reg).getVNInfoBefore(
          LIS.getInstructionIndex(*Insert)))
    return LIS.getInstructionFromIndex(ValNo->def);

  return nullptr;
}
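// For illustration (a sketch; register numbers are made up): with two defs
// of the same virtual register in a block,
//
//   %vreg5 = CONST_I32 0   ; def A
//   ...
//   %vreg5 = CONST_I32 1   ; def B
//   ... use of %vreg5 ...  ; Insert
//
// getUniqueVRegDef returns null, and the LiveIntervals query above resolves
// the use at Insert to def B, the value live immediately before Insert.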
// Test whether it's safe to move Def to just before Insert.
// TODO: Compute memory dependencies in a way that doesn't require always
// walking the block.
// TODO: Compute memory dependencies in a way that uses AliasAnalysis to be
// more precise.
static bool IsSafeToMove(const MachineInstr *Def, const MachineInstr *Insert,
                         AliasAnalysis &AA, const LiveIntervals &LIS,
                         const MachineRegisterInfo &MRI) {
  assert(Def->getParent() == Insert->getParent());

  // Check for register dependencies.
  for (const MachineOperand &MO : Def->operands()) {
    if (!MO.isReg() || MO.isUndef())
      continue;
    unsigned Reg = MO.getReg();

    // If the register is dead here and at Insert, ignore it.
    if (MO.isDead() && Insert->definesRegister(Reg) &&
        !Insert->readsRegister(Reg))
      continue;

    if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
      // Ignore ARGUMENTS; it's just used to keep the ARGUMENT_* instructions
      // from moving down, and we've already checked for that.
      if (Reg == WebAssembly::ARGUMENTS)
        continue;
      // If the physical register is never modified, ignore it.
      if (!MRI.isPhysRegModified(Reg))
        continue;
      // Otherwise, it's a physical register with unknown liveness.
      return false;
    }

    // Ask LiveIntervals whether moving this virtual register use or def to
    // Insert will change which value numbers are seen.
    const LiveInterval &LI = LIS.getInterval(Reg);
    VNInfo *DefVNI =
        MO.isDef() ? LI.getVNInfoAt(LIS.getInstructionIndex(*Def).getRegSlot())
                   : LI.getVNInfoBefore(LIS.getInstructionIndex(*Def));
    assert(DefVNI && "Instruction input missing value number");
    VNInfo *InsVNI = LI.getVNInfoBefore(LIS.getInstructionIndex(*Insert));
    if (InsVNI && DefVNI != InsVNI)
      return false;
  }

  bool Read = false, Write = false, Effects = false;
  Query(Def, AA, Read, Write, Effects);

  // If the instruction does not access memory and has no side effects, it has
  // no additional dependencies.
  if (!Read && !Write && !Effects)
    return true;

  // Scan through the intervening instructions between Def and Insert.
  MachineBasicBlock::const_iterator D(Def), I(Insert);
  for (--I; I != D; --I) {
    bool InterveningRead = false;
    bool InterveningWrite = false;
    bool InterveningEffects = false;
    Query(&*I, AA, InterveningRead, InterveningWrite, InterveningEffects);
    if (Effects && InterveningEffects)
      return false;
    if (Read && InterveningWrite)
      return false;
    if (Write && (InterveningRead || InterveningWrite))
      return false;
  }

  return true;
}
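// In other words (a summary of the scan above): a load may not move past an
// intervening store, a store may not move past an intervening load or store,
// and an instruction with unmodeled side effects may not move past another
// such instruction.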
/// Test whether OneUse, a use of Reg, dominates all of Reg's other uses.
static bool OneUseDominatesOtherUses(unsigned Reg, const MachineOperand &OneUse,
                                     const MachineBasicBlock &MBB,
                                     const MachineRegisterInfo &MRI,
                                     const MachineDominatorTree &MDT,
                                     LiveIntervals &LIS) {
  const LiveInterval &LI = LIS.getInterval(Reg);

  const MachineInstr *OneUseInst = OneUse.getParent();
  VNInfo *OneUseVNI = LI.getVNInfoBefore(LIS.getInstructionIndex(*OneUseInst));

  for (const MachineOperand &Use : MRI.use_operands(Reg)) {
    if (&Use == &OneUse)
      continue;

    const MachineInstr *UseInst = Use.getParent();
    VNInfo *UseVNI = LI.getVNInfoBefore(LIS.getInstructionIndex(*UseInst));

    if (UseVNI != OneUseVNI)
      continue;

    if (UseInst->getOpcode() == TargetOpcode::PHI) {
      // Test that the PHI use, which happens on the CFG edge rather than
      // within the PHI's own block, is dominated by the one selected use.
      const MachineBasicBlock *Pred =
          UseInst->getOperand(&Use - &UseInst->getOperand(0) + 1).getMBB();
      if (!MDT.dominates(&MBB, Pred))
        return false;
    } else if (UseInst == OneUseInst) {
      // Another use in the same instruction. We need to ensure that the one
      // selected use happens "before" it.
      if (&OneUse > &Use)
        return false;
    } else {
      // Test that the use is dominated by the one selected use.
      if (!MDT.dominates(OneUseInst, UseInst))
        return false;
    }
  }
  return true;
}

/// Get the appropriate tee_local opcode for the given register class.
static unsigned GetTeeLocalOpcode(const TargetRegisterClass *RC) {
  if (RC == &WebAssembly::I32RegClass)
    return WebAssembly::TEE_LOCAL_I32;
  if (RC == &WebAssembly::I64RegClass)
    return WebAssembly::TEE_LOCAL_I64;
  if (RC == &WebAssembly::F32RegClass)
    return WebAssembly::TEE_LOCAL_F32;
  if (RC == &WebAssembly::F64RegClass)
    return WebAssembly::TEE_LOCAL_F64;
  llvm_unreachable("Unexpected register class");
}

// Shrink LI to its uses, cleaning up LI.
static void ShrinkToUses(LiveInterval &LI, LiveIntervals &LIS) {
  if (LIS.shrinkToUses(&LI)) {
    SmallVector<LiveInterval *, 4> SplitLIs;
    LIS.splitSeparateComponents(LI, SplitLIs);
  }
}

/// A single-use def in the same block with no intervening memory or register
/// dependencies; move the def down and nest it with the current instruction.
static MachineInstr *MoveForSingleUse(unsigned Reg, MachineOperand &Op,
                                      MachineInstr *Def,
                                      MachineBasicBlock &MBB,
                                      MachineInstr *Insert, LiveIntervals &LIS,
                                      WebAssemblyFunctionInfo &MFI,
                                      MachineRegisterInfo &MRI) {
  DEBUG(dbgs() << "Move for single use: "; Def->dump());

  MBB.splice(Insert, &MBB, Def);
  LIS.handleMove(*Def);

  if (MRI.hasOneDef(Reg)) {
    MFI.stackifyVReg(Reg);
  } else {
    unsigned NewReg = MRI.createVirtualRegister(MRI.getRegClass(Reg));
    Def->getOperand(0).setReg(NewReg);
    Op.setReg(NewReg);

    // Tell LiveIntervals about the new register.
    LIS.createAndComputeVirtRegInterval(NewReg);

    // Tell LiveIntervals about the changes to the old register.
    LiveInterval &LI = LIS.getInterval(Reg);
    LIS.removeVRegDefAt(LI, LIS.getInstructionIndex(*Def).getRegSlot());
    ShrinkToUses(LI, LIS);

    MFI.stackifyVReg(NewReg);

    DEBUG(dbgs() << " - Replaced register: "; Def->dump());
  }

  ImposeStackOrdering(Def);
  return Def;
}

/// A trivially cloneable instruction; clone it and nest the new copy with the
/// current instruction.
static MachineInstr *
RematerializeCheapDef(unsigned Reg, MachineOperand &Op, MachineInstr *Def,
                      MachineBasicBlock &MBB, MachineInstr *Insert,
                      LiveIntervals &LIS, WebAssemblyFunctionInfo &MFI,
                      MachineRegisterInfo &MRI, const WebAssemblyInstrInfo *TII,
                      const WebAssemblyRegisterInfo *TRI) {
  DEBUG(dbgs() << "Rematerializing cheap def: "; Def->dump());
  DEBUG(dbgs() << " - for use in "; Op.getParent()->dump());

  unsigned NewReg = MRI.createVirtualRegister(MRI.getRegClass(Reg));
  TII->reMaterialize(MBB, Insert, NewReg, 0, Def, *TRI);
  Op.setReg(NewReg);
  MachineInstr *Clone = &*std::prev(MachineBasicBlock::instr_iterator(Insert));
  LIS.InsertMachineInstrInMaps(*Clone);
  LIS.createAndComputeVirtRegInterval(NewReg);
  MFI.stackifyVReg(NewReg);
  ImposeStackOrdering(Clone);

  DEBUG(dbgs() << " - Cloned to "; Clone->dump());

  // Shrink the interval.
  bool IsDead = MRI.use_empty(Reg);
  if (!IsDead) {
    LiveInterval &LI = LIS.getInterval(Reg);
    ShrinkToUses(LI, LIS);
    IsDead = !LI.liveAt(LIS.getInstructionIndex(*Def).getDeadSlot());
  }

  // If that was the last use of the original, delete the original.
  if (IsDead) {
    DEBUG(dbgs() << " - Deleting original\n");
    SlotIndex Idx = LIS.getInstructionIndex(*Def).getRegSlot();
    LIS.removePhysRegDefAt(WebAssembly::ARGUMENTS, Idx);
    LIS.removeInterval(Reg);
    LIS.RemoveMachineInstrFromMaps(*Def);
    Def->eraseFromParent();
  }

  return Clone;
}
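// For example (a sketch; register numbers are made up): a constant with a
// distant use is cheaper to clone next to that use than to keep live in a
// local across the gap:
//
//   %vreg3 = CONST_I32 42   ; original def
//   ...
//   %vreg9 = CONST_I32 42   ; clone placed just before the use
//   INST ..., %vreg9, ...
//
// If the clone satisfied the last remaining use, the original is deleted.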
/// A multiple-use def in the same block with no intervening memory or register
/// dependencies; move the def down, nest it with the current instruction, and
/// insert a tee_local to satisfy the rest of the uses. As an illustration,
/// rewrite this:
///
///    Reg = INST ...        // Def
///    INST ..., Reg, ...    // Insert
///    INST ..., Reg, ...
///    INST ..., Reg, ...
///
/// to this:
///
///    DefReg = INST ...     // Def (to become the new Insert)
///    TeeReg, NewReg = TEE_LOCAL_... DefReg
///    INST ..., TeeReg, ... // Insert
///    INST ..., NewReg, ...
///    INST ..., NewReg, ...
///
/// with DefReg and TeeReg stackified. This eliminates a get_local from the
/// resulting code.
static MachineInstr *MoveAndTeeForMultiUse(
    unsigned Reg, MachineOperand &Op, MachineInstr *Def, MachineBasicBlock &MBB,
    MachineInstr *Insert, LiveIntervals &LIS, WebAssemblyFunctionInfo &MFI,
    MachineRegisterInfo &MRI, const WebAssemblyInstrInfo *TII) {
  DEBUG(dbgs() << "Move and tee for multi-use: "; Def->dump());

  MBB.splice(Insert, &MBB, Def);
  LIS.handleMove(*Def);
  const auto *RegClass = MRI.getRegClass(Reg);
  unsigned NewReg = MRI.createVirtualRegister(RegClass);
  unsigned TeeReg = MRI.createVirtualRegister(RegClass);
  unsigned DefReg = MRI.createVirtualRegister(RegClass);
  MachineOperand &DefMO = Def->getOperand(0);
  MRI.replaceRegWith(Reg, NewReg);
  MachineInstr *Tee = BuildMI(MBB, Insert, Insert->getDebugLoc(),
                              TII->get(GetTeeLocalOpcode(RegClass)), TeeReg)
                          .addReg(NewReg, RegState::Define)
                          .addReg(DefReg, getUndefRegState(DefMO.isDead()));
  Op.setReg(TeeReg);
  DefMO.setReg(DefReg);
  LIS.InsertMachineInstrInMaps(*Tee);
  LIS.removeInterval(Reg);
  LIS.createAndComputeVirtRegInterval(NewReg);
  LIS.createAndComputeVirtRegInterval(TeeReg);
  LIS.createAndComputeVirtRegInterval(DefReg);
  MFI.stackifyVReg(DefReg);
  MFI.stackifyVReg(TeeReg);
  ImposeStackOrdering(Def);
  ImposeStackOrdering(Tee);
  return Def;
}
namespace {
/// A stack for walking the tree of instructions being built, visiting the
/// MachineOperands in DFS order.
class TreeWalkerState {
  typedef MachineInstr::mop_iterator mop_iterator;
  typedef std::reverse_iterator<mop_iterator> mop_reverse_iterator;
  typedef iterator_range<mop_reverse_iterator> RangeTy;
  SmallVector<RangeTy, 4> Worklist;

public:
  explicit TreeWalkerState(MachineInstr *Insert) {
    const iterator_range<mop_iterator> &Range = Insert->explicit_uses();
    if (Range.begin() != Range.end())
      Worklist.push_back(reverse(Range));
  }

  bool Done() const { return Worklist.empty(); }

  MachineOperand &Pop() {
    RangeTy &Range = Worklist.back();
    MachineOperand &Op = *Range.begin();
    Range = drop_begin(Range, 1);
    if (Range.begin() == Range.end())
      Worklist.pop_back();
    assert((Worklist.empty() ||
            Worklist.back().begin() != Worklist.back().end()) &&
           "Empty ranges shouldn't remain in the worklist");
    return Op;
  }

  /// Push Instr's operands onto the stack to be visited.
  void PushOperands(MachineInstr *Instr) {
    const iterator_range<mop_iterator> &Range = Instr->explicit_uses();
    if (Range.begin() != Range.end())
      Worklist.push_back(reverse(Range));
  }

  /// Some of Instr's operands are on the top of the stack; remove them and
  /// re-insert them starting from the beginning (because we've commuted them).
  void ResetTopOperands(MachineInstr *Instr) {
    assert(HasRemainingOperands(Instr) &&
           "Resetting operands should only be done when the instruction has "
           "an operand still on the stack");
    Worklist.back() = reverse(Instr->explicit_uses());
  }

  /// Test whether Instr has operands remaining to be visited at the top of
  /// the stack.
  bool HasRemainingOperands(const MachineInstr *Instr) const {
    if (Worklist.empty())
      return false;
    const RangeTy &Range = Worklist.back();
    return Range.begin() != Range.end() && Range.begin()->getParent() == Instr;
  }

  /// Test whether the given register is present on the stack, indicating an
  /// operand in the tree that we haven't visited yet. Moving a definition of
  /// Reg to a point in the tree after that would change its value.
  bool IsOnStack(unsigned Reg) const {
    for (const RangeTy &Range : Worklist)
      for (const MachineOperand &MO : Range)
        if (MO.isReg() && MO.getReg() == Reg)
          return true;
    return false;
  }
};

/// State to keep track of whether commuting is in flight or whether it's been
/// tried for the current instruction and didn't work.
class CommutingState {
  /// There are effectively three states: the initial state where we haven't
  /// started commuting anything and we don't know anything yet, the tentative
  /// state where we've commuted the operands of the current instruction and
  /// are revisiting it, and the declined state where we've reverted the
  /// operands back to their original order and will no longer commute it
  /// further.
  bool TentativelyCommuting;
  bool Declined;

  /// During the tentative state, these hold the operand indices of the
  /// commuted operands.
  unsigned Operand0, Operand1;

public:
  CommutingState() : TentativelyCommuting(false), Declined(false) {}

  /// Stackification for an operand was not successful due to ordering
  /// constraints. If possible, and if we haven't already tried it and declined
  /// it, commute Insert's operands and prepare to revisit it.
  void MaybeCommute(MachineInstr *Insert, TreeWalkerState &TreeWalker,
                    const WebAssemblyInstrInfo *TII) {
    if (TentativelyCommuting) {
      assert(!Declined &&
             "Don't decline commuting until you've finished trying it");
      // Commuting didn't help. Revert it.
      TII->commuteInstruction(Insert, /*NewMI=*/false, Operand0, Operand1);
      TentativelyCommuting = false;
      Declined = true;
    } else if (!Declined && TreeWalker.HasRemainingOperands(Insert)) {
      Operand0 = TargetInstrInfo::CommuteAnyOperandIndex;
      Operand1 = TargetInstrInfo::CommuteAnyOperandIndex;
      if (TII->findCommutedOpIndices(Insert, Operand0, Operand1)) {
        // Tentatively commute the operands and try again.
        TII->commuteInstruction(Insert, /*NewMI=*/false, Operand0, Operand1);
        TreeWalker.ResetTopOperands(Insert);
        TentativelyCommuting = true;
        Declined = false;
      }
    }
  }
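  // Walkthrough (a sketch): stackification of one of Insert's operands fails
  // for ordering reasons; MaybeCommute tentatively swaps two commutable
  // operands and the tree walker revisits them. If stackification fails
  // again, the swap is reverted and commuting is declined for Insert.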
  /// Stackification for some operand was successful. Reset to the default
  /// state.
  void Reset() {
    TentativelyCommuting = false;
    Declined = false;
  }
};
} // end anonymous namespace

bool WebAssemblyRegStackify::runOnMachineFunction(MachineFunction &MF) {
  DEBUG(dbgs() << "********** Register Stackifying **********\n"
                  "********** Function: "
               << MF.getName() << '\n');

  bool Changed = false;
  MachineRegisterInfo &MRI = MF.getRegInfo();
  WebAssemblyFunctionInfo &MFI = *MF.getInfo<WebAssemblyFunctionInfo>();
  const auto *TII = MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();
  const auto *TRI = MF.getSubtarget<WebAssemblySubtarget>().getRegisterInfo();
  AliasAnalysis &AA = getAnalysis<AAResultsWrapperPass>().getAAResults();
  MachineDominatorTree &MDT = getAnalysis<MachineDominatorTree>();
  LiveIntervals &LIS = getAnalysis<LiveIntervals>();

  // Walk the instructions from the bottom up. Currently we don't look past
  // block boundaries, and the blocks aren't ordered so the block visitation
  // order isn't significant, but we may want to change this in the future.
  for (MachineBasicBlock &MBB : MF) {
    // Don't use a range-based for loop, because we modify the list as we're
    // iterating over it and the end iterator may change.
    for (auto MII = MBB.rbegin(); MII != MBB.rend(); ++MII) {
      MachineInstr *Insert = &*MII;
      // Don't nest anything inside a phi.
      if (Insert->getOpcode() == TargetOpcode::PHI)
        break;

      // Don't nest anything inside an inline asm, because we don't have
      // constraints for $push inputs.
      if (Insert->getOpcode() == TargetOpcode::INLINEASM)
        continue;

      // Ignore debugging intrinsics.
      if (Insert->getOpcode() == TargetOpcode::DBG_VALUE)
        continue;

      // Iterate through the inputs in reverse order, since we'll be pulling
      // operands off the stack in LIFO order.
      CommutingState Commuting;
      TreeWalkerState TreeWalker(Insert);
      while (!TreeWalker.Done()) {
        MachineOperand &Op = TreeWalker.Pop();

        // We're only interested in explicit virtual register operands.
        if (!Op.isReg())
          continue;

        unsigned Reg = Op.getReg();
        assert(Op.isUse() && "explicit_uses() should only iterate over uses");
        assert(!Op.isImplicit() &&
               "explicit_uses() should only iterate over explicit operands");
        if (TargetRegisterInfo::isPhysicalRegister(Reg))
          continue;

        // Identify the definition for this register at this point.
        MachineInstr *Def = GetVRegDef(Reg, Insert, MRI, LIS);
        if (!Def)
          continue;

        // Don't nest an INLINE_ASM def into anything, because we don't have
        // constraints for $pop outputs.
        if (Def->getOpcode() == TargetOpcode::INLINEASM)
          continue;

        // Don't nest PHIs inside of anything.
        if (Def->getOpcode() == TargetOpcode::PHI)
          continue;

        // Argument instructions represent live-in registers and not real
        // instructions.
        if (Def->getOpcode() == WebAssembly::ARGUMENT_I32 ||
            Def->getOpcode() == WebAssembly::ARGUMENT_I64 ||
            Def->getOpcode() == WebAssembly::ARGUMENT_F32 ||
            Def->getOpcode() == WebAssembly::ARGUMENT_F64)
          continue;
        // Decide which strategy to take. Prefer to move a single-use value
        // over cloning it, and prefer cloning over introducing a tee_local.
        // For moving, we require the def to be in the same block as the use;
        // this makes things simpler (LiveIntervals' handleMove function only
        // supports intra-block moves) and it's MachineSink's job to catch all
        // the sinking opportunities anyway.
        bool SameBlock = Def->getParent() == &MBB;
        bool CanMove = SameBlock && IsSafeToMove(Def, Insert, AA, LIS, MRI) &&
                       !TreeWalker.IsOnStack(Reg);
        if (CanMove && MRI.hasOneUse(Reg)) {
          Insert = MoveForSingleUse(Reg, Op, Def, MBB, Insert, LIS, MFI, MRI);
        } else if (ShouldRematerialize(Def, AA, TII)) {
          Insert = RematerializeCheapDef(Reg, Op, Def, MBB, Insert, LIS, MFI,
                                         MRI, TII, TRI);
        } else if (CanMove &&
                   OneUseDominatesOtherUses(Reg, Op, MBB, MRI, MDT, LIS)) {
          Insert = MoveAndTeeForMultiUse(Reg, Op, Def, MBB, Insert, LIS, MFI,
                                         MRI, TII);
        } else {
          // We failed to stackify the operand. If the problem was ordering
          // constraints, Commuting may be able to help.
          if (!CanMove && SameBlock)
            Commuting.MaybeCommute(Insert, TreeWalker, TII);
          // Proceed to the next operand.
          continue;
        }

        // We stackified an operand. Add the defining instruction's operands to
        // the worklist stack now to continue to build an ever deeper tree.
        Commuting.Reset();
        TreeWalker.PushOperands(Insert);
      }

      // If we stackified any operands, skip over the tree to start looking for
      // the next instruction we can build a tree on.
      if (Insert != &*MII) {
        ImposeStackOrdering(&*MII);
        MII = std::prev(
            llvm::make_reverse_iterator(MachineBasicBlock::iterator(Insert)));
        Changed = true;
      }
    }
  }

  // If we used EXPR_STACK anywhere, add it to the live-in sets everywhere so
  // that it never looks like a use-before-def.
  if (Changed) {
    MF.getRegInfo().addLiveIn(WebAssembly::EXPR_STACK);
    for (MachineBasicBlock &MBB : MF)
      MBB.addLiveIn(WebAssembly::EXPR_STACK);
  }

#ifndef NDEBUG
  // Verify that pushes and pops are performed in LIFO order.
  SmallVector<unsigned, 0> Stack;
  for (MachineBasicBlock &MBB : MF) {
    for (MachineInstr &MI : MBB) {
      if (MI.isDebugValue())
        continue;
      for (MachineOperand &MO : reverse(MI.explicit_operands())) {
        if (!MO.isReg())
          continue;
        unsigned Reg = MO.getReg();

        if (MFI.isVRegStackified(Reg)) {
          if (MO.isDef())
            Stack.push_back(Reg);
          else
            assert(Stack.pop_back_val() == Reg &&
                   "Register stack pop should be paired with a push");
        }
      }
    }
    // TODO: Generalize this code to support keeping values on the stack across
    // basic block boundaries.
    assert(Stack.empty() &&
           "Register stack pushes and pops should be balanced");
  }
#endif

  return Changed;
}