//===-- MemorySSAUpdater.cpp - Memory SSA Updater--------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------===//
//
// This file implements the MemorySSAUpdater class.
//
//===----------------------------------------------------------------===//
#include "llvm/Analysis/MemorySSAUpdater.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/Analysis/MemorySSA.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/FormattedStream.h"
#include <algorithm>

#define DEBUG_TYPE "memoryssa"
using namespace llvm;

// This is the marker algorithm from "Simple and Efficient Construction of
// Static Single Assignment Form"
// The simple, non-marker algorithm places phi nodes at any join
// Here, we place markers, and only place phi nodes if they end up necessary.
// They are only necessary if they break a cycle (IE we recursively visit
// ourselves again), or we discover, while getting the value of the operands,
// that there are two or more definitions needing to be merged.
// This still will leave non-minimal form in the case of irreducible control
// flow, where phi nodes may be in cycles with themselves, but unnecessary.
MemoryAccess *MemorySSAUpdater::getPreviousDefRecursive(
    BasicBlock *BB,
    DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> &CachedPreviousDef) {
  // First, do a cache lookup. Without this cache, certain CFG structures
  // (like a series of if statements) take exponential time to visit.
  auto Cached = CachedPreviousDef.find(BB);
  if (Cached != CachedPreviousDef.end()) {
    return Cached->second;
  }

  if (BasicBlock *Pred = BB->getSinglePredecessor()) {
    // Single predecessor case, just recurse, we can only have one definition.
    MemoryAccess *Result = getPreviousDefFromEnd(Pred, CachedPreviousDef);
    CachedPreviousDef.insert({BB, Result});
    return Result;
  }

  if (VisitedBlocks.count(BB)) {
    // We hit our node again, meaning we had a cycle, we must insert a phi
    // node to break it so we have an operand. The only case this will
    // insert useless phis is if we have irreducible control flow.
    MemoryAccess *Result = MSSA->createMemoryPhi(BB);
    CachedPreviousDef.insert({BB, Result});
    return Result;
  }

  if (VisitedBlocks.insert(BB).second) {
    // Mark us visited so we can detect a cycle.
    SmallVector<MemoryAccess *, 8> PhiOps;

    // Recurse to get the values in our predecessors for placement of a
    // potential phi node. This will insert phi nodes if we cycle in order to
    // break the cycle and have an operand.
    for (auto *Pred : predecessors(BB))
      PhiOps.push_back(getPreviousDefFromEnd(Pred, CachedPreviousDef));

    // Now try to simplify the ops to avoid placing a phi.
    // This may return null if we never created a phi yet; that's okay.
    MemoryPhi *Phi = dyn_cast_or_null<MemoryPhi>(MSSA->getMemoryAccess(BB));
    bool PHIExistsButNeedsUpdate = false;
    // See if the existing phi operands match what we need.
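    // (Illustrative, hypothetical example: if BB has predecessors P1 and P2,
    // PhiOps holds the defs reaching the ends of P1 and P2 in predecessor
    // order, so an existing MemoryPhi(P1: X, P2: Y) is reusable as-is only
    // if PhiOps is exactly {X, Y}.)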
    // Unlike normal SSA, we only allow one phi node per block, so we can't
    // just create a new one.
    if (Phi && Phi->getNumOperands() != 0)
      if (!std::equal(Phi->op_begin(), Phi->op_end(), PhiOps.begin())) {
        PHIExistsButNeedsUpdate = true;
      }

    // See if we can avoid the phi by simplifying it.
    auto *Result = tryRemoveTrivialPhi(Phi, PhiOps);
    // If we couldn't simplify, we may have to create a phi.
    if (Result == Phi) {
      if (!Phi)
        Phi = MSSA->createMemoryPhi(BB);

      // These will have been filled in by the recursive read we did above.
      if (PHIExistsButNeedsUpdate) {
        std::copy(PhiOps.begin(), PhiOps.end(), Phi->op_begin());
        std::copy(pred_begin(BB), pred_end(BB), Phi->block_begin());
      } else {
        unsigned i = 0;
        for (auto *Pred : predecessors(BB))
          Phi->addIncoming(PhiOps[i++], Pred);
        InsertedPHIs.push_back(Phi);
      }
      Result = Phi;
    }

    // Set ourselves up for the next variable by resetting visited state.
    VisitedBlocks.erase(BB);
    CachedPreviousDef.insert({BB, Result});
    return Result;
  }
  llvm_unreachable("Should have hit one of the three cases above");
}

// This starts at the memory access, and goes backwards in the block to find
// the previous definition. If a definition is not found in the block of the
// access, it continues globally, creating phi nodes to ensure we have a
// single definition.
MemoryAccess *MemorySSAUpdater::getPreviousDef(MemoryAccess *MA) {
  if (auto *LocalResult = getPreviousDefInBlock(MA))
    return LocalResult;
  DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> CachedPreviousDef;
  return getPreviousDefRecursive(MA->getBlock(), CachedPreviousDef);
}

// This starts at the memory access, and goes backwards in the block to find
// the previous definition. If the definition is not found in the block of the
// access, it returns nullptr.
MemoryAccess *MemorySSAUpdater::getPreviousDefInBlock(MemoryAccess *MA) {
  auto *Defs = MSSA->getWritableBlockDefs(MA->getBlock());

  // It's possible there are no defs, or we got handed the first def to start.
  if (Defs) {
    // If this is a def, we can just use the def iterators.
    if (!isa<MemoryUse>(MA)) {
      auto Iter = MA->getReverseDefsIterator();
      ++Iter;
      if (Iter != Defs->rend())
        return &*Iter;
    } else {
      // Otherwise, have to walk the all access iterator.
      auto End = MSSA->getWritableBlockAccesses(MA->getBlock())->rend();
      for (auto &U : make_range(++MA->getReverseIterator(), End))
        if (!isa<MemoryUse>(U))
          return cast<MemoryAccess>(&U);
      // Note that if MA comes before Defs->begin(), we won't hit a def.
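      // (Hypothetical example: if the block's accesses are
      // [MemoryUse A, MemoryDef B, MemoryUse C], a query starting at C walks
      // backwards and returns B, while a query starting at A walks off the
      // front of the list and falls through to the nullptr below.)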
      return nullptr;
    }
  }
  return nullptr;
}

// This starts at the end of block.
MemoryAccess *MemorySSAUpdater::getPreviousDefFromEnd(
    BasicBlock *BB,
    DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> &CachedPreviousDef) {
  auto *Defs = MSSA->getWritableBlockDefs(BB);

  if (Defs)
    return &*Defs->rbegin();

  return getPreviousDefRecursive(BB, CachedPreviousDef);
}
// Recurse over a set of phi uses to eliminate the trivial ones.
MemoryAccess *MemorySSAUpdater::recursePhi(MemoryAccess *Phi) {
  if (!Phi)
    return nullptr;
  TrackingVH<MemoryAccess> Res(Phi);
  SmallVector<TrackingVH<Value>, 8> Uses;
  std::copy(Phi->user_begin(), Phi->user_end(), std::back_inserter(Uses));
  for (auto &U : Uses) {
    if (MemoryPhi *UsePhi = dyn_cast<MemoryPhi>(&*U)) {
      auto OperRange = UsePhi->operands();
      tryRemoveTrivialPhi(UsePhi, OperRange);
    }
  }
  return Res;
}

// Eliminate trivial phis.
// Phis are trivial if they are defined either by themselves, or all the same
// argument.
// IE phi(a, a) or b = phi(a, b) or c = phi(a, a, c)
// We recursively try to remove them.
template <class RangeType>
MemoryAccess *MemorySSAUpdater::tryRemoveTrivialPhi(MemoryPhi *Phi,
                                                    RangeType &Operands) {
  // Bail out on non-opt Phis.
  if (NonOptPhis.count(Phi))
    return Phi;

  // Detect equal or self arguments.
  MemoryAccess *Same = nullptr;
  for (auto &Op : Operands) {
    // If the same or self, good so far.
    if (Op == Phi || Op == Same)
      continue;
    // Not the same, return the phi since it's not eliminatable by us.
    if (Same)
      return Phi;
    Same = cast<MemoryAccess>(Op);
  }
  // Never found a non-self reference, the phi is undef.
  if (Same == nullptr)
    return MSSA->getLiveOnEntryDef();
  if (Phi) {
    Phi->replaceAllUsesWith(Same);
    removeMemoryAccess(Phi);
  }

  // We should only end up recursing in case we replaced something, in which
  // case, we may have made other Phis trivial.
  return recursePhi(Same);
}

void MemorySSAUpdater::insertUse(MemoryUse *MU) {
  InsertedPHIs.clear();
  MU->setDefiningAccess(getPreviousDef(MU));
  // Unlike for defs, there is no extra work to do. Because uses do not create
  // new may-defs, there are only two cases:
  //
  // 1. There was a def already below us, and therefore, we should not have
  // created a phi node because it was already needed for the def.
  //
  // 2. There is no def below us, and therefore, there is no extra renaming
  // work to do.
}

// Set every incoming edge {BB, MP->getBlock()} of MemoryPhi MP to NewDef.
static void setMemoryPhiValueForBlock(MemoryPhi *MP, const BasicBlock *BB,
                                      MemoryAccess *NewDef) {
  // Replace any operand whose incoming block is BB with the new defining
  // access.
  int i = MP->getBasicBlockIndex(BB);
  assert(i != -1 && "Should have found the basic block in the phi");
  // We can't just compare i against getNumOperands since one is signed and
  // the other not. So use it to index into the block iterator.
  for (auto BBIter = MP->block_begin() + i; BBIter != MP->block_end();
       ++BBIter) {
    if (*BBIter != BB)
      break;
    MP->setIncomingValue(i, NewDef);
    ++i;
  }
}

// A brief description of the algorithm:
// First, we compute what should define the new def, using the SSA
// construction algorithm.
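// (For illustration: for a new MemoryDef, this step finds the nearest
// MemoryDef or MemoryPhi above it, either earlier in its own block or
// reaching the block from its predecessors, creating new MemoryPhis where
// paths with different reaching defs merge.)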
// Then, we update the defs below us (and any new phi nodes) in the graph to
// point to the correct new defs, to ensure we only have one variable, and no
// disconnected stores.
void MemorySSAUpdater::insertDef(MemoryDef *MD, bool RenameUses) {
  InsertedPHIs.clear();

  // See if we had a local def, and if not, go hunting.
  MemoryAccess *DefBefore = getPreviousDef(MD);
  bool DefBeforeSameBlock = DefBefore->getBlock() == MD->getBlock();

  // There is a def before us, which means we can replace any store/phi uses
  // of that thing with us, since we are in the way of whatever was there
  // before.
  // We now define that def's memorydefs and memoryphis.
  if (DefBeforeSameBlock) {
    for (auto UI = DefBefore->use_begin(), UE = DefBefore->use_end();
         UI != UE;) {
      Use &U = *UI++;
      // Leave the uses alone.
      if (isa<MemoryUse>(U.getUser()))
        continue;
      U.set(MD);
    }
  }

  // and that def is now our defining access.
  // We change them in this order, otherwise we will appear in the use list
  // above and reset ourselves.
  MD->setDefiningAccess(DefBefore);

  SmallVector<MemoryAccess *, 8> FixupList(InsertedPHIs.begin(),
                                           InsertedPHIs.end());
  if (!DefBeforeSameBlock) {
    // If there was a local def before us, we must have the same effect it
    // did. Because every may-def is the same, any phis/etc we would create,
    // it would also have created. If there was no local def before us, we
    // performed a global update, and have to search all successors and make
    // sure we update the first def in each of them (following all paths until
    // we hit the first def along each path). This may also insert phi nodes.
    // TODO: There are other cases where we can skip this work, such as when
    // we have a single successor, and only used a straight line of single
    // pred blocks backwards to find the def. To make that work, we'd have to
    // track whether getDefRecursive only ever used the single predecessor
    // case. These types of paths also only exist in between CFG
    // simplifications.
    FixupList.push_back(MD);
  }

  while (!FixupList.empty()) {
    unsigned StartingPHISize = InsertedPHIs.size();
    fixupDefs(FixupList);
    FixupList.clear();
    // Put any new phis on the fixup list, and process them.
    FixupList.append(InsertedPHIs.end() - StartingPHISize, InsertedPHIs.end());
  }
  // Now that all fixups are done, rename all uses if we are asked to.
  if (RenameUses) {
    SmallPtrSet<BasicBlock *, 16> Visited;
    BasicBlock *StartBlock = MD->getBlock();
    // We are guaranteed there is a def in the block, because we just got it
    // handed to us in this function.
    MemoryAccess *FirstDef = &*MSSA->getWritableBlockDefs(StartBlock)->begin();
    // Convert to incoming value if it's a memorydef. A phi *is* already an
    // incoming value.
    if (auto *MD = dyn_cast<MemoryDef>(FirstDef))
      FirstDef = MD->getDefiningAccess();

    MSSA->renamePass(MD->getBlock(), FirstDef, Visited);
    // We just inserted a phi into these blocks, so the incoming value will
    // become the phi anyway, so it does not matter what we pass.
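    // (Elaboration: each inserted MemoryPhi is the first access in its block,
    // so the renaming walk adopts it as the reaching definition right away
    // and the nullptr incoming value is never consulted.)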
    for (auto *MP : InsertedPHIs)
      MSSA->renamePass(MP->getBlock(), nullptr, Visited);
  }
}

void MemorySSAUpdater::fixupDefs(const SmallVectorImpl<MemoryAccess *> &Vars) {
  SmallPtrSet<const BasicBlock *, 8> Seen;
  SmallVector<const BasicBlock *, 16> Worklist;
  for (auto *NewDef : Vars) {
    // First, see if there is a local def after the operand.
    auto *Defs = MSSA->getWritableBlockDefs(NewDef->getBlock());
    auto DefIter = NewDef->getDefsIterator();

    // The temporary Phi is being fixed, so remove it from the set of Phis we
    // should not optimize.
    if (MemoryPhi *Phi = dyn_cast_or_null<MemoryPhi>(NewDef))
      NonOptPhis.erase(Phi);

    // If there is a local def after us, we only have to rename that.
    if (++DefIter != Defs->end()) {
      cast<MemoryDef>(&*DefIter)->setDefiningAccess(NewDef);
      continue;
    }

    // Otherwise, we need to search down through the CFG.
    // For each of our successors, handle it directly if there is a phi, or
    // place it on the fixup worklist.
    for (const auto *S : successors(NewDef->getBlock())) {
      if (auto *MP = MSSA->getMemoryAccess(S))
        setMemoryPhiValueForBlock(MP, NewDef->getBlock(), NewDef);
      else
        Worklist.push_back(S);
    }

    while (!Worklist.empty()) {
      const BasicBlock *FixupBlock = Worklist.back();
      Worklist.pop_back();

      // Get the first def in the block that isn't a phi node.
      if (auto *Defs = MSSA->getWritableBlockDefs(FixupBlock)) {
        auto *FirstDef = &*Defs->begin();
        // The loop above and below should have taken care of phi nodes.
        assert(!isa<MemoryPhi>(FirstDef) &&
               "Should have already handled phi nodes!");
        // We are now this def's defining access, make sure we actually
        // dominate it.
        assert(MSSA->dominates(NewDef, FirstDef) &&
               "Should have dominated the new access");

        // This may insert new phi nodes, because we are not guaranteed the
        // block we are processing has a single pred, and depending where the
        // store was inserted, it may require phi nodes below it.
        cast<MemoryDef>(FirstDef)->setDefiningAccess(getPreviousDef(FirstDef));
        return;
      }
      // We didn't find a def, so we must continue.
      for (const auto *S : successors(FixupBlock)) {
        // If there is a phi node, handle it.
        // Otherwise, put the block on the worklist.
        if (auto *MP = MSSA->getMemoryAccess(S))
          setMemoryPhiValueForBlock(MP, FixupBlock, NewDef);
        else {
          // If we cycle, we should have ended up at a phi node that we
          // already processed. FIXME: Double check this
          if (!Seen.insert(S).second)
            continue;
          Worklist.push_back(S);
        }
      }
    }
  }
}

// Move What before Where in the MemorySSA IR.
template <class WhereType>
void MemorySSAUpdater::moveTo(MemoryUseOrDef *What, BasicBlock *BB,
                              WhereType Where) {
  // Mark MemoryPhi users of What not to be optimized.
  for (auto *U : What->users())
    if (MemoryPhi *PhiUser = dyn_cast_or_null<MemoryPhi>(U))
      NonOptPhis.insert(PhiUser);

  // Replace all our users with our defining access.
  What->replaceAllUsesWith(What->getDefiningAccess());

  // Let MemorySSA take care of moving it around in the lists.
  MSSA->moveTo(What, BB, Where);

  // Now reinsert it into the IR and do whatever fixups are needed.
  if (auto *MD = dyn_cast<MemoryDef>(What))
    insertDef(MD);
  else
    insertUse(cast<MemoryUse>(What));

  // Clear dangling pointers. We added all MemoryPhi users, but not all
  // of them are removed by fixupDefs().
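  // (Note: NonOptPhis only needs to survive the insertDef/insertUse call
  // above, where tryRemoveTrivialPhi must not collapse a phi that looks
  // trivial while What is temporarily detached; anything left in the set at
  // this point is stale, so drop it all.)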
  NonOptPhis.clear();
}

// Move What before Where in the MemorySSA IR.
void MemorySSAUpdater::moveBefore(MemoryUseOrDef *What, MemoryUseOrDef *Where) {
  moveTo(What, Where->getBlock(), Where->getIterator());
}

// Move What after Where in the MemorySSA IR.
void MemorySSAUpdater::moveAfter(MemoryUseOrDef *What, MemoryUseOrDef *Where) {
  moveTo(What, Where->getBlock(), ++Where->getIterator());
}

void MemorySSAUpdater::moveToPlace(MemoryUseOrDef *What, BasicBlock *BB,
                                   MemorySSA::InsertionPlace Where) {
  return moveTo(What, BB, Where);
}

/// If all arguments of a MemoryPHI are defined by the same incoming
/// argument, return that argument.
static MemoryAccess *onlySingleValue(MemoryPhi *MP) {
  MemoryAccess *MA = nullptr;

  for (auto &Arg : MP->operands()) {
    if (!MA)
      MA = cast<MemoryAccess>(Arg);
    else if (MA != Arg)
      return nullptr;
  }
  return MA;
}

void MemorySSAUpdater::removeMemoryAccess(MemoryAccess *MA) {
  assert(!MSSA->isLiveOnEntryDef(MA) &&
         "Trying to remove the live on entry def");
  // We can only delete phi nodes if they have no uses, or we can replace all
  // uses with a single definition.
  MemoryAccess *NewDefTarget = nullptr;
  if (MemoryPhi *MP = dyn_cast<MemoryPhi>(MA)) {
    // Note that it is sufficient to know that all edges of the phi node have
    // the same argument. If they do, by the definition of dominance frontiers
    // (which we used to place this phi), that argument must dominate this
    // phi, and thus, must dominate the phi's uses, and so we will not hit the
    // assert below.
    NewDefTarget = onlySingleValue(MP);
    assert((NewDefTarget || MP->use_empty()) &&
           "We can't delete this memory phi");
  } else {
    NewDefTarget = cast<MemoryUseOrDef>(MA)->getDefiningAccess();
  }

  // Re-point the uses at our defining access.
  if (!isa<MemoryUse>(MA) && !MA->use_empty()) {
    // Reset optimized on users of this store, and reset the uses.
    // A few notes:
    // 1. This is a slightly modified version of RAUW to avoid walking the
    // uses twice here.
    // 2. If we wanted to be complete, we would have to reset the optimized
    // flags on users of phi nodes if doing the below makes a phi node have
    // all the same arguments. Instead, we expect callers to call
    // removeMemoryAccess on those phi nodes themselves, because doing it here
    // would be N^3.
    if (MA->hasValueHandle())
      ValueHandleBase::ValueIsRAUWd(MA, NewDefTarget);
    // Note: We assume MemorySSA is not used in metadata since it's not really
    // part of the IR.
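    // (Illustrative walk-through: for an access MA with uses {U1, U2}, each
    // iteration below takes the first remaining use, clears the user's cached
    // optimized access if it is a MemoryUse or MemoryDef, and redirects the
    // use to NewDefTarget, so the loop ends once MA's use list is empty.)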

    while (!MA->use_empty()) {
      Use &U = *MA->use_begin();
      if (auto *MUD = dyn_cast<MemoryUseOrDef>(U.getUser()))
        MUD->resetOptimized();
      U.set(NewDefTarget);
    }
  }

  // The call below to erase will destroy MA, so we can't change the order we
  // are doing things here.
  MSSA->removeFromLookups(MA);
  MSSA->removeFromLists(MA);
}

MemoryAccess *MemorySSAUpdater::createMemoryAccessInBB(
    Instruction *I, MemoryAccess *Definition, const BasicBlock *BB,
    MemorySSA::InsertionPlace Point) {
  MemoryUseOrDef *NewAccess = MSSA->createDefinedAccess(I, Definition);
  MSSA->insertIntoListsForBlock(NewAccess, BB, Point);
  return NewAccess;
}

MemoryUseOrDef *MemorySSAUpdater::createMemoryAccessBefore(
    Instruction *I, MemoryAccess *Definition, MemoryUseOrDef *InsertPt) {
  assert(I->getParent() == InsertPt->getBlock() &&
         "New and old access must be in the same block");
  MemoryUseOrDef *NewAccess = MSSA->createDefinedAccess(I, Definition);
  MSSA->insertIntoListsBefore(NewAccess, InsertPt->getBlock(),
                              InsertPt->getIterator());
  return NewAccess;
}

MemoryUseOrDef *MemorySSAUpdater::createMemoryAccessAfter(
    Instruction *I, MemoryAccess *Definition, MemoryAccess *InsertPt) {
  assert(I->getParent() == InsertPt->getBlock() &&
         "New and old access must be in the same block");
  MemoryUseOrDef *NewAccess = MSSA->createDefinedAccess(I, Definition);
  MSSA->insertIntoListsBefore(NewAccess, InsertPt->getBlock(),
                              ++InsertPt->getIterator());
  return NewAccess;
}
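// (Hypothetical usage sketch, not part of the original file: a pass that
// clones a store NewSI right after an existing store OrigSI might keep
// MemorySSA up to date roughly like this, where MSSAU is a MemorySSAUpdater
// and OrigAccess is OrigSI's MemoryDef:
//
//   MemoryUseOrDef *NewAccess = MSSAU.createMemoryAccessAfter(
//       NewSI, OrigAccess->getDefiningAccess(), OrigAccess);
//   MSSAU.insertDef(cast<MemoryDef>(NewAccess), /*RenameUses=*/true);
//
// createMemoryAccessAfter only splices the new access into the block's
// access and def lists; insertDef is what rewires defining accesses and
// inserts any MemoryPhis the new def requires.)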