//===-- MemorySSAUpdater.cpp - Memory SSA Updater--------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------===//
//
// This file implements the MemorySSAUpdater class.
//
//===----------------------------------------------------------------===//
#include "llvm/Analysis/MemorySSAUpdater.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/Analysis/MemorySSA.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/FormattedStream.h"
#include <algorithm>

#define DEBUG_TYPE "memoryssa"
using namespace llvm;

// This is the marker algorithm from "Simple and Efficient Construction of
// Static Single Assignment Form".
// The simple, non-marker algorithm places phi nodes at any join.
// Here, we place markers, and only place phi nodes if they end up necessary.
// They are only necessary if they break a cycle (IE we recursively visit
// ourselves again), or we discover, while getting the value of the operands,
// that there are two or more definitions needing to be merged.
// This still will leave non-minimal form in the case of irreducible control
// flow, where phi nodes may be in cycles with themselves, but unnecessary.
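//
// For example (illustrative): given a diamond CFG A -> {B, C} -> D where the
// only MemoryDef is in A, a query starting in D recurses into B and C, both
// of which resolve to the def in A. The phi that would be placed in D is then
// trivial (all of its would-be operands are identical), so it is never
// inserted.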
MemoryAccess *MemorySSAUpdater::getPreviousDefRecursive(
    BasicBlock *BB,
    DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> &CachedPreviousDef) {
  // First, do a cache lookup. Without this cache, certain CFG structures
  // (like a series of if statements) take exponential time to visit.
  auto Cached = CachedPreviousDef.find(BB);
  if (Cached != CachedPreviousDef.end()) {
    return Cached->second;
  } else if (BasicBlock *Pred = BB->getSinglePredecessor()) {
    // Single predecessor case, just recurse; we can only have one definition.
    MemoryAccess *Result = getPreviousDefFromEnd(Pred, CachedPreviousDef);
    CachedPreviousDef.insert({BB, Result});
    return Result;
  } else if (VisitedBlocks.count(BB)) {
    // We hit our node again, meaning we had a cycle; we must insert a phi
    // node to break it so we have an operand. The only case this will
    // insert useless phis is if we have irreducible control flow.
    MemoryAccess *Result = MSSA->createMemoryPhi(BB);
    CachedPreviousDef.insert({BB, Result});
    return Result;
  } else if (VisitedBlocks.insert(BB).second) {
    // Mark us visited so we can detect a cycle.
    SmallVector<MemoryAccess *, 8> PhiOps;

    // Recurse to get the values in our predecessors for placement of a
    // potential phi node. This will insert phi nodes if we cycle, in order to
    // break the cycle and have an operand.
    for (auto *Pred : predecessors(BB))
      PhiOps.push_back(getPreviousDefFromEnd(Pred, CachedPreviousDef));

    // Now try to simplify the ops to avoid placing a phi.
    // This may return null if we haven't created a phi yet; that's okay.
    MemoryPhi *Phi = dyn_cast_or_null<MemoryPhi>(MSSA->getMemoryAccess(BB));
    bool PHIExistsButNeedsUpdate = false;
    // See if the existing phi operands match what we need.
    // Unlike normal SSA, we only allow one phi node per block, so we can't
    // just create a new one.
    if (Phi && Phi->getNumOperands() != 0)
      if (!std::equal(Phi->op_begin(), Phi->op_end(), PhiOps.begin())) {
        PHIExistsButNeedsUpdate = true;
      }

    // See if we can avoid the phi by simplifying it.
    auto *Result = tryRemoveTrivialPhi(Phi, PhiOps);
    // If we couldn't simplify, we may have to create a phi.
    if (Result == Phi) {
      if (!Phi)
        Phi = MSSA->createMemoryPhi(BB);

      // These will have been filled in by the recursive read we did above.
      if (PHIExistsButNeedsUpdate) {
        std::copy(PhiOps.begin(), PhiOps.end(), Phi->op_begin());
        std::copy(pred_begin(BB), pred_end(BB), Phi->block_begin());
      } else {
        unsigned i = 0;
        for (auto *Pred : predecessors(BB))
          Phi->addIncoming(PhiOps[i++], Pred);
        InsertedPHIs.push_back(Phi);
      }
      Result = Phi;
    }

    // Set ourselves up for the next variable by resetting visited state.
    VisitedBlocks.erase(BB);
    CachedPreviousDef.insert({BB, Result});
    return Result;
  }
  llvm_unreachable("Should have hit one of the three cases above");
}

// This starts at the memory access and goes backwards in the block to find
// the previous definition. If a definition is not found in the block of the
// access, it continues globally, creating phi nodes to ensure we have a
// single definition.
MemoryAccess *MemorySSAUpdater::getPreviousDef(MemoryAccess *MA) {
  if (auto *LocalResult = getPreviousDefInBlock(MA))
    return LocalResult;
  DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> CachedPreviousDef;
  return getPreviousDefRecursive(MA->getBlock(), CachedPreviousDef);
}

// This starts at the memory access and goes backwards in the block to find
// the previous definition. If the definition is not found in the block of the
// access, it returns nullptr.
MemoryAccess *MemorySSAUpdater::getPreviousDefInBlock(MemoryAccess *MA) {
  auto *Defs = MSSA->getWritableBlockDefs(MA->getBlock());

  // It's possible there are no defs, or we got handed the first def to start.
  if (Defs) {
    // If this is a def, we can just use the def iterators.
    if (!isa<MemoryUse>(MA)) {
      auto Iter = MA->getReverseDefsIterator();
      ++Iter;
      if (Iter != Defs->rend())
        return &*Iter;
    } else {
      // Otherwise, we have to walk the all-accesses iterator.
      auto End = MSSA->getWritableBlockAccesses(MA->getBlock())->rend();
      for (auto &U : make_range(++MA->getReverseIterator(), End))
        if (!isa<MemoryUse>(U))
          return cast<MemoryAccess>(&U);
      // Note that if MA comes before Defs->begin(), we won't hit a def.
      return nullptr;
    }
  }
  return nullptr;
}

// This starts at the end of the block.
MemoryAccess *MemorySSAUpdater::getPreviousDefFromEnd(
    BasicBlock *BB,
    DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> &CachedPreviousDef) {
  auto *Defs = MSSA->getWritableBlockDefs(BB);

  if (Defs)
    return &*Defs->rbegin();

  return getPreviousDefRecursive(BB, CachedPreviousDef);
}

// Recurse over a set of phi uses to eliminate the trivial ones.
MemoryAccess *MemorySSAUpdater::recursePhi(MemoryAccess *Phi) {
  if (!Phi)
    return nullptr;
  TrackingVH<MemoryAccess> Res(Phi);
  SmallVector<TrackingVH<Value>, 8> Uses;
  std::copy(Phi->user_begin(), Phi->user_end(), std::back_inserter(Uses));
  for (auto &U : Uses) {
    if (MemoryPhi *UsePhi = dyn_cast<MemoryPhi>(&*U)) {
      auto OperRange = UsePhi->operands();
      tryRemoveTrivialPhi(UsePhi, OperRange);
    }
  }
  return Res;
}

// Eliminate trivial phis.
// Phis are trivial if they are defined either by themselves, or all the same
// argument.
// IE phi(a, a) or b = phi(a, b) or c = phi(a, a, c)
// We recursively try to remove them.
template <class RangeType>
MemoryAccess *MemorySSAUpdater::tryRemoveTrivialPhi(MemoryPhi *Phi,
                                                    RangeType &Operands) {
  // Bail out on non-opt Phis.
  if (NonOptPhis.count(Phi))
    return Phi;

  // Detect equal or self arguments.
  MemoryAccess *Same = nullptr;
  for (auto &Op : Operands) {
    // If the same or self, good so far.
    if (Op == Phi || Op == Same)
      continue;
    // Not the same, return the phi since it's not eliminatable by us.
    if (Same)
      return Phi;
    Same = cast<MemoryAccess>(Op);
  }
  // Never found a non-self reference, so the phi is undef.
  if (Same == nullptr)
    return MSSA->getLiveOnEntryDef();
  if (Phi) {
    Phi->replaceAllUsesWith(Same);
    removeMemoryAccess(Phi);
  }

  // We should only end up recursing in case we replaced something, in which
  // case, we may have made other Phis trivial.
  return recursePhi(Same);
}

void MemorySSAUpdater::insertUse(MemoryUse *MU) {
  InsertedPHIs.clear();
  MU->setDefiningAccess(getPreviousDef(MU));
  // Unlike for defs, there is no extra work to do. Because uses do not create
  // new may-defs, there are only two cases:
  //
  // 1. There was a def already below us, and therefore, we should not have
  // created a phi node, because it was already needed for the def.
  //
  // 2. There is no def below us, and therefore, there is no extra renaming
  // work to do.
}

// Set every incoming edge {BB, MP->getBlock()} of MemoryPhi MP to NewDef.
static void setMemoryPhiValueForBlock(MemoryPhi *MP, const BasicBlock *BB,
                                      MemoryAccess *NewDef) {
  // Replace every operand whose incoming block is BB with the new defining
  // access.
  int i = MP->getBasicBlockIndex(BB);
  assert(i != -1 && "Should have found the basic block in the phi");
  // We can't just compare i against getNumOperands since one is signed and
  // the other is not. So use it to index into the block iterator.
  for (auto BBIter = MP->block_begin() + i; BBIter != MP->block_end();
       ++BBIter) {
    if (*BBIter != BB)
      break;
    MP->setIncomingValue(i, NewDef);
    ++i;
  }
}

// A brief description of the algorithm:
// First, we compute what should define the new def, using the SSA
// construction algorithm.
// Then, we update the defs below us (and any new phi nodes) in the graph to
// point to the correct new defs, to ensure we only have one variable, and no
// disconnected stores.
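//
// For example (illustrative): if a new store is inserted between an existing
// store S and a later store T in the same block, the new MemoryDef takes S's
// access as its defining access, and T's MemoryDef (along with any MemoryPhis
// that used S) is re-pointed at the new def. MemoryUses are left alone here
// and are only rewritten when RenameUses is set.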
void MemorySSAUpdater::insertDef(MemoryDef *MD, bool RenameUses) {
  InsertedPHIs.clear();

  // See if we had a local def, and if not, go hunting.
  MemoryAccess *DefBefore = getPreviousDef(MD);
  bool DefBeforeSameBlock = DefBefore->getBlock() == MD->getBlock();

  // There is a def before us, which means we can replace any store/phi uses
  // of that thing with us, since we are in the way of whatever was there
  // before.
  // We now become the defining access for that def's MemoryDef and MemoryPhi
  // users.
  if (DefBeforeSameBlock) {
    for (auto UI = DefBefore->use_begin(), UE = DefBefore->use_end();
         UI != UE;) {
      Use &U = *UI++;
      // Leave the MemoryUses alone.
      if (isa<MemoryUse>(U.getUser()))
        continue;
      U.set(MD);
    }
  }

  // And that def is now our defining access.
  // We change them in this order, otherwise we would appear in the use list
  // above and reset ourselves.
  MD->setDefiningAccess(DefBefore);

  SmallVector<MemoryAccess *, 8> FixupList(InsertedPHIs.begin(),
                                           InsertedPHIs.end());
  if (!DefBeforeSameBlock) {
    // If there was a local def before us, we must have the same effect it
    // did. Because every may-def is the same, any phis/etc we would create,
    // it would also have created. If there was no local def before us, we
    // performed a global update, and have to search all successors and make
    // sure we update the first def in each of them (following all paths until
    // we hit the first def along each path). This may also insert phi nodes.
    // TODO: There are other cases where we can skip this work, such as when
    // we have a single successor, and only used a straight line of single
    // pred blocks backwards to find the def. To make that work, we'd have to
    // track whether getDefRecursive only ever used the single predecessor
    // case. These types of paths also only exist in between CFG
    // simplifications.
    FixupList.push_back(MD);
  }

  while (!FixupList.empty()) {
    unsigned StartingPHISize = InsertedPHIs.size();
    fixupDefs(FixupList);
    FixupList.clear();
    // Put any new phis on the fixup list, and process them.
    FixupList.append(InsertedPHIs.begin() + StartingPHISize,
                     InsertedPHIs.end());
  }
  // Now that all fixups are done, rename all uses if we are asked to.
  if (RenameUses) {
    SmallPtrSet<BasicBlock *, 16> Visited;
    BasicBlock *StartBlock = MD->getBlock();
    // We are guaranteed there is a def in the block, because we just got it
    // handed to us in this function.
    MemoryAccess *FirstDef = &*MSSA->getWritableBlockDefs(StartBlock)->begin();
    // Convert to incoming value if it's a MemoryDef. A phi *is* already an
    // incoming value.
    if (auto *MD = dyn_cast<MemoryDef>(FirstDef))
      FirstDef = MD->getDefiningAccess();

    MSSA->renamePass(MD->getBlock(), FirstDef, Visited);
    // We just inserted a phi into this block, so the incoming value will
    // become the phi anyway, so it does not matter what we pass.
    for (auto *MP : InsertedPHIs)
      MSSA->renamePass(MP->getBlock(), nullptr, Visited);
  }
}
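
// fixupDefs rewires everything below the newly created defs/phis in Vars: a
// def later in the same block simply takes the new access as its defining
// access. Otherwise we walk the successors; a block that already starts with
// a MemoryPhi has its incoming value for this edge redirected, and the first
// non-phi def found on a path has its defining access recomputed, which may
// create further phis (collected in InsertedPHIs and processed by the
// caller).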
void MemorySSAUpdater::fixupDefs(const SmallVectorImpl<MemoryAccess *> &Vars) {
  SmallPtrSet<const BasicBlock *, 8> Seen;
  SmallVector<const BasicBlock *, 16> Worklist;
  for (auto *NewDef : Vars) {
    // First, see if there is a local def after the operand.
    auto *Defs = MSSA->getWritableBlockDefs(NewDef->getBlock());
    auto DefIter = NewDef->getDefsIterator();

    // This Phi is being fixed up now, so it no longer needs to be treated as
    // non-optimizable.
    if (MemoryPhi *Phi = dyn_cast_or_null<MemoryPhi>(NewDef))
      NonOptPhis.erase(Phi);

    // If there is a local def after us, we only have to rename that.
    if (++DefIter != Defs->end()) {
      cast<MemoryDef>(&*DefIter)->setDefiningAccess(NewDef);
      continue;
    }

    // Otherwise, we need to search down through the CFG.
    // For each of our successors, handle it directly if there is a phi, or
    // place it on the fixup worklist.
    for (const auto *S : successors(NewDef->getBlock())) {
      if (auto *MP = MSSA->getMemoryAccess(S))
        setMemoryPhiValueForBlock(MP, NewDef->getBlock(), NewDef);
      else
        Worklist.push_back(S);
    }

    while (!Worklist.empty()) {
      const BasicBlock *FixupBlock = Worklist.back();
      Worklist.pop_back();

      // Get the first def in the block that isn't a phi node.
      if (auto *Defs = MSSA->getWritableBlockDefs(FixupBlock)) {
        auto *FirstDef = &*Defs->begin();
        // The loops above and below should have taken care of phi nodes.
        assert(!isa<MemoryPhi>(FirstDef) &&
               "Should have already handled phi nodes!");
        // We are now this def's defining access; make sure we actually
        // dominate it.
        assert(MSSA->dominates(NewDef, FirstDef) &&
               "Should have dominated the new access");

        // This may insert new phi nodes, because we are not guaranteed the
        // block we are processing has a single pred, and depending where the
        // store was inserted, it may require phi nodes below it.
        cast<MemoryDef>(FirstDef)->setDefiningAccess(getPreviousDef(FirstDef));
        return;
      }
      // We didn't find a def, so we must continue.
      for (const auto *S : successors(FixupBlock)) {
        // If there is a phi node, handle it.
        // Otherwise, put the block on the worklist.
        if (auto *MP = MSSA->getMemoryAccess(S))
          setMemoryPhiValueForBlock(MP, FixupBlock, NewDef);
        else {
          // If we cycle, we should have ended up at a phi node that we
          // already processed. FIXME: Double check this.
          if (!Seen.insert(S).second)
            continue;
          Worklist.push_back(S);
        }
      }
    }
  }
}

// Move What before Where in the MemorySSA IR.
template <class WhereType>
void MemorySSAUpdater::moveTo(MemoryUseOrDef *What, BasicBlock *BB,
                              WhereType Where) {
  // Mark MemoryPhi users of What not to be optimized.
  for (auto *U : What->users())
    if (MemoryPhi *PhiUser = dyn_cast_or_null<MemoryPhi>(U))
      NonOptPhis.insert(PhiUser);

  // Replace all our users with our defining access.
  What->replaceAllUsesWith(What->getDefiningAccess());

  // Let MemorySSA take care of moving it around in the lists.
  MSSA->moveTo(What, BB, Where);

  // Now reinsert it into the IR and do whatever fixups are needed.
  if (auto *MD = dyn_cast<MemoryDef>(What))
    insertDef(MD);
  else
    insertUse(cast<MemoryUse>(What));

  // Clear dangling pointers. We added all MemoryPhi users, but not all
  // of them are removed by fixupDefs().
  NonOptPhis.clear();
}

// Move What before Where in the MemorySSA IR.
void MemorySSAUpdater::moveBefore(MemoryUseOrDef *What, MemoryUseOrDef *Where) {
  moveTo(What, Where->getBlock(), Where->getIterator());
}

// Move What after Where in the MemorySSA IR.
void MemorySSAUpdater::moveAfter(MemoryUseOrDef *What, MemoryUseOrDef *Where) {
  moveTo(What, Where->getBlock(), ++Where->getIterator());
}

void MemorySSAUpdater::moveToPlace(MemoryUseOrDef *What, BasicBlock *BB,
                                   MemorySSA::InsertionPlace Where) {
  return moveTo(What, BB, Where);
}

/// If all arguments of a MemoryPHI are defined by the same incoming
/// argument, return that argument.
static MemoryAccess *onlySingleValue(MemoryPhi *MP) {
  MemoryAccess *MA = nullptr;

  for (auto &Arg : MP->operands()) {
    if (!MA)
      MA = cast<MemoryAccess>(Arg);
    else if (MA != Arg)
      return nullptr;
  }
  return MA;
}

void MemorySSAUpdater::removeMemoryAccess(MemoryAccess *MA) {
  assert(!MSSA->isLiveOnEntryDef(MA) &&
         "Trying to remove the live on entry def");
  // We can only delete phi nodes if they have no uses, or we can replace all
  // uses with a single definition.
  MemoryAccess *NewDefTarget = nullptr;
  if (MemoryPhi *MP = dyn_cast<MemoryPhi>(MA)) {
    // Note that it is sufficient to know that all edges of the phi node have
    // the same argument. If they do, by the definition of dominance frontiers
    // (which we used to place this phi), that argument must dominate this
    // phi, and thus, must dominate the phi's uses, and so we will not hit the
    // assert below.
    NewDefTarget = onlySingleValue(MP);
    assert((NewDefTarget || MP->use_empty()) &&
           "We can't delete this memory phi");
  } else {
    NewDefTarget = cast<MemoryUseOrDef>(MA)->getDefiningAccess();
  }

  // Re-point the uses at our defining access.
  if (!isa<MemoryUse>(MA) && !MA->use_empty()) {
    // Reset optimized on users of this store, and reset the uses.
    // A few notes:
    // 1. This is a slightly modified version of RAUW to avoid walking the
    // uses twice here.
    // 2. If we wanted to be complete, we would have to reset the optimized
    // flags on users of phi nodes if doing the below makes a phi node have
    // all the same arguments. Instead, we prefer users to call
    // removeMemoryAccess on those phi nodes themselves, because doing it here
    // would be N^3.
    if (MA->hasValueHandle())
      ValueHandleBase::ValueIsRAUWd(MA, NewDefTarget);
    // Note: We assume MemorySSA is not used in metadata since it's not really
    // part of the IR.

    while (!MA->use_empty()) {
      Use &U = *MA->use_begin();
      if (auto *MUD = dyn_cast<MemoryUseOrDef>(U.getUser()))
        MUD->resetOptimized();
      U.set(NewDefTarget);
    }
  }

  // The calls below to erase will destroy MA, so we can't change the order we
  // are doing things here.
  MSSA->removeFromLookups(MA);
  MSSA->removeFromLists(MA);
}
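
// Note: the createMemoryAccess* helpers below only construct the new access
// and splice it into the block lists; they do not rewire any existing
// MemoryDefs, MemoryPhis, or MemoryUses, so callers that introduce a new
// may-def are responsible for any further updating (e.g. via insertDef).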
MemoryAccess *MemorySSAUpdater::createMemoryAccessInBB(
    Instruction *I, MemoryAccess *Definition, const BasicBlock *BB,
    MemorySSA::InsertionPlace Point) {
  MemoryUseOrDef *NewAccess = MSSA->createDefinedAccess(I, Definition);
  MSSA->insertIntoListsForBlock(NewAccess, BB, Point);
  return NewAccess;
}

MemoryUseOrDef *MemorySSAUpdater::createMemoryAccessBefore(
    Instruction *I, MemoryAccess *Definition, MemoryUseOrDef *InsertPt) {
  assert(I->getParent() == InsertPt->getBlock() &&
         "New and old access must be in the same block");
  MemoryUseOrDef *NewAccess = MSSA->createDefinedAccess(I, Definition);
  MSSA->insertIntoListsBefore(NewAccess, InsertPt->getBlock(),
                              InsertPt->getIterator());
  return NewAccess;
}

MemoryUseOrDef *MemorySSAUpdater::createMemoryAccessAfter(
    Instruction *I, MemoryAccess *Definition, MemoryAccess *InsertPt) {
  assert(I->getParent() == InsertPt->getBlock() &&
         "New and old access must be in the same block");
  MemoryUseOrDef *NewAccess = MSSA->createDefinedAccess(I, Definition);
  MSSA->insertIntoListsBefore(NewAccess, InsertPt->getBlock(),
                              ++InsertPt->getIterator());
  return NewAccess;
}