//===- ScopHelper.cpp - Some Helper Functions for Scop. ------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Small functions that help with Scop and LLVM-IR.
//
//===----------------------------------------------------------------------===//

#include "polly/Support/ScopHelper.h"
#include "polly/Options.h"
#include "polly/ScopInfo.h"
#include "polly/Support/SCEVValidator.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/RegionInfo.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/ScalarEvolutionExpander.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/Support/Debug.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"

using namespace llvm;
using namespace polly;

#define DEBUG_TYPE "polly-scop-helper"

static cl::opt<bool> PollyAllowErrorBlocks(
    "polly-allow-error-blocks",
    cl::desc("Allow speculating on the execution of 'error blocks'."),
    cl::Hidden, cl::init(true), cl::ZeroOrMore, cl::cat(PollyCategory));

// Ensures that there is just one predecessor to the entry node from outside
// the region.
// The identity of the region entry node is preserved.
static void simplifyRegionEntry(Region *R, DominatorTree *DT, LoopInfo *LI,
                                RegionInfo *RI) {
  BasicBlock *EnteringBB = R->getEnteringBlock();
  BasicBlock *Entry = R->getEntry();

  // Before (one of):
  //
  //                       \    /            //
  //                      EnteringBB         //
  //                        |    \------>    //
  //   \   /                |                //
  //   Entry <--\         Entry <--\         //
  //   /   \    /         /   \    /         //
  //        ....               ....          //

  // Create single entry edge if the region has multiple entry edges.
  if (!EnteringBB) {
    SmallVector<BasicBlock *, 4> Preds;
    for (BasicBlock *P : predecessors(Entry))
      if (!R->contains(P))
        Preds.push_back(P);

    BasicBlock *NewEntering =
        SplitBlockPredecessors(Entry, Preds, ".region_entering", DT, LI);

    if (RI) {
      // The exit block of predecessor regions must be changed to NewEntering.
      for (BasicBlock *ExitPred : predecessors(NewEntering)) {
        Region *RegionOfPred = RI->getRegionFor(ExitPred);
        if (RegionOfPred->getExit() != Entry)
          continue;

        while (!RegionOfPred->isTopLevelRegion() &&
               RegionOfPred->getExit() == Entry) {
          RegionOfPred->replaceExit(NewEntering);
          RegionOfPred = RegionOfPred->getParent();
        }
      }

      // Make all ancestors that entered at Entry use NewEntering as their
      // entry instead; there might be edges to it.
      Region *AncestorR = R->getParent();
      RI->setRegionFor(NewEntering, AncestorR);
      while (!AncestorR->isTopLevelRegion() && AncestorR->getEntry() == Entry) {
        AncestorR->replaceEntry(NewEntering);
        AncestorR = AncestorR->getParent();
      }
    }

    EnteringBB = NewEntering;
  }
  assert(R->getEnteringBlock() == EnteringBB);

  // After:
  //
  //    \    /       //
  //   EnteringBB    //
  //      |          //
  //      |          //
  //    Entry <--\   //
  //    /   \    /   //
  //         ....    //
}
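
// For illustration (block names invented): if blocks %a and %b outside the
// region both branch to the region entry %entry, simplifyRegionEntry
// retargets them to a single new block "%entry.region_entering" (the
// ".region_entering" suffix is appended by SplitBlockPredecessors), which
// then becomes the region's unique entering block and falls through to
// %entry.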

// Ensure that the region has a single block that branches to the exit node.
static void simplifyRegionExit(Region *R, DominatorTree *DT, LoopInfo *LI,
                               RegionInfo *RI) {
  BasicBlock *ExitBB = R->getExit();
  BasicBlock *ExitingBB = R->getExitingBlock();

  // Before:
  //
  //   (Region)   ______/  //
  //      \  |   /         //
  //       ExitBB          //
  //       /    \          //

  if (!ExitingBB) {
    SmallVector<BasicBlock *, 4> Preds;
    for (BasicBlock *P : predecessors(ExitBB))
      if (R->contains(P))
        Preds.push_back(P);

    //  Preds[0] Preds[1]      otherBB //
    //         \  |  ________/         //
    //          \ | /                  //
    //           BB                    //
    ExitingBB =
        SplitBlockPredecessors(ExitBB, Preds, ".region_exiting", DT, LI);
    //  Preds[0] Preds[1]      otherBB //
    //         \  /           /        //
    //  BB.region_exiting    /         //
    //                   \  /          //
    //                    BB           //

    if (RI)
      RI->setRegionFor(ExitingBB, R);

    // Change the exit of nested regions, but not the region itself.
    R->replaceExitRecursive(ExitingBB);
    R->replaceExit(ExitBB);
  }
  assert(ExitingBB == R->getExitingBlock());

  // After:
  //
  //     \   /                 //
  //    ExitingBB     _____/   //
  //          \      /         //
  //           ExitBB          //
  //           /    \          //
}

void polly::simplifyRegion(Region *R, DominatorTree *DT, LoopInfo *LI,
                           RegionInfo *RI) {
  assert(R && !R->isTopLevelRegion());
  assert(!RI || RI == R->getRegionInfo());
  assert((!RI || DT) &&
         "RegionInfo requires DominatorTree to be updated as well");

  simplifyRegionEntry(R, DT, LI, RI);
  simplifyRegionExit(R, DT, LI, RI);
  assert(R->isSimple());
}

// Split the block into two successive blocks.
//
// Like llvm::SplitBlock, but also preserves RegionInfo.
static BasicBlock *splitBlock(BasicBlock *Old, Instruction *SplitPt,
                              DominatorTree *DT, llvm::LoopInfo *LI,
                              RegionInfo *RI) {
  assert(Old && SplitPt);

  // Before:
  //
  //  \   /  //
  //   Old   //
  //  /   \  //

  BasicBlock *NewBlock = llvm::SplitBlock(Old, SplitPt, DT, LI);

  if (RI) {
    Region *R = RI->getRegionFor(Old);
    RI->setRegionFor(NewBlock, R);
  }

  // After:
  //
  //   \   /    //
  //    Old     //
  //     |      //
  //  NewBlock  //
  //   /   \    //

  return NewBlock;
}

void polly::splitEntryBlockForAlloca(BasicBlock *EntryBlock, DominatorTree *DT,
                                     LoopInfo *LI, RegionInfo *RI) {
  // Find the first non-alloca instruction. Every basic block has a non-alloca
  // instruction, as every well-formed basic block has a terminator.
  BasicBlock::iterator I = EntryBlock->begin();
  while (isa<AllocaInst>(I))
    ++I;

  // splitBlock updates DT, LI and RI.
  splitBlock(EntryBlock, &*I, DT, LI, RI);
}

void polly::splitEntryBlockForAlloca(BasicBlock *EntryBlock, Pass *P) {
  auto *DTWP = P->getAnalysisIfAvailable<DominatorTreeWrapperPass>();
  auto *DT = DTWP ? &DTWP->getDomTree() : nullptr;
  auto *LIWP = P->getAnalysisIfAvailable<LoopInfoWrapperPass>();
  auto *LI = LIWP ? &LIWP->getLoopInfo() : nullptr;
  RegionInfoPass *RIP = P->getAnalysisIfAvailable<RegionInfoPass>();
  RegionInfo *RI = RIP ? &RIP->getRegionInfo() : nullptr;

  // splitBlock updates DT, LI and RI.
  polly::splitEntryBlockForAlloca(EntryBlock, DT, LI, RI);
}
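
// A typical use of the two helpers above, from a pass that already computed
// the analyses (sketch only; the surrounding pass and variables are
// hypothetical):
//
//   polly::simplifyRegion(R, DT, LI, RI); // single entry and exit edge
//   polly::splitEntryBlockForAlloca(&F->getEntryBlock(), DT, LI, RI);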

/// The SCEVExpander will __not__ generate any code for an existing SDiv/SRem
/// instruction; it will just reuse it if it is referenced as a SCEVUnknown.
/// However, we do want to generate new code if the instruction is inside the
/// analyzed region and the expansion happens outside of, i.e., in front of,
/// that region. Hence, we generate the code for the SDiv/SRem operands in
/// front of the analyzed region and then create a new SDiv/SRem operation
/// there too.
struct ScopExpander : SCEVVisitor<ScopExpander, const SCEV *> {
  friend struct SCEVVisitor<ScopExpander, const SCEV *>;

  explicit ScopExpander(const Region &R, ScalarEvolution &SE,
                        const DataLayout &DL, const char *Name, ValueMapT *VMap,
                        BasicBlock *RTCBB)
      : Expander(SCEVExpander(SE, DL, Name)), SE(SE), Name(Name), R(R),
        VMap(VMap), RTCBB(RTCBB) {}

  Value *expandCodeFor(const SCEV *E, Type *Ty, Instruction *I) {
    // If we generate code in the region we will immediately fall back to the
    // SCEVExpander, otherwise we will stop at all unknowns in the SCEV and if
    // needed replace them by copies computed in the entering block.
    if (!R.contains(I))
      E = visit(E);
    return Expander.expandCodeFor(E, Ty, I);
  }

private:
  SCEVExpander Expander;
  ScalarEvolution &SE;
  const char *Name;
  const Region &R;
  ValueMapT *VMap;
  BasicBlock *RTCBB;

  const SCEV *visitGenericInst(const SCEVUnknown *E, Instruction *Inst,
                               Instruction *IP) {
    if (!Inst || !R.contains(Inst))
      return E;

    assert(!Inst->mayThrow() && !Inst->mayReadOrWriteMemory() &&
           !isa<PHINode>(Inst));

    auto *InstClone = Inst->clone();
    for (auto &Op : Inst->operands()) {
      assert(SE.isSCEVable(Op->getType()));
      auto *OpSCEV = SE.getSCEV(Op);
      auto *OpClone = expandCodeFor(OpSCEV, Op->getType(), IP);
      InstClone->replaceUsesOfWith(Op, OpClone);
    }

    InstClone->setName(Name + Inst->getName());
    InstClone->insertBefore(IP);
    return SE.getSCEV(InstClone);
  }

  const SCEV *visitUnknown(const SCEVUnknown *E) {
    // If a value mapping was given, check whether the underlying value is
    // remapped.
    Value *NewVal = VMap ? VMap->lookup(E->getValue()) : nullptr;
    if (NewVal) {
      auto *NewE = SE.getSCEV(NewVal);

      // While the mapped value might be different, the SCEV representation
      // might not be. To this end we check before we go into recursion here.
      if (E != NewE)
        return visit(NewE);
    }

    Instruction *Inst = dyn_cast<Instruction>(E->getValue());
    Instruction *IP;
    if (Inst && !R.contains(Inst))
      IP = Inst;
    else if (Inst && RTCBB->getParent() == Inst->getFunction())
      IP = RTCBB->getTerminator();
    else
      IP = RTCBB->getParent()->getEntryBlock().getTerminator();

    if (!Inst || (Inst->getOpcode() != Instruction::SRem &&
                  Inst->getOpcode() != Instruction::SDiv))
      return visitGenericInst(E, Inst, IP);

    const SCEV *LHSScev = SE.getSCEV(Inst->getOperand(0));
    const SCEV *RHSScev = SE.getSCEV(Inst->getOperand(1));

    if (!SE.isKnownNonZero(RHSScev))
      RHSScev = SE.getUMaxExpr(RHSScev, SE.getConstant(E->getType(), 1));

    Value *LHS = expandCodeFor(LHSScev, E->getType(), IP);
    Value *RHS = expandCodeFor(RHSScev, E->getType(), IP);

    Inst = BinaryOperator::Create((Instruction::BinaryOps)Inst->getOpcode(),
                                  LHS, RHS, Inst->getName() + Name, IP);
    return SE.getSCEV(Inst);
  }
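
  // Worked example for the SDiv/SRem case above (IR names are made up): a
  // SCEVUnknown wrapping
  //   %div = sdiv i64 %a, %b
  // from inside the region is not cloned verbatim. Instead, code computing
  // the operands is emitted in front of the region and the divisor is
  // clamped via umax, so the hoisted division cannot trap on a zero divisor
  // even if the region would never have been entered. Roughly:
  //   %b.cpy   = ...                        ; expansion of %b's SCEV
  //   %b.safe  = ... umax(%b.cpy, 1) ...
  //   %div.cpy = sdiv i64 %a.cpy, %b.safe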

  /// The following functions will just traverse the SCEV and rebuild it with
  /// the new operands returned by the traversal.
  ///
  ///{
  const SCEV *visitConstant(const SCEVConstant *E) { return E; }
  const SCEV *visitTruncateExpr(const SCEVTruncateExpr *E) {
    return SE.getTruncateExpr(visit(E->getOperand()), E->getType());
  }
  const SCEV *visitZeroExtendExpr(const SCEVZeroExtendExpr *E) {
    return SE.getZeroExtendExpr(visit(E->getOperand()), E->getType());
  }
  const SCEV *visitSignExtendExpr(const SCEVSignExtendExpr *E) {
    return SE.getSignExtendExpr(visit(E->getOperand()), E->getType());
  }
  const SCEV *visitUDivExpr(const SCEVUDivExpr *E) {
    auto *RHSScev = visit(E->getRHS());
    if (!SE.isKnownNonZero(RHSScev))
      RHSScev = SE.getUMaxExpr(RHSScev, SE.getConstant(E->getType(), 1));
    return SE.getUDivExpr(visit(E->getLHS()), RHSScev);
  }
  const SCEV *visitAddExpr(const SCEVAddExpr *E) {
    SmallVector<const SCEV *, 4> NewOps;
    for (const SCEV *Op : E->operands())
      NewOps.push_back(visit(Op));
    return SE.getAddExpr(NewOps);
  }
  const SCEV *visitMulExpr(const SCEVMulExpr *E) {
    SmallVector<const SCEV *, 4> NewOps;
    for (const SCEV *Op : E->operands())
      NewOps.push_back(visit(Op));
    return SE.getMulExpr(NewOps);
  }
  const SCEV *visitUMaxExpr(const SCEVUMaxExpr *E) {
    SmallVector<const SCEV *, 4> NewOps;
    for (const SCEV *Op : E->operands())
      NewOps.push_back(visit(Op));
    return SE.getUMaxExpr(NewOps);
  }
  const SCEV *visitSMaxExpr(const SCEVSMaxExpr *E) {
    SmallVector<const SCEV *, 4> NewOps;
    for (const SCEV *Op : E->operands())
      NewOps.push_back(visit(Op));
    return SE.getSMaxExpr(NewOps);
  }
  const SCEV *visitAddRecExpr(const SCEVAddRecExpr *E) {
    SmallVector<const SCEV *, 4> NewOps;
    for (const SCEV *Op : E->operands())
      NewOps.push_back(visit(Op));
    return SE.getAddRecExpr(NewOps, E->getLoop(), E->getNoWrapFlags());
  }
  ///}
};

Value *polly::expandCodeFor(Scop &S, ScalarEvolution &SE, const DataLayout &DL,
                            const char *Name, const SCEV *E, Type *Ty,
                            Instruction *IP, ValueMapT *VMap,
                            BasicBlock *RTCBB) {
  ScopExpander Expander(S.getRegion(), SE, DL, Name, VMap, RTCBB);
  return Expander.expandCodeFor(E, Ty, IP);
}
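
// Sketch of how the wrapper above is typically invoked when emitting a
// run-time check in front of a SCoP (all value names here are hypothetical):
//
//   Value *Bound = polly::expandCodeFor(S, SE, DL, "polly.", BoundSCEV,
//                                       Builder.getInt64Ty(), InsertPt,
//                                       /*VMap=*/nullptr, RTCBB);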

bool polly::isErrorBlock(BasicBlock &BB, const Region &R, LoopInfo &LI,
                         const DominatorTree &DT) {
  if (!PollyAllowErrorBlocks)
    return false;

  if (isa<UnreachableInst>(BB.getTerminator()))
    return true;

  if (LI.isLoopHeader(&BB))
    return false;

  // Basic blocks that are always executed are not considered error blocks,
  // as their execution cannot be a rare event.
  bool DominatesAllPredecessors = true;
  if (R.isTopLevelRegion()) {
    for (BasicBlock &I : *R.getEntry()->getParent())
      if (isa<ReturnInst>(I.getTerminator()) && !DT.dominates(&BB, &I))
        DominatesAllPredecessors = false;
  } else {
    for (auto Pred : predecessors(R.getExit()))
      if (R.contains(Pred) && !DT.dominates(&BB, Pred))
        DominatesAllPredecessors = false;
  }

  if (DominatesAllPredecessors)
    return false;

  for (Instruction &Inst : BB)
    if (CallInst *CI = dyn_cast<CallInst>(&Inst)) {
      if (isIgnoredIntrinsic(CI))
        continue;

      // memset, memcpy and memmove are modeled intrinsics.
      if (isa<MemSetInst>(CI) || isa<MemTransferInst>(CI))
        continue;

      if (!CI->doesNotAccessMemory())
        return true;
      if (CI->doesNotReturn())
        return true;
    }

  return false;
}

Value *polly::getConditionFromTerminator(TerminatorInst *TI) {
  if (BranchInst *BR = dyn_cast<BranchInst>(TI)) {
    if (BR->isUnconditional())
      return ConstantInt::getTrue(Type::getInt1Ty(TI->getContext()));

    return BR->getCondition();
  }

  if (SwitchInst *SI = dyn_cast<SwitchInst>(TI))
    return SI->getCondition();

  return nullptr;
}

bool polly::isHoistableLoad(LoadInst *LInst, Region &R, LoopInfo &LI,
                            ScalarEvolution &SE, const DominatorTree &DT) {
  Loop *L = LI.getLoopFor(LInst->getParent());
  auto *Ptr = LInst->getPointerOperand();
  const SCEV *PtrSCEV = SE.getSCEVAtScope(Ptr, L);
  while (L && R.contains(L)) {
    if (!SE.isLoopInvariant(PtrSCEV, L))
      return false;
    L = L->getParentLoop();
  }

  for (auto *User : Ptr->users()) {
    auto *UserI = dyn_cast<Instruction>(User);
    if (!UserI || !R.contains(UserI))
      continue;
    if (!UserI->mayWriteToMemory())
      continue;

    auto &BB = *UserI->getParent();
    if (DT.dominates(&BB, LInst->getParent()))
      return false;

    bool DominatesAllPredecessors = true;
    if (R.isTopLevelRegion()) {
      for (BasicBlock &I : *R.getEntry()->getParent())
        if (isa<ReturnInst>(I.getTerminator()) && !DT.dominates(&BB, &I))
          DominatesAllPredecessors = false;
    } else {
      for (auto Pred : predecessors(R.getExit()))
        if (R.contains(Pred) && !DT.dominates(&BB, Pred))
          DominatesAllPredecessors = false;
    }

    if (!DominatesAllPredecessors)
      continue;

    return false;
  }

  return true;
}
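
// Example for isHoistableLoad (C-level sketch, names invented): in
//
//   for (long i = 0; i < n; i++)
//     Sum += (*A)[i]; // the load of the pointer A is loop-invariant
//
// the load of A may be preloaded in front of the region, provided no write
// to A inside the region either dominates the load or is executed on every
// path to the region's exit.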

bool polly::isIgnoredIntrinsic(const Value *V) {
  if (auto *IT = dyn_cast<IntrinsicInst>(V)) {
    switch (IT->getIntrinsicID()) {
    // Lifetime markers are supported/ignored.
    case llvm::Intrinsic::lifetime_start:
    case llvm::Intrinsic::lifetime_end:
    // Invariant markers are supported/ignored.
    case llvm::Intrinsic::invariant_start:
    case llvm::Intrinsic::invariant_end:
    // Some misc annotations are supported/ignored.
    case llvm::Intrinsic::var_annotation:
    case llvm::Intrinsic::ptr_annotation:
    case llvm::Intrinsic::annotation:
    case llvm::Intrinsic::donothing:
    case llvm::Intrinsic::assume:
    // Some debug info intrinsics are supported/ignored.
    case llvm::Intrinsic::dbg_value:
    case llvm::Intrinsic::dbg_declare:
      return true;
    default:
      break;
    }
  }
  return false;
}

bool polly::canSynthesize(const Value *V, const Scop &S, ScalarEvolution *SE,
                          Loop *Scope) {
  if (!V || !SE->isSCEVable(V->getType()))
    return false;

  const InvariantLoadsSetTy &ILS = S.getRequiredInvariantLoads();
  if (const SCEV *Scev = SE->getSCEVAtScope(const_cast<Value *>(V), Scope))
    if (!isa<SCEVCouldNotCompute>(Scev))
      if (!hasScalarDepsInsideRegion(Scev, &S.getRegion(), Scope, false, ILS))
        return true;

  return false;
}

llvm::BasicBlock *polly::getUseBlock(const llvm::Use &U) {
  Instruction *UI = dyn_cast<Instruction>(U.getUser());
  if (!UI)
    return nullptr;

  if (PHINode *PHI = dyn_cast<PHINode>(UI))
    return PHI->getIncomingBlock(U);

  return UI->getParent();
}

// Extract the subscript expressions and (inner) dimension sizes from a GEP.
// For example (types and names for illustration only), for
//   %gep = getelementptr [32 x [64 x float]], [32 x [64 x float]]* %A,
//          i64 0, i64 %i, i64 %j
// this returns the subscripts {%i, %j} and the sizes {64}: the leading zero
// index is dropped, and the outermost dimension size is not needed to
// linearize the access.
std::tuple<std::vector<const SCEV *>, std::vector<int>>
polly::getIndexExpressionsFromGEP(GetElementPtrInst *GEP, ScalarEvolution &SE) {
  std::vector<const SCEV *> Subscripts;
  std::vector<int> Sizes;

  Type *Ty = GEP->getPointerOperandType();

  bool DroppedFirstDim = false;

  for (unsigned i = 1; i < GEP->getNumOperands(); i++) {

    const SCEV *Expr = SE.getSCEV(GEP->getOperand(i));

    if (i == 1) {
      if (auto *PtrTy = dyn_cast<PointerType>(Ty)) {
        Ty = PtrTy->getElementType();
      } else if (auto *ArrayTy = dyn_cast<ArrayType>(Ty)) {
        Ty = ArrayTy->getElementType();
      } else {
        Subscripts.clear();
        Sizes.clear();
        break;
      }
      if (auto *Const = dyn_cast<SCEVConstant>(Expr))
        if (Const->getValue()->isZero()) {
          DroppedFirstDim = true;
          continue;
        }
      Subscripts.push_back(Expr);
      continue;
    }

    auto *ArrayTy = dyn_cast<ArrayType>(Ty);
    if (!ArrayTy) {
      Subscripts.clear();
      Sizes.clear();
      break;
    }

    Subscripts.push_back(Expr);
    if (!(DroppedFirstDim && i == 2))
      Sizes.push_back(ArrayTy->getNumElements());

    Ty = ArrayTy->getElementType();
  }

  return std::make_tuple(Subscripts, Sizes);
}

llvm::Loop *polly::getFirstNonBoxedLoopFor(llvm::Loop *L, llvm::LoopInfo &LI,
                                           const BoxedLoopsSetTy &BoxedLoops) {
  while (BoxedLoops.count(L))
    L = L->getParentLoop();
  return L;
}

llvm::Loop *polly::getFirstNonBoxedLoopFor(llvm::BasicBlock *BB,
                                           llvm::LoopInfo &LI,
                                           const BoxedLoopsSetTy &BoxedLoops) {
  Loop *L = LI.getLoopFor(BB);
  return getFirstNonBoxedLoopFor(L, LI, BoxedLoops);
}
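
// Usage sketch for the boxed-loop helpers above (the BoxedLoops set is
// assumed to come from SCoP detection of non-affine subregions):
//
//   if (Loop *L = polly::getFirstNonBoxedLoopFor(BB, LI, BoxedLoops))
//     ...; // innermost surrounding loop of BB that is modeled by the SCoP
//
// The result may be nullptr when BB is not inside any non-boxed loop.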