//===- FunctionAttrs.cpp - Pass which marks function attributes ----------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
/// \file
/// This file implements interprocedural passes which walk the
/// call-graph deducing and/or propagating function attributes.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/FunctionAttrs.h"
#include "llvm/ADT/SCCIterator.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BasicAliasAnalysis.h"
#include "llvm/Analysis/CGSCCPassManager.h"
#include "llvm/Analysis/CallGraph.h"
#include "llvm/Analysis/CallGraphSCCPass.h"
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/Analysis/LazyCallGraph.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/PassManager.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Use.h"
#include "llvm/IR/User.h"
#include "llvm/IR/Value.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/IPO.h"
#include <cassert>
#include <iterator>
#include <map>
#include <vector>

using namespace llvm;

#define DEBUG_TYPE "functionattrs"

STATISTIC(NumReadNone, "Number of functions marked readnone");
STATISTIC(NumReadOnly, "Number of functions marked readonly");
STATISTIC(NumNoCapture, "Number of arguments marked nocapture");
STATISTIC(NumReturned, "Number of arguments marked returned");
STATISTIC(NumReadNoneArg, "Number of arguments marked readnone");
STATISTIC(NumReadOnlyArg, "Number of arguments marked readonly");
STATISTIC(NumNoAlias, "Number of function returns marked noalias");
STATISTIC(NumNonNullReturn, "Number of function returns marked nonnull");
STATISTIC(NumNoRecurse, "Number of functions marked as norecurse");

// FIXME: This is disabled by default to avoid exposing security vulnerabilities
// in C/C++ code compiled by clang:
// http://lists.llvm.org/pipermail/cfe-dev/2017-January/052066.html
static cl::opt<bool> EnableNonnullArgPropagation(
    "enable-nonnull-arg-prop", cl::Hidden,
    cl::desc("Try to propagate nonnull argument attributes from callsites to "
             "caller functions."));

namespace {

using SCCNodeSet = SmallSetVector<Function *, 8>;

} // end anonymous namespace

/// Returns the memory access attribute for function F using AAR for AA results,
/// where SCCNodes is the current SCC.
///
/// If ThisBody is true, this function may examine the function body and will
/// return a result pertaining to this copy of the function. If it is false, the
/// result will be based only on AA results for the function declaration; it
/// will be assumed that some other (perhaps less optimized) version of the
/// function may be selected at link time.
static MemoryAccessKind checkFunctionMemoryAccess(Function &F, bool ThisBody,
                                                  AAResults &AAR,
                                                  const SCCNodeSet &SCCNodes) {
  FunctionModRefBehavior MRB = AAR.getModRefBehavior(&F);
  if (MRB == FMRB_DoesNotAccessMemory)
    // Already perfect!
    return MAK_ReadNone;

  if (!ThisBody) {
    if (AliasAnalysis::onlyReadsMemory(MRB))
      return MAK_ReadOnly;

    // Conservatively assume it writes to memory.
    return MAK_MayWrite;
  }

  // Scan the function body for instructions that may read or write memory.
  bool ReadsMemory = false;
  for (inst_iterator II = inst_begin(F), E = inst_end(F); II != E; ++II) {
    Instruction *I = &*II;

    // Some instructions can be ignored even if they read or write memory.
    // Detect these now, skipping to the next instruction if one is found.
    CallSite CS(cast<Value>(I));
    if (CS) {
      // Ignore calls to functions in the same SCC, as long as the call sites
      // don't have operand bundles. Calls with operand bundles are allowed to
      // have memory effects not described by the memory effects of the call
      // target.
      if (!CS.hasOperandBundles() && CS.getCalledFunction() &&
          SCCNodes.count(CS.getCalledFunction()))
        continue;
      FunctionModRefBehavior MRB = AAR.getModRefBehavior(CS);
      ModRefInfo MRI = createModRefInfo(MRB);

      // If the call doesn't access memory, we're done.
      if (isNoModRef(MRI))
        continue;

      if (!AliasAnalysis::onlyAccessesArgPointees(MRB)) {
        // The call could access any memory. If that includes writes, give up.
        if (isModSet(MRI))
          return MAK_MayWrite;
        // If it reads, note it.
        if (isRefSet(MRI))
          ReadsMemory = true;
        continue;
      }

      // Check whether all pointer arguments point to local memory, and
      // ignore calls that only access local memory.
      for (CallSite::arg_iterator CI = CS.arg_begin(), CE = CS.arg_end();
           CI != CE; ++CI) {
        Value *Arg = *CI;
        if (!Arg->getType()->isPtrOrPtrVectorTy())
          continue;

        AAMDNodes AAInfo;
        I->getAAMetadata(AAInfo);
        MemoryLocation Loc(Arg, MemoryLocation::UnknownSize, AAInfo);

        // Skip accesses to local or constant memory as they don't impact the
        // externally visible mod/ref behavior.
        if (AAR.pointsToConstantMemory(Loc, /*OrLocal=*/true))
          continue;

        if (isModSet(MRI))
          // Writes non-local memory. Give up.
          return MAK_MayWrite;
        if (isRefSet(MRI))
          // Ok, it reads non-local memory.
          ReadsMemory = true;
      }
      continue;
    } else if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
      // Ignore non-volatile loads from local memory. (Atomic is okay here.)
      if (!LI->isVolatile()) {
        MemoryLocation Loc = MemoryLocation::get(LI);
        if (AAR.pointsToConstantMemory(Loc, /*OrLocal=*/true))
          continue;
      }
    } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
      // Ignore non-volatile stores to local memory. (Atomic is okay here.)
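      // Stores to such memory cannot be observed from outside the function, so
      // they do not affect the externally visible mod/ref behavior we deduce.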
      if (!SI->isVolatile()) {
        MemoryLocation Loc = MemoryLocation::get(SI);
        if (AAR.pointsToConstantMemory(Loc, /*OrLocal=*/true))
          continue;
      }
    } else if (VAArgInst *VI = dyn_cast<VAArgInst>(I)) {
      // Ignore vaargs on local memory.
      MemoryLocation Loc = MemoryLocation::get(VI);
      if (AAR.pointsToConstantMemory(Loc, /*OrLocal=*/true))
        continue;
    }

    // Any remaining instructions need to be taken seriously! Check if they
    // read or write memory.
    if (I->mayWriteToMemory())
      // Writes memory. Just give up.
      return MAK_MayWrite;

    // If this instruction may read memory, remember that.
    ReadsMemory |= I->mayReadFromMemory();
  }

  return ReadsMemory ? MAK_ReadOnly : MAK_ReadNone;
}

MemoryAccessKind llvm::computeFunctionBodyMemoryAccess(Function &F,
                                                       AAResults &AAR) {
  return checkFunctionMemoryAccess(F, /*ThisBody=*/true, AAR, {});
}

/// Deduce readonly/readnone attributes for the SCC.
template <typename AARGetterT>
static bool addReadAttrs(const SCCNodeSet &SCCNodes, AARGetterT &&AARGetter) {
  // Check if any of the functions in the SCC read or write memory. If they
  // write memory then they can't be marked readnone or readonly.
  bool ReadsMemory = false;
  for (Function *F : SCCNodes) {
    // Call the callable parameter to look up AA results for this function.
    AAResults &AAR = AARGetter(*F);

    // Non-exact function definitions may not be selected at link time, and an
    // alternative version that writes to memory may be selected. See the
    // comment on GlobalValue::isDefinitionExact for more details.
    switch (checkFunctionMemoryAccess(*F, F->hasExactDefinition(),
                                      AAR, SCCNodes)) {
    case MAK_MayWrite:
      return false;
    case MAK_ReadOnly:
      ReadsMemory = true;
      break;
    case MAK_ReadNone:
      // Nothing to do!
      break;
    }
  }

  // Success! Functions in this SCC do not access memory, or only read memory.
  // Give them the appropriate attribute.
  bool MadeChange = false;
  for (Function *F : SCCNodes) {
    if (F->doesNotAccessMemory())
      // Already perfect!
      continue;

    if (F->onlyReadsMemory() && ReadsMemory)
      // No change.
      continue;

    MadeChange = true;

    // Clear out any existing attributes.
    F->removeFnAttr(Attribute::ReadOnly);
    F->removeFnAttr(Attribute::ReadNone);

    // Add in the new attribute.
    F->addFnAttr(ReadsMemory ? Attribute::ReadOnly : Attribute::ReadNone);

    if (ReadsMemory)
      ++NumReadOnly;
    else
      ++NumReadNone;
  }

  return MadeChange;
}

namespace {

/// For a given pointer Argument, this retains a list of Arguments of functions
/// in the same SCC that the pointer data flows into. We use this to build an
/// SCC of the arguments.
struct ArgumentGraphNode {
  Argument *Definition;
  SmallVector<ArgumentGraphNode *, 4> Uses;
};

class ArgumentGraph {
  // We store pointers to ArgumentGraphNode objects, so it's important that
  // they not move around upon insert.
  using ArgumentMapTy = std::map<Argument *, ArgumentGraphNode>;

  ArgumentMapTy ArgumentMap;

  // There is no root node for the argument graph, in fact:
  //   void f(int *x, int *y) { if (...) f(x, y); }
  // is an example where the graph is disconnected. The SCCIterator requires a
  // single entry point, so we maintain a fake ("synthetic") root node that
  // uses every node. Because the graph is directed and nothing points into
  // the root, it will not participate in any SCCs (except for its own).
  ArgumentGraphNode SyntheticRoot;

public:
  ArgumentGraph() { SyntheticRoot.Definition = nullptr; }

  using iterator = SmallVectorImpl<ArgumentGraphNode *>::iterator;

  iterator begin() { return SyntheticRoot.Uses.begin(); }
  iterator end() { return SyntheticRoot.Uses.end(); }
  ArgumentGraphNode *getEntryNode() { return &SyntheticRoot; }

  ArgumentGraphNode *operator[](Argument *A) {
    ArgumentGraphNode &Node = ArgumentMap[A];
    Node.Definition = A;
    SyntheticRoot.Uses.push_back(&Node);
    return &Node;
  }
};

/// This tracker checks whether callees are in the SCC, and if so it does not
/// consider that a capture, instead adding it to the "Uses" list and
/// continuing with the analysis.
struct ArgumentUsesTracker : public CaptureTracker {
  ArgumentUsesTracker(const SCCNodeSet &SCCNodes) : SCCNodes(SCCNodes) {}

  void tooManyUses() override { Captured = true; }

  bool captured(const Use *U) override {
    CallSite CS(U->getUser());
    if (!CS.getInstruction()) {
      Captured = true;
      return true;
    }

    Function *F = CS.getCalledFunction();
    if (!F || !F->hasExactDefinition() || !SCCNodes.count(F)) {
      Captured = true;
      return true;
    }

    // Note: the callee and the two successor blocks *follow* the argument
    // operands. This means there is no need to adjust UseIndex to account for
    // these.

    unsigned UseIndex =
        std::distance(const_cast<const Use *>(CS.arg_begin()), U);

    assert(UseIndex < CS.data_operands_size() &&
           "Indirect function calls should have been filtered above!");

    if (UseIndex >= CS.getNumArgOperands()) {
      // Data operand, but not an argument operand -- must be a bundle operand
      assert(CS.hasOperandBundles() && "Must be!");

      // CaptureTracking told us that we're being captured by an operand bundle
      // use. In this case it does not matter if the callee is within our SCC
      // or not -- we've been captured in some unknown way, and we have to be
      // conservative.
      Captured = true;
      return true;
    }

    if (UseIndex >= F->arg_size()) {
      assert(F->isVarArg() && "More params than args in non-varargs call");
      Captured = true;
      return true;
    }

    Uses.push_back(&*std::next(F->arg_begin(), UseIndex));
    return false;
  }

  // True only if certainly captured (used outside our SCC).
  bool Captured = false;

  // Uses within our SCC.
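  // These are the formal arguments of callee functions inside the SCC that
  // this argument flows into; addArgumentAttrs turns them into edges of the
  // ArgumentGraph above.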
  SmallVector<Argument *, 4> Uses;

  const SCCNodeSet &SCCNodes;
};

} // end anonymous namespace

namespace llvm {

template <> struct GraphTraits<ArgumentGraphNode *> {
  using NodeRef = ArgumentGraphNode *;
  using ChildIteratorType = SmallVectorImpl<ArgumentGraphNode *>::iterator;

  static NodeRef getEntryNode(NodeRef A) { return A; }
  static ChildIteratorType child_begin(NodeRef N) { return N->Uses.begin(); }
  static ChildIteratorType child_end(NodeRef N) { return N->Uses.end(); }
};

template <>
struct GraphTraits<ArgumentGraph *> : public GraphTraits<ArgumentGraphNode *> {
  static NodeRef getEntryNode(ArgumentGraph *AG) { return AG->getEntryNode(); }

  static ChildIteratorType nodes_begin(ArgumentGraph *AG) {
    return AG->begin();
  }

  static ChildIteratorType nodes_end(ArgumentGraph *AG) { return AG->end(); }
};

} // end namespace llvm

/// Returns Attribute::None, Attribute::ReadOnly or Attribute::ReadNone.
static Attribute::AttrKind
determinePointerReadAttrs(Argument *A,
                          const SmallPtrSet<Argument *, 8> &SCCNodes) {
  SmallVector<Use *, 32> Worklist;
  SmallSet<Use *, 32> Visited;

  // inalloca arguments are always clobbered by the call.
  if (A->hasInAllocaAttr())
    return Attribute::None;

  bool IsRead = false;
  // We don't need to track IsWritten. If A is written to, return immediately.

  for (Use &U : A->uses()) {
    Visited.insert(&U);
    Worklist.push_back(&U);
  }

  while (!Worklist.empty()) {
    Use *U = Worklist.pop_back_val();
    Instruction *I = cast<Instruction>(U->getUser());

    switch (I->getOpcode()) {
    case Instruction::BitCast:
    case Instruction::GetElementPtr:
    case Instruction::PHI:
    case Instruction::Select:
    case Instruction::AddrSpaceCast:
      // The original value is not read/written via this if the new value isn't.
      for (Use &UU : I->uses())
        if (Visited.insert(&UU).second)
          Worklist.push_back(&UU);
      break;

    case Instruction::Call:
    case Instruction::Invoke: {
      bool Captures = true;

      if (I->getType()->isVoidTy())
        Captures = false;

      auto AddUsersToWorklistIfCapturing = [&] {
        if (Captures)
          for (Use &UU : I->uses())
            if (Visited.insert(&UU).second)
              Worklist.push_back(&UU);
      };

      CallSite CS(I);
      if (CS.doesNotAccessMemory()) {
        AddUsersToWorklistIfCapturing();
        continue;
      }

      Function *F = CS.getCalledFunction();
      if (!F) {
        if (CS.onlyReadsMemory()) {
          IsRead = true;
          AddUsersToWorklistIfCapturing();
          continue;
        }
        return Attribute::None;
      }

      // Note: the callee and the two successor blocks *follow* the argument
      // operands. This means there is no need to adjust UseIndex to account
      // for these.

      unsigned UseIndex = std::distance(CS.arg_begin(), U);

      // U cannot be the callee operand use: since we're exploring the
      // transitive uses of an Argument, having such a use be a callee would
      // imply the CallSite is an indirect call or invoke; and we'd take the
      // early exit above.
      assert(UseIndex < CS.data_operands_size() &&
             "Data operand use expected!");

      bool IsOperandBundleUse = UseIndex >= CS.getNumArgOperands();

      if (UseIndex >= F->arg_size() && !IsOperandBundleUse) {
        assert(F->isVarArg() && "More params than args in non-varargs call");
        return Attribute::None;
      }

      Captures &= !CS.doesNotCapture(UseIndex);

      // Since the optimizer (by design) cannot see the data flow corresponding
      // to an operand bundle use, these cannot participate in the optimistic
      // SCC analysis. Instead, we model the operand bundle uses as arguments
      // in a call to a function external to the SCC.
      if (IsOperandBundleUse ||
          !SCCNodes.count(&*std::next(F->arg_begin(), UseIndex))) {

        // The accessors used on CallSite here do the right thing for calls and
        // invokes with operand bundles.

        if (!CS.onlyReadsMemory() && !CS.onlyReadsMemory(UseIndex))
          return Attribute::None;
        if (!CS.doesNotAccessMemory(UseIndex))
          IsRead = true;
      }

      AddUsersToWorklistIfCapturing();
      break;
    }

    case Instruction::Load:
      // A volatile load has side effects beyond what readonly guarantees, so
      // it cannot be ignored here.
      if (cast<LoadInst>(I)->isVolatile())
        return Attribute::None;

      IsRead = true;
      break;

    case Instruction::ICmp:
    case Instruction::Ret:
      break;

    default:
      return Attribute::None;
    }
  }

  return IsRead ? Attribute::ReadOnly : Attribute::ReadNone;
}

/// Deduce returned attributes for the SCC.
static bool addArgumentReturnedAttrs(const SCCNodeSet &SCCNodes) {
  bool Changed = false;

  // Check each function in turn, determining if an argument is always returned.
  for (Function *F : SCCNodes) {
    // We can infer and propagate function attributes only when we know that the
    // definition we'll get at link time is *exactly* the definition we see now.
    // For more details, see GlobalValue::mayBeDerefined.
    if (!F->hasExactDefinition())
      continue;

    if (F->getReturnType()->isVoidTy())
      continue;

    // There is nothing to do if an argument is already marked as 'returned'.
    if (llvm::any_of(F->args(),
                     [](const Argument &Arg) { return Arg.hasReturnedAttr(); }))
      continue;

    auto FindRetArg = [&]() -> Value * {
      Value *RetArg = nullptr;
      for (BasicBlock &BB : *F)
        if (auto *Ret = dyn_cast<ReturnInst>(BB.getTerminator())) {
          // Note that stripPointerCasts should look through functions with
          // returned arguments.
          Value *RetVal = Ret->getReturnValue()->stripPointerCasts();
          if (!isa<Argument>(RetVal) || RetVal->getType() != F->getReturnType())
            return nullptr;

          if (!RetArg)
            RetArg = RetVal;
          else if (RetArg != RetVal)
            return nullptr;
        }

      return RetArg;
    };

    if (Value *RetArg = FindRetArg()) {
      auto *A = cast<Argument>(RetArg);
      A->addAttr(Attribute::Returned);
      ++NumReturned;
      Changed = true;
    }
  }

  return Changed;
}

/// If a callsite has arguments that are also arguments to the parent function,
/// try to propagate attributes from the callsite's arguments to the parent's
/// arguments. This may be important because inlining can cause information loss
/// when attribute knowledge disappears with the inlined call.
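///
/// Illustrative sketch (hypothetical IR, not taken from this file): given
///   declare void @callee(i32* nonnull)
///   define void @caller(i32* %p) {
///   entry:
///     call void @callee(i32* %p)
///     ...
/// the entry-block call is guaranteed to execute whenever @caller does, so %p
/// can also be marked nonnull on @caller (gated by -enable-nonnull-arg-prop).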
static bool addArgumentAttrsFromCallsites(Function &F) {
  if (!EnableNonnullArgPropagation)
    return false;

  bool Changed = false;

  // For an argument attribute to transfer from a callsite to the parent, the
  // call must be guaranteed to execute every time the parent is called.
  // Conservatively, just check for calls in the entry block that are guaranteed
  // to execute.
  // TODO: This could be enhanced by testing if the callsite post-dominates the
  // entry block or by doing simple forward walks or backward walks to the
  // callsite.
  BasicBlock &Entry = F.getEntryBlock();
  for (Instruction &I : Entry) {
    if (auto CS = CallSite(&I)) {
      if (auto *CalledFunc = CS.getCalledFunction()) {
        for (auto &CSArg : CalledFunc->args()) {
          if (!CSArg.hasNonNullAttr())
            continue;

          // If the non-null callsite argument operand is an argument to 'F'
          // (the caller) and the call is guaranteed to execute, then the value
          // must be non-null throughout 'F'.
          auto *FArg = dyn_cast<Argument>(CS.getArgOperand(CSArg.getArgNo()));
          if (FArg && !FArg->hasNonNullAttr()) {
            FArg->addAttr(Attribute::NonNull);
            Changed = true;
          }
        }
      }
    }
    if (!isGuaranteedToTransferExecutionToSuccessor(&I))
      break;
  }

  return Changed;
}

/// Deduce nocapture attributes for the SCC.
static bool addArgumentAttrs(const SCCNodeSet &SCCNodes) {
  bool Changed = false;

  ArgumentGraph AG;

  // Check each function in turn, determining which pointer arguments are not
  // captured.
  for (Function *F : SCCNodes) {
    // We can infer and propagate function attributes only when we know that the
    // definition we'll get at link time is *exactly* the definition we see now.
    // For more details, see GlobalValue::mayBeDerefined.
    if (!F->hasExactDefinition())
      continue;

    Changed |= addArgumentAttrsFromCallsites(*F);

    // Functions that are readonly (or readnone) and nounwind and don't return
    // a value can't capture arguments. Don't analyze them.
    if (F->onlyReadsMemory() && F->doesNotThrow() &&
        F->getReturnType()->isVoidTy()) {
      for (Function::arg_iterator A = F->arg_begin(), E = F->arg_end(); A != E;
           ++A) {
        if (A->getType()->isPointerTy() && !A->hasNoCaptureAttr()) {
          A->addAttr(Attribute::NoCapture);
          ++NumNoCapture;
          Changed = true;
        }
      }
      continue;
    }

    for (Function::arg_iterator A = F->arg_begin(), E = F->arg_end(); A != E;
         ++A) {
      if (!A->getType()->isPointerTy())
        continue;
      bool HasNonLocalUses = false;
      if (!A->hasNoCaptureAttr()) {
        ArgumentUsesTracker Tracker(SCCNodes);
        PointerMayBeCaptured(&*A, &Tracker);
        if (!Tracker.Captured) {
          if (Tracker.Uses.empty()) {
            // If it's trivially not captured, mark it nocapture now.
            A->addAttr(Attribute::NoCapture);
            ++NumNoCapture;
            Changed = true;
          } else {
            // If it's not trivially captured and not trivially not captured,
            // then it must be calling into another function in our SCC. Save
            // its particulars for Argument-SCC analysis later.
            ArgumentGraphNode *Node = AG[&*A];
            for (Argument *Use : Tracker.Uses) {
              Node->Uses.push_back(AG[Use]);
              if (Use != &*A)
                HasNonLocalUses = true;
            }
          }
        }
        // Otherwise, it's captured. Don't bother doing SCC analysis on it.
      }
      if (!HasNonLocalUses && !A->onlyReadsMemory()) {
        // Can we determine that it's readonly/readnone without doing an SCC?
        // Note that we don't allow any calls at all here, or else our result
        // will be dependent on the iteration order through the functions in the
        // SCC.
        SmallPtrSet<Argument *, 8> Self;
        Self.insert(&*A);
        Attribute::AttrKind R = determinePointerReadAttrs(&*A, Self);
        if (R != Attribute::None) {
          A->addAttr(R);
          Changed = true;
          R == Attribute::ReadOnly ? ++NumReadOnlyArg : ++NumReadNoneArg;
        }
      }
    }
  }

  // The graph we've collected is partial because we stopped scanning for
  // argument uses once we solved the argument trivially. These partial nodes
  // show up as ArgumentGraphNode objects with an empty Uses list, and for
  // these nodes the final decision about whether they capture has already been
  // made. If the definition doesn't have a 'nocapture' attribute by now, it
  // captures.

  for (scc_iterator<ArgumentGraph *> I = scc_begin(&AG); !I.isAtEnd(); ++I) {
    const std::vector<ArgumentGraphNode *> &ArgumentSCC = *I;
    if (ArgumentSCC.size() == 1) {
      if (!ArgumentSCC[0]->Definition)
        continue; // synthetic root node

      // eg. "void f(int* x) { if (...) f(x); }"
      if (ArgumentSCC[0]->Uses.size() == 1 &&
          ArgumentSCC[0]->Uses[0] == ArgumentSCC[0]) {
        Argument *A = ArgumentSCC[0]->Definition;
        A->addAttr(Attribute::NoCapture);
        ++NumNoCapture;
        Changed = true;
      }
      continue;
    }

    bool SCCCaptured = false;
    for (auto I = ArgumentSCC.begin(), E = ArgumentSCC.end();
         I != E && !SCCCaptured; ++I) {
      ArgumentGraphNode *Node = *I;
      if (Node->Uses.empty()) {
        if (!Node->Definition->hasNoCaptureAttr())
          SCCCaptured = true;
      }
    }
    if (SCCCaptured)
      continue;

    SmallPtrSet<Argument *, 8> ArgumentSCCNodes;
    // Fill ArgumentSCCNodes with the elements of the ArgumentSCC. Used for
    // quickly looking up whether a given Argument is in this ArgumentSCC.
    for (ArgumentGraphNode *I : ArgumentSCC) {
      ArgumentSCCNodes.insert(I->Definition);
    }

    for (auto I = ArgumentSCC.begin(), E = ArgumentSCC.end();
         I != E && !SCCCaptured; ++I) {
      ArgumentGraphNode *N = *I;
      for (ArgumentGraphNode *Use : N->Uses) {
        Argument *A = Use->Definition;
        if (A->hasNoCaptureAttr() || ArgumentSCCNodes.count(A))
          continue;
        SCCCaptured = true;
        break;
      }
    }
    if (SCCCaptured)
      continue;

    for (unsigned i = 0, e = ArgumentSCC.size(); i != e; ++i) {
      Argument *A = ArgumentSCC[i]->Definition;
      A->addAttr(Attribute::NoCapture);
      ++NumNoCapture;
      Changed = true;
    }

    // We also want to compute readonly/readnone. With a small number of false
    // negatives, we can assume that any pointer which is captured isn't going
    // to be provably readonly or readnone, since by definition we can't
    // analyze all uses of a captured pointer.
    //
    // The false negatives happen when the pointer is captured by a function
    // that promises readonly/readnone behaviour on the pointer, then the
    // pointer's lifetime ends before anything that writes to arbitrary memory.
    // Also, a readonly/readnone pointer may be returned, but returning a
    // pointer is capturing it.
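    // Consequently, the read attribute below is only computed for argument
    // SCCs that were proven not captured above.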

    Attribute::AttrKind ReadAttr = Attribute::ReadNone;
    for (unsigned i = 0, e = ArgumentSCC.size(); i != e; ++i) {
      Argument *A = ArgumentSCC[i]->Definition;
      Attribute::AttrKind K = determinePointerReadAttrs(A, ArgumentSCCNodes);
      if (K == Attribute::ReadNone)
        continue;
      if (K == Attribute::ReadOnly) {
        ReadAttr = Attribute::ReadOnly;
        continue;
      }
      ReadAttr = K;
      break;
    }

    if (ReadAttr != Attribute::None) {
      for (unsigned i = 0, e = ArgumentSCC.size(); i != e; ++i) {
        Argument *A = ArgumentSCC[i]->Definition;
        // Clear out existing readonly/readnone attributes
        A->removeAttr(Attribute::ReadOnly);
        A->removeAttr(Attribute::ReadNone);
        A->addAttr(ReadAttr);
        ReadAttr == Attribute::ReadOnly ? ++NumReadOnlyArg : ++NumReadNoneArg;
        Changed = true;
      }
    }
  }

  return Changed;
}

/// Tests whether a function is "malloc-like".
///
/// A function is "malloc-like" if it returns either null or a pointer that
/// doesn't alias any other pointer visible to the caller.
static bool isFunctionMallocLike(Function *F, const SCCNodeSet &SCCNodes) {
  SmallSetVector<Value *, 8> FlowsToReturn;
  for (BasicBlock &BB : *F)
    if (ReturnInst *Ret = dyn_cast<ReturnInst>(BB.getTerminator()))
      FlowsToReturn.insert(Ret->getReturnValue());

  for (unsigned i = 0; i != FlowsToReturn.size(); ++i) {
    Value *RetVal = FlowsToReturn[i];

    if (Constant *C = dyn_cast<Constant>(RetVal)) {
      if (!C->isNullValue() && !isa<UndefValue>(C))
        return false;

      continue;
    }

    if (isa<Argument>(RetVal))
      return false;

    if (Instruction *RVI = dyn_cast<Instruction>(RetVal))
      switch (RVI->getOpcode()) {
      // Extend the analysis by looking upwards.
      case Instruction::BitCast:
      case Instruction::GetElementPtr:
      case Instruction::AddrSpaceCast:
        FlowsToReturn.insert(RVI->getOperand(0));
        continue;
      case Instruction::Select: {
        SelectInst *SI = cast<SelectInst>(RVI);
        FlowsToReturn.insert(SI->getTrueValue());
        FlowsToReturn.insert(SI->getFalseValue());
        continue;
      }
      case Instruction::PHI: {
        PHINode *PN = cast<PHINode>(RVI);
        for (Value *IncValue : PN->incoming_values())
          FlowsToReturn.insert(IncValue);
        continue;
      }

      // Check whether the pointer came from an allocation.
      case Instruction::Alloca:
        break;
      case Instruction::Call:
      case Instruction::Invoke: {
        CallSite CS(RVI);
        if (CS.hasRetAttr(Attribute::NoAlias))
          break;
        if (CS.getCalledFunction() && SCCNodes.count(CS.getCalledFunction()))
          break;
        LLVM_FALLTHROUGH;
      }
      default:
        return false; // Did not come from an allocation.
      }

    if (PointerMayBeCaptured(RetVal, false, /*StoreCaptures=*/false))
      return false;
  }

  return true;
}

/// Deduce noalias attributes for the SCC.
static bool addNoAliasAttrs(const SCCNodeSet &SCCNodes) {
  // Check each function in turn, determining which functions return noalias
  // pointers.
  for (Function *F : SCCNodes) {
    // Already noalias.
    if (F->returnDoesNotAlias())
      continue;

    // We can infer and propagate function attributes only when we know that the
    // definition we'll get at link time is *exactly* the definition we see now.
    // For more details, see GlobalValue::mayBeDerefined.
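    // A single non-exact definition conservatively aborts the deduction for
    // the whole SCC, since the noalias conclusion for each member may rely on
    // the bodies of the others.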
    if (!F->hasExactDefinition())
      return false;

    // We annotate noalias return values, which are only applicable to
    // pointer types.
    if (!F->getReturnType()->isPointerTy())
      continue;

    if (!isFunctionMallocLike(F, SCCNodes))
      return false;
  }

  bool MadeChange = false;
  for (Function *F : SCCNodes) {
    if (F->returnDoesNotAlias() ||
        !F->getReturnType()->isPointerTy())
      continue;

    F->setReturnDoesNotAlias();
    ++NumNoAlias;
    MadeChange = true;
  }

  return MadeChange;
}

/// Tests whether this function is known to not return null.
///
/// Requires that the function returns a pointer.
///
/// Returns true if it believes the function will not return null, and sets
/// \p Speculative based on whether the returned conclusion is a speculative
/// conclusion due to SCC calls.
static bool isReturnNonNull(Function *F, const SCCNodeSet &SCCNodes,
                            bool &Speculative) {
  assert(F->getReturnType()->isPointerTy() &&
         "nonnull only meaningful on pointer types");
  Speculative = false;

  SmallSetVector<Value *, 8> FlowsToReturn;
  for (BasicBlock &BB : *F)
    if (auto *Ret = dyn_cast<ReturnInst>(BB.getTerminator()))
      FlowsToReturn.insert(Ret->getReturnValue());

  auto &DL = F->getParent()->getDataLayout();

  for (unsigned i = 0; i != FlowsToReturn.size(); ++i) {
    Value *RetVal = FlowsToReturn[i];

    // If this value is locally known to be non-null, we're good.
    if (isKnownNonZero(RetVal, DL))
      continue;

    // Otherwise, we need to look upwards since we can't make any local
    // conclusions.
    Instruction *RVI = dyn_cast<Instruction>(RetVal);
    if (!RVI)
      return false;
    switch (RVI->getOpcode()) {
    // Extend the analysis by looking upwards.
    case Instruction::BitCast:
    case Instruction::GetElementPtr:
    case Instruction::AddrSpaceCast:
      FlowsToReturn.insert(RVI->getOperand(0));
      continue;
    case Instruction::Select: {
      SelectInst *SI = cast<SelectInst>(RVI);
      FlowsToReturn.insert(SI->getTrueValue());
      FlowsToReturn.insert(SI->getFalseValue());
      continue;
    }
    case Instruction::PHI: {
      PHINode *PN = cast<PHINode>(RVI);
      for (int i = 0, e = PN->getNumIncomingValues(); i != e; ++i)
        FlowsToReturn.insert(PN->getIncomingValue(i));
      continue;
    }
    case Instruction::Call:
    case Instruction::Invoke: {
      CallSite CS(RVI);
      Function *Callee = CS.getCalledFunction();
      // A call to a node within the SCC is assumed to return null until
      // proven otherwise.
      if (Callee && SCCNodes.count(Callee)) {
        Speculative = true;
        continue;
      }
      return false;
    }
    default:
      return false; // Unknown source, may be null
    };
    llvm_unreachable("should have either continued or returned");
  }

  return true;
}

/// Deduce nonnull attributes for the SCC.
static bool addNonNullAttrs(const SCCNodeSet &SCCNodes) {
  // Speculate that all functions in the SCC return only nonnull
  // pointers. We may refute this as we analyze functions.
  bool SCCReturnsNonNull = true;

  bool MadeChange = false;

  // Check each function in turn, determining which functions return nonnull
  // pointers.
  for (Function *F : SCCNodes) {
    // Already nonnull.
    if (F->getAttributes().hasAttribute(AttributeList::ReturnIndex,
                                        Attribute::NonNull))
      continue;

    // We can infer and propagate function attributes only when we know that the
    // definition we'll get at link time is *exactly* the definition we see now.
    // For more details, see GlobalValue::mayBeDerefined.
    if (!F->hasExactDefinition())
      return false;

    // We annotate nonnull return values, which are only applicable to
    // pointer types.
    if (!F->getReturnType()->isPointerTy())
      continue;

    bool Speculative = false;
    if (isReturnNonNull(F, SCCNodes, Speculative)) {
      if (!Speculative) {
        // Mark the function eagerly since we may discover a function
        // which prevents us from speculating about the entire SCC.
        DEBUG(dbgs() << "Eagerly marking " << F->getName() << " as nonnull\n");
        F->addAttribute(AttributeList::ReturnIndex, Attribute::NonNull);
        ++NumNonNullReturn;
        MadeChange = true;
      }
      continue;
    }
    // At least one function returns something which could be null, can't
    // speculate any more.
    SCCReturnsNonNull = false;
  }

  if (SCCReturnsNonNull) {
    for (Function *F : SCCNodes) {
      if (F->getAttributes().hasAttribute(AttributeList::ReturnIndex,
                                          Attribute::NonNull) ||
          !F->getReturnType()->isPointerTy())
        continue;

      DEBUG(dbgs() << "SCC marking " << F->getName() << " as nonnull\n");
      F->addAttribute(AttributeList::ReturnIndex, Attribute::NonNull);
      ++NumNonNullReturn;
      MadeChange = true;
    }
  }

  return MadeChange;
}

/// Remove the convergent attribute from all functions in the SCC if every
/// callsite within the SCC is not convergent (except for calls to functions
/// within the SCC). Returns true if changes were made.
static bool removeConvergentAttrs(const SCCNodeSet &SCCNodes) {
  // For every function in the SCC, ensure that either
  //  * it is not convergent, or
  //  * we can remove its convergent attribute.
  bool HasConvergentFn = false;
  for (Function *F : SCCNodes) {
    if (!F->isConvergent()) continue;
    HasConvergentFn = true;

    // Can't remove convergent from function declarations.
    if (F->isDeclaration()) return false;

    // Can't remove convergent if any of our functions has a convergent call
    // to a function not in the SCC.
    for (Instruction &I : instructions(*F)) {
      CallSite CS(&I);
      // Bail if CS is a convergent call to a function not in the SCC.
      if (CS && CS.isConvergent() &&
          SCCNodes.count(CS.getCalledFunction()) == 0)
        return false;
    }
  }

  // If the SCC doesn't have any convergent functions, we have nothing to do.
  if (!HasConvergentFn) return false;

  // If we got here, all of the calls the SCC makes to functions not in the SCC
  // are non-convergent. Therefore all of the SCC's functions can also be made
  // non-convergent. We'll remove the attr from the callsites in
  // InstCombineCalls.
  for (Function *F : SCCNodes) {
    if (!F->isConvergent()) continue;

    DEBUG(dbgs() << "Removing convergent attr from fn " << F->getName()
                 << "\n");
    F->setNotConvergent();
  }
  return true;
}

static bool setDoesNotRecurse(Function &F) {
  if (F.doesNotRecurse())
    return false;
  F.setDoesNotRecurse();
  ++NumNoRecurse;
  return true;
}

static bool addNoRecurseAttrs(const SCCNodeSet &SCCNodes) {
  // Try and identify functions that do not recurse.

  // If the SCC contains multiple nodes we know for sure there is recursion.
  if (SCCNodes.size() != 1)
    return false;

  Function *F = *SCCNodes.begin();
  if (!F || F->isDeclaration() || F->doesNotRecurse())
    return false;

  // If all of the calls in F are identifiable and are to norecurse functions, F
  // is norecurse. This check also detects self-recursion as F is not currently
  // marked norecurse, so any call from F to F will not be treated as norecurse.
  for (Instruction &I : instructions(*F))
    if (auto CS = CallSite(&I)) {
      Function *Callee = CS.getCalledFunction();
      if (!Callee || Callee == F || !Callee->doesNotRecurse())
        // Function calls a potentially recursive function.
        return false;
    }

  // Every call was to a non-recursive function other than this function, and
  // we have no indirect recursion as the SCC size is one. This function cannot
  // recurse.
  return setDoesNotRecurse(*F);
}

PreservedAnalyses PostOrderFunctionAttrsPass::run(LazyCallGraph::SCC &C,
                                                  CGSCCAnalysisManager &AM,
                                                  LazyCallGraph &CG,
                                                  CGSCCUpdateResult &) {
  FunctionAnalysisManager &FAM =
      AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, CG).getManager();

  // We pass a lambda into functions to wire them up to the analysis manager
  // for getting function analyses.
  auto AARGetter = [&](Function &F) -> AAResults & {
    return FAM.getResult<AAManager>(F);
  };

  // Fill SCCNodes with the elements of the SCC. Also track whether there are
  // any external or opt-none nodes that will prevent us from optimizing any
  // part of the SCC.
  SCCNodeSet SCCNodes;
  bool HasUnknownCall = false;
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();
    if (F.hasFnAttribute(Attribute::OptimizeNone) ||
        F.hasFnAttribute(Attribute::Naked)) {
      // Treat any function we're trying not to optimize as if it were an
      // indirect call and omit it from the node set used below.
      HasUnknownCall = true;
      continue;
    }
    // Track whether any functions in this SCC have an unknown call edge.
    // Note: if this is ever a performance hit, we can common it with
    // subsequent routines which also do scans over the instructions of the
    // function.
    if (!HasUnknownCall)
      for (Instruction &I : instructions(F))
        if (auto CS = CallSite(&I))
          if (!CS.getCalledFunction()) {
            HasUnknownCall = true;
            break;
          }

    SCCNodes.insert(&F);
  }

  bool Changed = false;
  Changed |= addArgumentReturnedAttrs(SCCNodes);
  Changed |= addReadAttrs(SCCNodes, AARGetter);
  Changed |= addArgumentAttrs(SCCNodes);

  // If we have no external nodes participating in the SCC, we can deduce some
  // more precise attributes as well.
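  // (These deductions reason about every call edge in the SCC, so an unknown
  // call edge could invalidate them.)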
  if (!HasUnknownCall) {
    Changed |= addNoAliasAttrs(SCCNodes);
    Changed |= addNonNullAttrs(SCCNodes);
    Changed |= removeConvergentAttrs(SCCNodes);
    Changed |= addNoRecurseAttrs(SCCNodes);
  }

  return Changed ? PreservedAnalyses::none() : PreservedAnalyses::all();
}

namespace {

struct PostOrderFunctionAttrsLegacyPass : public CallGraphSCCPass {
  // Pass identification, replacement for typeid
  static char ID;

  PostOrderFunctionAttrsLegacyPass() : CallGraphSCCPass(ID) {
    initializePostOrderFunctionAttrsLegacyPassPass(
        *PassRegistry::getPassRegistry());
  }

  bool runOnSCC(CallGraphSCC &SCC) override;

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    AU.addRequired<AssumptionCacheTracker>();
    getAAResultsAnalysisUsage(AU);
    CallGraphSCCPass::getAnalysisUsage(AU);
  }
};

} // end anonymous namespace

char PostOrderFunctionAttrsLegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(PostOrderFunctionAttrsLegacyPass, "functionattrs",
                      "Deduce function attributes", false, false)
INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
INITIALIZE_PASS_DEPENDENCY(CallGraphWrapperPass)
INITIALIZE_PASS_END(PostOrderFunctionAttrsLegacyPass, "functionattrs",
                    "Deduce function attributes", false, false)

Pass *llvm::createPostOrderFunctionAttrsLegacyPass() {
  return new PostOrderFunctionAttrsLegacyPass();
}

template <typename AARGetterT>
static bool runImpl(CallGraphSCC &SCC, AARGetterT AARGetter) {
  bool Changed = false;

  // Fill SCCNodes with the elements of the SCC. Used for quickly looking up
  // whether a given CallGraphNode is in this SCC. Also track whether there are
  // any external or opt-none nodes that will prevent us from optimizing any
  // part of the SCC.
  SCCNodeSet SCCNodes;
  bool ExternalNode = false;
  for (CallGraphNode *I : SCC) {
    Function *F = I->getFunction();
    if (!F || F->hasFnAttribute(Attribute::OptimizeNone) ||
        F->hasFnAttribute(Attribute::Naked)) {
      // External node or function we're trying not to optimize - we both avoid
      // transforming them and avoid leveraging information they provide.
      ExternalNode = true;
      continue;
    }

    SCCNodes.insert(F);
  }

  // Skip it if the SCC only contains optnone functions.
  if (SCCNodes.empty())
    return Changed;

  Changed |= addArgumentReturnedAttrs(SCCNodes);
  Changed |= addReadAttrs(SCCNodes, AARGetter);
  Changed |= addArgumentAttrs(SCCNodes);

  // If we have no external nodes participating in the SCC, we can deduce some
  // more precise attributes as well.
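  // (Same reasoning as in the new-PM pass above: these deductions need to see
  // every call edge in the SCC.)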
  if (!ExternalNode) {
    Changed |= addNoAliasAttrs(SCCNodes);
    Changed |= addNonNullAttrs(SCCNodes);
    Changed |= removeConvergentAttrs(SCCNodes);
    Changed |= addNoRecurseAttrs(SCCNodes);
  }

  return Changed;
}

bool PostOrderFunctionAttrsLegacyPass::runOnSCC(CallGraphSCC &SCC) {
  if (skipSCC(SCC))
    return false;
  return runImpl(SCC, LegacyAARGetter(*this));
}

namespace {

struct ReversePostOrderFunctionAttrsLegacyPass : public ModulePass {
  // Pass identification, replacement for typeid
  static char ID;

  ReversePostOrderFunctionAttrsLegacyPass() : ModulePass(ID) {
    initializeReversePostOrderFunctionAttrsLegacyPassPass(
        *PassRegistry::getPassRegistry());
  }

  bool runOnModule(Module &M) override;

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    AU.addRequired<CallGraphWrapperPass>();
    AU.addPreserved<CallGraphWrapperPass>();
  }
};

} // end anonymous namespace

char ReversePostOrderFunctionAttrsLegacyPass::ID = 0;

INITIALIZE_PASS_BEGIN(ReversePostOrderFunctionAttrsLegacyPass, "rpo-functionattrs",
                      "Deduce function attributes in RPO", false, false)
INITIALIZE_PASS_DEPENDENCY(CallGraphWrapperPass)
INITIALIZE_PASS_END(ReversePostOrderFunctionAttrsLegacyPass, "rpo-functionattrs",
                    "Deduce function attributes in RPO", false, false)

Pass *llvm::createReversePostOrderFunctionAttrsPass() {
  return new ReversePostOrderFunctionAttrsLegacyPass();
}

static bool addNoRecurseAttrsTopDown(Function &F) {
  // We check the preconditions for the function prior to calling this to avoid
  // the cost of building up a reversible post-order list. We assert them here
  // to make sure none of the invariants this relies on were violated.
  assert(!F.isDeclaration() && "Cannot deduce norecurse without a definition!");
  assert(!F.doesNotRecurse() &&
         "This function has already been deduced as norecurse!");
  assert(F.hasInternalLinkage() &&
         "Can only do top-down deduction for internal linkage functions!");

  // If F is internal and all of its uses are calls from non-recursive
  // functions, then none of its calls could in fact recurse without going
  // through a function marked norecurse, and so we can mark this function too
  // as norecurse. Note that the uses must actually be calls -- otherwise
  // a pointer to this function could be returned from a norecurse function but
  // this function could be recursively (indirectly) called. Note that this
  // also detects if F is directly recursive as F is not yet marked as
  // a norecurse function.
  for (auto *U : F.users()) {
    auto *I = dyn_cast<Instruction>(U);
    if (!I)
      return false;
    CallSite CS(I);
    if (!CS || !CS.getParent()->getParent()->doesNotRecurse())
      return false;
  }
  return setDoesNotRecurse(F);
}

static bool deduceFunctionAttributeInRPO(Module &M, CallGraph &CG) {
  // We only have a post-order SCC traversal (because SCCs are inherently
  // discovered in post-order), so we accumulate them in a vector and then walk
  // it in reverse. This is simpler than using the RPO iterator infrastructure
  // because we need to combine SCC detection and the PO walk of the call
  // graph. We can also cheat egregiously because we're primarily interested in
  // synthesizing norecurse and so we can save only the singular SCCs, as SCCs
  // with multiple functions in them will clearly be recursive.
  SmallVector<Function *, 16> Worklist;
  for (scc_iterator<CallGraph *> I = scc_begin(&CG); !I.isAtEnd(); ++I) {
    if (I->size() != 1)
      continue;

    Function *F = I->front()->getFunction();
    if (F && !F->isDeclaration() && !F->doesNotRecurse() &&
        F->hasInternalLinkage())
      Worklist.push_back(F);
  }

  bool Changed = false;
  for (auto *F : llvm::reverse(Worklist))
    Changed |= addNoRecurseAttrsTopDown(*F);

  return Changed;
}

bool ReversePostOrderFunctionAttrsLegacyPass::runOnModule(Module &M) {
  if (skipModule(M))
    return false;

  auto &CG = getAnalysis<CallGraphWrapperPass>().getCallGraph();

  return deduceFunctionAttributeInRPO(M, CG);
}

PreservedAnalyses
ReversePostOrderFunctionAttrsPass::run(Module &M, ModuleAnalysisManager &AM) {
  auto &CG = AM.getResult<CallGraphAnalysis>(M);

  if (!deduceFunctionAttributeInRPO(M, CG))
    return PreservedAnalyses::all();

  PreservedAnalyses PA;
  PA.preserve<CallGraphAnalysis>();
  return PA;
}