//===- FunctionAttrs.cpp - Pass which marks functions attributes ----------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file implements interprocedural passes which walk the
/// call-graph deducing and/or propagating function attributes.
///
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/FunctionAttrs.h"
#include "llvm/Transforms/IPO.h"
#include "llvm/ADT/SCCIterator.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BasicAliasAnalysis.h"
#include "llvm/Analysis/CallGraph.h"
#include "llvm/Analysis/CallGraphSCCPass.h"
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;

#define DEBUG_TYPE "functionattrs"

STATISTIC(NumReadNone, "Number of functions marked readnone");
STATISTIC(NumReadOnly, "Number of functions marked readonly");
STATISTIC(NumNoCapture, "Number of arguments marked nocapture");
STATISTIC(NumReturned, "Number of arguments marked returned");
STATISTIC(NumReadNoneArg, "Number of arguments marked readnone");
STATISTIC(NumReadOnlyArg, "Number of arguments marked readonly");
STATISTIC(NumNoAlias, "Number of function returns marked noalias");
STATISTIC(NumNonNullReturn, "Number of function returns marked nonnull");
STATISTIC(NumNoRecurse, "Number of functions marked as norecurse");

namespace {
typedef SmallSetVector<Function *, 8> SCCNodeSet;
}

namespace {
/// The three kinds of memory access relevant to 'readonly' and
/// 'readnone' attributes.
enum MemoryAccessKind {
  MAK_ReadNone = 0,
  MAK_ReadOnly = 1,
  MAK_MayWrite = 2
};
}

static MemoryAccessKind checkFunctionMemoryAccess(Function &F, AAResults &AAR,
                                                  const SCCNodeSet &SCCNodes) {
  FunctionModRefBehavior MRB = AAR.getModRefBehavior(&F);
  if (MRB == FMRB_DoesNotAccessMemory)
    // Already perfect!
    return MAK_ReadNone;

  // Non-exact function definitions may not be selected at link time, and an
  // alternative version that writes to memory may be selected. See the comment
  // on GlobalValue::isDefinitionExact for more details.
  if (!F.hasExactDefinition()) {
    if (AliasAnalysis::onlyReadsMemory(MRB))
      return MAK_ReadOnly;

    // Conservatively assume it writes to memory.
    return MAK_MayWrite;
  }

  // Scan the function body for instructions that may read or write memory.
  bool ReadsMemory = false;
  for (inst_iterator II = inst_begin(F), E = inst_end(F); II != E; ++II) {
    Instruction *I = &*II;

    // Some instructions can be ignored even if they read or write memory.
    // Detect these now, skipping to the next instruction if one is found.
    CallSite CS(cast<Value>(I));
    if (CS) {
      // Ignore calls to functions in the same SCC, as long as the call sites
      // don't have operand bundles. Calls with operand bundles are allowed to
      // have memory effects not described by the memory effects of the call
      // target.
      if (!CS.hasOperandBundles() && CS.getCalledFunction() &&
          SCCNodes.count(CS.getCalledFunction()))
        continue;
      FunctionModRefBehavior MRB = AAR.getModRefBehavior(CS);

      // If the call doesn't access memory, we're done.
      if (!(MRB & MRI_ModRef))
        continue;

      if (!AliasAnalysis::onlyAccessesArgPointees(MRB)) {
        // The call could access any memory. If that includes writes, give up.
        if (MRB & MRI_Mod)
          return MAK_MayWrite;
        // If it reads, note it.
        if (MRB & MRI_Ref)
          ReadsMemory = true;
        continue;
      }

      // Check whether all pointer arguments point to local memory, and
      // ignore calls that only access local memory.
      for (CallSite::arg_iterator CI = CS.arg_begin(), CE = CS.arg_end();
           CI != CE; ++CI) {
        Value *Arg = *CI;
        if (!Arg->getType()->isPtrOrPtrVectorTy())
          continue;

        AAMDNodes AAInfo;
        I->getAAMetadata(AAInfo);
        MemoryLocation Loc(Arg, MemoryLocation::UnknownSize, AAInfo);

        // Skip accesses to local or constant memory as they don't impact the
        // externally visible mod/ref behavior.
        if (AAR.pointsToConstantMemory(Loc, /*OrLocal=*/true))
          continue;

        if (MRB & MRI_Mod)
          // Writes non-local memory. Give up.
          return MAK_MayWrite;
        if (MRB & MRI_Ref)
          // Ok, it reads non-local memory.
          ReadsMemory = true;
      }
      continue;
    } else if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
      // Ignore non-volatile loads from local memory. (Atomic is okay here.)
      if (!LI->isVolatile()) {
        MemoryLocation Loc = MemoryLocation::get(LI);
        if (AAR.pointsToConstantMemory(Loc, /*OrLocal=*/true))
          continue;
      }
    } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
      // Ignore non-volatile stores to local memory. (Atomic is okay here.)
      if (!SI->isVolatile()) {
        MemoryLocation Loc = MemoryLocation::get(SI);
        if (AAR.pointsToConstantMemory(Loc, /*OrLocal=*/true))
          continue;
      }
    } else if (VAArgInst *VI = dyn_cast<VAArgInst>(I)) {
      // Ignore vaargs on local memory.
      MemoryLocation Loc = MemoryLocation::get(VI);
      if (AAR.pointsToConstantMemory(Loc, /*OrLocal=*/true))
        continue;
    }

    // Any remaining instructions need to be taken seriously! Check if they
    // read or write memory.
    if (I->mayWriteToMemory())
      // Writes memory. Just give up.
      return MAK_MayWrite;

    // If this instruction may read memory, remember that.
    ReadsMemory |= I->mayReadFromMemory();
  }

  return ReadsMemory ? MAK_ReadOnly : MAK_ReadNone;
}

/// Deduce readonly/readnone attributes for the SCC.
template <typename AARGetterT>
static bool addReadAttrs(const SCCNodeSet &SCCNodes, AARGetterT AARGetter) {
  // Check if any of the functions in the SCC read or write memory. If they
  // write memory then they can't be marked readnone or readonly.
  bool ReadsMemory = false;
  for (Function *F : SCCNodes) {
    // Call the callable parameter to look up AA results for this function.
    AAResults &AAR = AARGetter(*F);

    switch (checkFunctionMemoryAccess(*F, AAR, SCCNodes)) {
    case MAK_MayWrite:
      return false;
    case MAK_ReadOnly:
      ReadsMemory = true;
      break;
    case MAK_ReadNone:
      // Nothing to do!
      break;
    }
  }

  // Success! Functions in this SCC do not access memory, or only read memory.
  // Give them the appropriate attribute.
  bool MadeChange = false;
  for (Function *F : SCCNodes) {
    if (F->doesNotAccessMemory())
      // Already perfect!
      continue;

    if (F->onlyReadsMemory() && ReadsMemory)
      // No change.
      continue;

    MadeChange = true;

    // Clear out any existing attributes.
    AttrBuilder B;
    B.addAttribute(Attribute::ReadOnly).addAttribute(Attribute::ReadNone);
    F->removeAttributes(
        AttributeSet::FunctionIndex,
        AttributeSet::get(F->getContext(), AttributeSet::FunctionIndex, B));

    // Add in the new attribute.
    F->addAttribute(AttributeSet::FunctionIndex,
                    ReadsMemory ? Attribute::ReadOnly : Attribute::ReadNone);

    if (ReadsMemory)
      ++NumReadOnly;
    else
      ++NumReadNone;
  }

  return MadeChange;
}

namespace {
/// For a given pointer Argument, this retains a list of Arguments of functions
/// in the same SCC that the pointer data flows into. We use this to build an
/// SCC of the arguments.
struct ArgumentGraphNode {
  Argument *Definition;
  SmallVector<ArgumentGraphNode *, 4> Uses;
};

class ArgumentGraph {
  // We store pointers to ArgumentGraphNode objects, so it's important that
  // they not move around upon insert.
  typedef std::map<Argument *, ArgumentGraphNode> ArgumentMapTy;

  ArgumentMapTy ArgumentMap;

  // There is no root node for the argument graph, in fact:
  //   void f(int *x, int *y) { if (...) f(x, y); }
  // is an example where the graph is disconnected. The SCCIterator requires a
  // single entry point, so we maintain a fake ("synthetic") root node that
  // uses every node. Because the graph is directed and nothing points into
  // the root, it will not participate in any SCCs (except for its own).
  ArgumentGraphNode SyntheticRoot;

public:
  ArgumentGraph() { SyntheticRoot.Definition = nullptr; }

  typedef SmallVectorImpl<ArgumentGraphNode *>::iterator iterator;

  iterator begin() { return SyntheticRoot.Uses.begin(); }
  iterator end() { return SyntheticRoot.Uses.end(); }
  ArgumentGraphNode *getEntryNode() { return &SyntheticRoot; }

  ArgumentGraphNode *operator[](Argument *A) {
    ArgumentGraphNode &Node = ArgumentMap[A];
    Node.Definition = A;
    SyntheticRoot.Uses.push_back(&Node);
    return &Node;
  }
};

/// This tracker checks whether callees are in the SCC, and if so it does not
/// consider that a capture, instead adding it to the "Uses" list and
/// continuing with the analysis.
struct ArgumentUsesTracker : public CaptureTracker {
  ArgumentUsesTracker(const SCCNodeSet &SCCNodes)
      : Captured(false), SCCNodes(SCCNodes) {}

  void tooManyUses() override { Captured = true; }

  bool captured(const Use *U) override {
    CallSite CS(U->getUser());
    if (!CS.getInstruction()) {
      Captured = true;
      return true;
    }

    Function *F = CS.getCalledFunction();
    if (!F || !F->hasExactDefinition() || !SCCNodes.count(F)) {
      Captured = true;
      return true;
    }

    // Note: the callee and the two successor blocks *follow* the argument
    // operands. This means there is no need to adjust UseIndex to account for
    // these.

    unsigned UseIndex =
        std::distance(const_cast<const Use *>(CS.arg_begin()), U);

    assert(UseIndex < CS.data_operands_size() &&
           "Indirect function calls should have been filtered above!");

    if (UseIndex >= CS.getNumArgOperands()) {
      // Data operand, but not an argument operand -- must be a bundle operand
      assert(CS.hasOperandBundles() && "Must be!");

      // CaptureTracking told us that we're being captured by an operand bundle
      // use. In this case it does not matter if the callee is within our SCC
      // or not -- we've been captured in some unknown way, and we have to be
      // conservative.
      Captured = true;
      return true;
    }

    if (UseIndex >= F->arg_size()) {
      assert(F->isVarArg() && "More params than args in non-varargs call");
      Captured = true;
      return true;
    }

    Uses.push_back(&*std::next(F->arg_begin(), UseIndex));
    return false;
  }

  bool Captured; // True only if certainly captured (used outside our SCC).
  SmallVector<Argument *, 4> Uses; // Uses within our SCC.

  const SCCNodeSet &SCCNodes;
};
}

namespace llvm {
template <> struct GraphTraits<ArgumentGraphNode *> {
  typedef ArgumentGraphNode NodeType;
  typedef SmallVectorImpl<ArgumentGraphNode *>::iterator ChildIteratorType;

  static inline NodeType *getEntryNode(NodeType *A) { return A; }
  static inline ChildIteratorType child_begin(NodeType *N) {
    return N->Uses.begin();
  }
  static inline ChildIteratorType child_end(NodeType *N) {
    return N->Uses.end();
  }
};
template <>
struct GraphTraits<ArgumentGraph *> : public GraphTraits<ArgumentGraphNode *> {
  static NodeType *getEntryNode(ArgumentGraph *AG) {
    return AG->getEntryNode();
  }
  static ChildIteratorType nodes_begin(ArgumentGraph *AG) {
    return AG->begin();
  }
  static ChildIteratorType nodes_end(ArgumentGraph *AG) { return AG->end(); }
};
}

/// Returns Attribute::None, Attribute::ReadOnly or Attribute::ReadNone.
static Attribute::AttrKind
determinePointerReadAttrs(Argument *A,
                          const SmallPtrSet<Argument *, 8> &SCCNodes) {

  SmallVector<Use *, 32> Worklist;
  SmallSet<Use *, 32> Visited;

  // inalloca arguments are always clobbered by the call.
  if (A->hasInAllocaAttr())
    return Attribute::None;

  bool IsRead = false;
  // We don't need to track IsWritten. If A is written to, return immediately.

  for (Use &U : A->uses()) {
    Visited.insert(&U);
    Worklist.push_back(&U);
  }

  while (!Worklist.empty()) {
    Use *U = Worklist.pop_back_val();
    Instruction *I = cast<Instruction>(U->getUser());

    switch (I->getOpcode()) {
    case Instruction::BitCast:
    case Instruction::GetElementPtr:
    case Instruction::PHI:
    case Instruction::Select:
    case Instruction::AddrSpaceCast:
      // The original value is not read/written via this if the new value isn't.
      for (Use &UU : I->uses())
        if (Visited.insert(&UU).second)
          Worklist.push_back(&UU);
      break;

    case Instruction::Call:
    case Instruction::Invoke: {
      bool Captures = true;

      if (I->getType()->isVoidTy())
        Captures = false;

      auto AddUsersToWorklistIfCapturing = [&] {
        if (Captures)
          for (Use &UU : I->uses())
            if (Visited.insert(&UU).second)
              Worklist.push_back(&UU);
      };

      CallSite CS(I);
      if (CS.doesNotAccessMemory()) {
        AddUsersToWorklistIfCapturing();
        continue;
      }

      Function *F = CS.getCalledFunction();
      if (!F) {
        if (CS.onlyReadsMemory()) {
          IsRead = true;
          AddUsersToWorklistIfCapturing();
          continue;
        }
        return Attribute::None;
      }

      // Note: the callee and the two successor blocks *follow* the argument
      // operands. This means there is no need to adjust UseIndex to account
      // for these.

      unsigned UseIndex = std::distance(CS.arg_begin(), U);

      // U cannot be the callee operand use: since we're exploring the
      // transitive uses of an Argument, having such a use be a callee would
      // imply the CallSite is an indirect call or invoke; and we'd take the
      // early exit above.
      assert(UseIndex < CS.data_operands_size() &&
             "Data operand use expected!");

      bool IsOperandBundleUse = UseIndex >= CS.getNumArgOperands();

      if (UseIndex >= F->arg_size() && !IsOperandBundleUse) {
        assert(F->isVarArg() && "More params than args in non-varargs call");
        return Attribute::None;
      }

      Captures &= !CS.doesNotCapture(UseIndex);

      // Since the optimizer (by design) cannot see the data flow corresponding
      // to an operand bundle use, these cannot participate in the optimistic
      // SCC analysis. Instead, we model the operand bundle uses as arguments
      // in a call to a function external to the SCC.
      if (!SCCNodes.count(&*std::next(F->arg_begin(), UseIndex)) ||
          IsOperandBundleUse) {

        // The accessors used on CallSite here do the right thing for calls and
        // invokes with operand bundles.

        if (!CS.onlyReadsMemory() && !CS.onlyReadsMemory(UseIndex))
          return Attribute::None;
        if (!CS.doesNotAccessMemory(UseIndex))
          IsRead = true;
      }

      AddUsersToWorklistIfCapturing();
      break;
    }

    case Instruction::Load:
      // A volatile load has side effects beyond what readonly can be relied
      // upon.
      if (cast<LoadInst>(I)->isVolatile())
        return Attribute::None;

      IsRead = true;
      break;

    case Instruction::ICmp:
    case Instruction::Ret:
      break;

    default:
      return Attribute::None;
    }
  }

  return IsRead ? Attribute::ReadOnly : Attribute::ReadNone;
}

/// Deduce returned attributes for the SCC.
static bool addArgumentReturnedAttrs(const SCCNodeSet &SCCNodes) {
  bool Changed = false;

  AttrBuilder B;
  B.addAttribute(Attribute::Returned);

  // Check each function in turn, determining if an argument is always returned.
  for (Function *F : SCCNodes) {
    // We can infer and propagate function attributes only when we know that the
    // definition we'll get at link time is *exactly* the definition we see now.
    // For more details, see GlobalValue::mayBeDerefined.
    if (!F->hasExactDefinition())
      continue;

    if (F->getReturnType()->isVoidTy())
      continue;

    auto FindRetArg = [&]() -> Value * {
      Value *RetArg = nullptr;
      for (BasicBlock &BB : *F)
        if (auto *Ret = dyn_cast<ReturnInst>(BB.getTerminator())) {
          // Note that stripPointerCasts should look through functions with
          // returned arguments.
          Value *RetVal = Ret->getReturnValue()->stripPointerCasts();
          if (!isa<Argument>(RetVal) || RetVal->getType() != F->getReturnType())
            return nullptr;

          if (!RetArg)
            RetArg = RetVal;
          else if (RetArg != RetVal)
            return nullptr;
        }

      return RetArg;
    };

    if (Value *RetArg = FindRetArg()) {
      auto *A = cast<Argument>(RetArg);
      A->addAttr(AttributeSet::get(F->getContext(), A->getArgNo() + 1, B));
      ++NumReturned;
      Changed = true;
    }
  }

  return Changed;
}

/// Deduce nocapture attributes for the SCC.
static bool addArgumentAttrs(const SCCNodeSet &SCCNodes) {
  bool Changed = false;

  ArgumentGraph AG;

  AttrBuilder B;
  B.addAttribute(Attribute::NoCapture);

  // Check each function in turn, determining which pointer arguments are not
  // captured.
  for (Function *F : SCCNodes) {
    // We can infer and propagate function attributes only when we know that the
    // definition we'll get at link time is *exactly* the definition we see now.
    // For more details, see GlobalValue::mayBeDerefined.
    if (!F->hasExactDefinition())
      continue;

    // Functions that are readonly (or readnone) and nounwind and don't return
    // a value can't capture arguments. Don't analyze them.
    if (F->onlyReadsMemory() && F->doesNotThrow() &&
        F->getReturnType()->isVoidTy()) {
      for (Function::arg_iterator A = F->arg_begin(), E = F->arg_end(); A != E;
           ++A) {
        if (A->getType()->isPointerTy() && !A->hasNoCaptureAttr()) {
          A->addAttr(AttributeSet::get(F->getContext(), A->getArgNo() + 1, B));
          ++NumNoCapture;
          Changed = true;
        }
      }
      continue;
    }

    for (Function::arg_iterator A = F->arg_begin(), E = F->arg_end(); A != E;
         ++A) {
      if (!A->getType()->isPointerTy())
        continue;
      bool HasNonLocalUses = false;
      if (!A->hasNoCaptureAttr()) {
        ArgumentUsesTracker Tracker(SCCNodes);
        PointerMayBeCaptured(&*A, &Tracker);
        if (!Tracker.Captured) {
          if (Tracker.Uses.empty()) {
            // If it's trivially not captured, mark it nocapture now.
            A->addAttr(
                AttributeSet::get(F->getContext(), A->getArgNo() + 1, B));
            ++NumNoCapture;
            Changed = true;
          } else {
            // If it's not trivially captured and not trivially not captured,
            // then it must be calling into another function in our SCC. Save
            // its particulars for Argument-SCC analysis later.
            ArgumentGraphNode *Node = AG[&*A];
            for (Argument *Use : Tracker.Uses) {
              Node->Uses.push_back(AG[Use]);
              if (Use != &*A)
                HasNonLocalUses = true;
            }
          }
        }
        // Otherwise, it's captured. Don't bother doing SCC analysis on it.
      }
      if (!HasNonLocalUses && !A->onlyReadsMemory()) {
        // Can we determine that it's readonly/readnone without doing an SCC?
        // Note that we don't allow any calls at all here, or else our result
        // will be dependent on the iteration order through the functions in the
        // SCC.
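        // Check the argument in isolation: we pass a singleton set, so only
        // uses that feed straight back into this same argument are treated
        // optimistically; everything else is handled conservatively.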
        SmallPtrSet<Argument *, 8> Self;
        Self.insert(&*A);
        Attribute::AttrKind R = determinePointerReadAttrs(&*A, Self);
        if (R != Attribute::None) {
          AttrBuilder B;
          B.addAttribute(R);
          A->addAttr(AttributeSet::get(A->getContext(), A->getArgNo() + 1, B));
          Changed = true;
          R == Attribute::ReadOnly ? ++NumReadOnlyArg : ++NumReadNoneArg;
        }
      }
    }
  }

  // The graph we've collected is partial because we stopped scanning for
  // argument uses once we solved the argument trivially. These partial nodes
  // show up as ArgumentGraphNode objects with an empty Uses list, and for
  // these nodes the final decision about whether they capture has already been
  // made. If the definition doesn't have a 'nocapture' attribute by now, it
  // captures.

  for (scc_iterator<ArgumentGraph *> I = scc_begin(&AG); !I.isAtEnd(); ++I) {
    const std::vector<ArgumentGraphNode *> &ArgumentSCC = *I;
    if (ArgumentSCC.size() == 1) {
      if (!ArgumentSCC[0]->Definition)
        continue; // synthetic root node

      // eg. "void f(int* x) { if (...) f(x); }"
      if (ArgumentSCC[0]->Uses.size() == 1 &&
          ArgumentSCC[0]->Uses[0] == ArgumentSCC[0]) {
        Argument *A = ArgumentSCC[0]->Definition;
        A->addAttr(AttributeSet::get(A->getContext(), A->getArgNo() + 1, B));
        ++NumNoCapture;
        Changed = true;
      }
      continue;
    }

    bool SCCCaptured = false;
    for (auto I = ArgumentSCC.begin(), E = ArgumentSCC.end();
         I != E && !SCCCaptured; ++I) {
      ArgumentGraphNode *Node = *I;
      if (Node->Uses.empty()) {
        if (!Node->Definition->hasNoCaptureAttr())
          SCCCaptured = true;
      }
    }
    if (SCCCaptured)
      continue;

    SmallPtrSet<Argument *, 8> ArgumentSCCNodes;
    // Fill ArgumentSCCNodes with the elements of the ArgumentSCC. Used for
    // quickly looking up whether a given Argument is in this ArgumentSCC.
    for (ArgumentGraphNode *I : ArgumentSCC) {
      ArgumentSCCNodes.insert(I->Definition);
    }

    for (auto I = ArgumentSCC.begin(), E = ArgumentSCC.end();
         I != E && !SCCCaptured; ++I) {
      ArgumentGraphNode *N = *I;
      for (ArgumentGraphNode *Use : N->Uses) {
        Argument *A = Use->Definition;
        if (A->hasNoCaptureAttr() || ArgumentSCCNodes.count(A))
          continue;
        SCCCaptured = true;
        break;
      }
    }
    if (SCCCaptured)
      continue;

    for (unsigned i = 0, e = ArgumentSCC.size(); i != e; ++i) {
      Argument *A = ArgumentSCC[i]->Definition;
      A->addAttr(AttributeSet::get(A->getContext(), A->getArgNo() + 1, B));
      ++NumNoCapture;
      Changed = true;
    }

    // We also want to compute readonly/readnone. With a small number of false
    // negatives, we can assume that any pointer which is captured isn't going
    // to be provably readonly or readnone, since by definition we can't
    // analyze all uses of a captured pointer.
    //
    // The false negatives happen when the pointer is captured by a function
    // that promises readonly/readnone behaviour on the pointer, then the
    // pointer's lifetime ends before anything that writes to arbitrary memory.
    // Also, a readonly/readnone pointer may be returned, but returning a
    // pointer is capturing it.
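    //
    // Take the most conservative of the per-argument results below: start at
    // readnone, weaken to readonly, and give up on the whole argument SCC if
    // any argument may be written.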
    Attribute::AttrKind ReadAttr = Attribute::ReadNone;
    for (unsigned i = 0, e = ArgumentSCC.size(); i != e; ++i) {
      Argument *A = ArgumentSCC[i]->Definition;
      Attribute::AttrKind K = determinePointerReadAttrs(A, ArgumentSCCNodes);
      if (K == Attribute::ReadNone)
        continue;
      if (K == Attribute::ReadOnly) {
        ReadAttr = Attribute::ReadOnly;
        continue;
      }
      ReadAttr = K;
      break;
    }

    if (ReadAttr != Attribute::None) {
      AttrBuilder B, R;
      B.addAttribute(ReadAttr);
      R.addAttribute(Attribute::ReadOnly).addAttribute(Attribute::ReadNone);
      for (unsigned i = 0, e = ArgumentSCC.size(); i != e; ++i) {
        Argument *A = ArgumentSCC[i]->Definition;
        // Clear out existing readonly/readnone attributes
        A->removeAttr(AttributeSet::get(A->getContext(), A->getArgNo() + 1, R));
        A->addAttr(AttributeSet::get(A->getContext(), A->getArgNo() + 1, B));
        ReadAttr == Attribute::ReadOnly ? ++NumReadOnlyArg : ++NumReadNoneArg;
        Changed = true;
      }
    }
  }

  return Changed;
}

/// Tests whether a function is "malloc-like".
///
/// A function is "malloc-like" if it returns either null or a pointer that
/// doesn't alias any other pointer visible to the caller.
static bool isFunctionMallocLike(Function *F, const SCCNodeSet &SCCNodes) {
  SmallSetVector<Value *, 8> FlowsToReturn;
  for (BasicBlock &BB : *F)
    if (ReturnInst *Ret = dyn_cast<ReturnInst>(BB.getTerminator()))
      FlowsToReturn.insert(Ret->getReturnValue());

  for (unsigned i = 0; i != FlowsToReturn.size(); ++i) {
    Value *RetVal = FlowsToReturn[i];

    if (Constant *C = dyn_cast<Constant>(RetVal)) {
      if (!C->isNullValue() && !isa<UndefValue>(C))
        return false;

      continue;
    }

    if (isa<Argument>(RetVal))
      return false;

    if (Instruction *RVI = dyn_cast<Instruction>(RetVal))
      switch (RVI->getOpcode()) {
      // Extend the analysis by looking upwards.
      case Instruction::BitCast:
      case Instruction::GetElementPtr:
      case Instruction::AddrSpaceCast:
        FlowsToReturn.insert(RVI->getOperand(0));
        continue;
      case Instruction::Select: {
        SelectInst *SI = cast<SelectInst>(RVI);
        FlowsToReturn.insert(SI->getTrueValue());
        FlowsToReturn.insert(SI->getFalseValue());
        continue;
      }
      case Instruction::PHI: {
        PHINode *PN = cast<PHINode>(RVI);
        for (Value *IncValue : PN->incoming_values())
          FlowsToReturn.insert(IncValue);
        continue;
      }

      // Check whether the pointer came from an allocation.
      case Instruction::Alloca:
        break;
      case Instruction::Call:
      case Instruction::Invoke: {
        CallSite CS(RVI);
        if (CS.paramHasAttr(0, Attribute::NoAlias))
          break;
        if (CS.getCalledFunction() && SCCNodes.count(CS.getCalledFunction()))
          break;
      } // fall-through
      default:
        return false; // Did not come from an allocation.
      }

    if (PointerMayBeCaptured(RetVal, false, /*StoreCaptures=*/false))
      return false;
  }

  return true;
}

/// Deduce noalias attributes for the SCC.
static bool addNoAliasAttrs(const SCCNodeSet &SCCNodes) {
  // Check each function in turn, determining which functions return noalias
  // pointers.
  for (Function *F : SCCNodes) {
    // Already noalias.
    if (F->doesNotAlias(0))
      continue;

    // We can infer and propagate function attributes only when we know that the
    // definition we'll get at link time is *exactly* the definition we see now.
    // For more details, see GlobalValue::mayBeDerefined.
    if (!F->hasExactDefinition())
      return false;

    // We annotate noalias return values, which are only applicable to
    // pointer types.
    if (!F->getReturnType()->isPointerTy())
      continue;

    if (!isFunctionMallocLike(F, SCCNodes))
      return false;
  }

  bool MadeChange = false;
  for (Function *F : SCCNodes) {
    if (F->doesNotAlias(0) || !F->getReturnType()->isPointerTy())
      continue;

    F->setDoesNotAlias(0);
    ++NumNoAlias;
    MadeChange = true;
  }

  return MadeChange;
}

/// Tests whether this function is known to not return null.
///
/// Requires that the function returns a pointer.
///
/// Returns true if it believes the function will not return a null, and sets
/// \p Speculative based on whether the returned conclusion is a speculative
/// conclusion due to SCC calls.
static bool isReturnNonNull(Function *F, const SCCNodeSet &SCCNodes,
                            bool &Speculative) {
  assert(F->getReturnType()->isPointerTy() &&
         "nonnull only meaningful on pointer types");
  Speculative = false;

  SmallSetVector<Value *, 8> FlowsToReturn;
  for (BasicBlock &BB : *F)
    if (auto *Ret = dyn_cast<ReturnInst>(BB.getTerminator()))
      FlowsToReturn.insert(Ret->getReturnValue());

  for (unsigned i = 0; i != FlowsToReturn.size(); ++i) {
    Value *RetVal = FlowsToReturn[i];

    // If this value is locally known to be non-null, we're good
    if (isKnownNonNull(RetVal))
      continue;

    // Otherwise, we need to look upwards since we can't make any local
    // conclusions.
    Instruction *RVI = dyn_cast<Instruction>(RetVal);
    if (!RVI)
      return false;
    switch (RVI->getOpcode()) {
    // Extend the analysis by looking upwards.
    case Instruction::BitCast:
    case Instruction::GetElementPtr:
    case Instruction::AddrSpaceCast:
      FlowsToReturn.insert(RVI->getOperand(0));
      continue;
    case Instruction::Select: {
      SelectInst *SI = cast<SelectInst>(RVI);
      FlowsToReturn.insert(SI->getTrueValue());
      FlowsToReturn.insert(SI->getFalseValue());
      continue;
    }
    case Instruction::PHI: {
      PHINode *PN = cast<PHINode>(RVI);
      for (int i = 0, e = PN->getNumIncomingValues(); i != e; ++i)
        FlowsToReturn.insert(PN->getIncomingValue(i));
      continue;
    }
    case Instruction::Call:
    case Instruction::Invoke: {
      CallSite CS(RVI);
      Function *Callee = CS.getCalledFunction();
      // A call to a node within the SCC is assumed to return null until
      // proven otherwise
      if (Callee && SCCNodes.count(Callee)) {
        Speculative = true;
        continue;
      }
      return false;
    }
    default:
      return false; // Unknown source, may be null
    }
    llvm_unreachable("should have either continued or returned");
  }

  return true;
}

/// Deduce nonnull attributes for the SCC.
static bool addNonNullAttrs(const SCCNodeSet &SCCNodes) {
  // Speculative that all functions in the SCC return only nonnull
  // pointers. We may refute this as we analyze functions.
  bool SCCReturnsNonNull = true;

  bool MadeChange = false;

  // Check each function in turn, determining which functions return nonnull
  // pointers.
  for (Function *F : SCCNodes) {
    // Already nonnull.
    if (F->getAttributes().hasAttribute(AttributeSet::ReturnIndex,
                                        Attribute::NonNull))
      continue;

    // We can infer and propagate function attributes only when we know that the
    // definition we'll get at link time is *exactly* the definition we see now.
    // For more details, see GlobalValue::mayBeDerefined.
    if (!F->hasExactDefinition())
      return false;

    // We annotate nonnull return values, which are only applicable to
    // pointer types.
    if (!F->getReturnType()->isPointerTy())
      continue;

    bool Speculative = false;
    if (isReturnNonNull(F, SCCNodes, Speculative)) {
      if (!Speculative) {
        // Mark the function eagerly since we may discover a function
        // which prevents us from speculating about the entire SCC
        DEBUG(dbgs() << "Eagerly marking " << F->getName() << " as nonnull\n");
        F->addAttribute(AttributeSet::ReturnIndex, Attribute::NonNull);
        ++NumNonNullReturn;
        MadeChange = true;
      }
      continue;
    }
    // At least one function returns something which could be null, can't
    // speculate any more.
    SCCReturnsNonNull = false;
  }

  if (SCCReturnsNonNull) {
    for (Function *F : SCCNodes) {
      if (F->getAttributes().hasAttribute(AttributeSet::ReturnIndex,
                                          Attribute::NonNull) ||
          !F->getReturnType()->isPointerTy())
        continue;

      DEBUG(dbgs() << "SCC marking " << F->getName() << " as nonnull\n");
      F->addAttribute(AttributeSet::ReturnIndex, Attribute::NonNull);
      ++NumNonNullReturn;
      MadeChange = true;
    }
  }

  return MadeChange;
}

/// Remove the convergent attribute from all functions in the SCC if every
/// callsite within the SCC is not convergent (except for calls to functions
/// within the SCC). Returns true if changes were made.
static bool removeConvergentAttrs(const SCCNodeSet &SCCNodes) {
  // For every function in SCC, ensure that either
  //  * it is not convergent, or
  //  * we can remove its convergent attribute.
  bool HasConvergentFn = false;
  for (Function *F : SCCNodes) {
    if (!F->isConvergent()) continue;
    HasConvergentFn = true;

    // Can't remove convergent from function declarations.
    if (F->isDeclaration()) return false;

    // Can't remove convergent if any of our functions has a convergent call
    // to a function not in the SCC.
    for (Instruction &I : instructions(*F)) {
      CallSite CS(&I);
      // Bail if CS is a convergent call to a function not in the SCC.
      if (CS && CS.isConvergent() &&
          SCCNodes.count(CS.getCalledFunction()) == 0)
        return false;
    }
  }

  // If the SCC doesn't have any convergent functions, we have nothing to do.
  if (!HasConvergentFn) return false;

  // If we got here, all of the calls the SCC makes to functions not in the SCC
  // are non-convergent. Therefore all of the SCC's functions can also be made
  // non-convergent. We'll remove the attr from the callsites in
  // InstCombineCalls.
  for (Function *F : SCCNodes) {
    if (!F->isConvergent()) continue;

    DEBUG(dbgs() << "Removing convergent attr from fn " << F->getName()
                 << "\n");
    F->setNotConvergent();
  }
  return true;
}

static bool setDoesNotRecurse(Function &F) {
  if (F.doesNotRecurse())
    return false;
  F.setDoesNotRecurse();
  ++NumNoRecurse;
  return true;
}

static bool addNoRecurseAttrs(const SCCNodeSet &SCCNodes) {
  // Try and identify functions that do not recurse.

  // If the SCC contains multiple nodes we know for sure there is recursion.
  if (SCCNodes.size() != 1)
    return false;

  Function *F = *SCCNodes.begin();
  if (!F || F->isDeclaration() || F->doesNotRecurse())
    return false;

  // If all of the calls in F are identifiable and are to norecurse functions, F
  // is norecurse. This check also detects self-recursion as F is not currently
  // marked norecurse, so any calls from F to F will not be marked norecurse.
  for (Instruction &I : instructions(*F))
    if (auto CS = CallSite(&I)) {
      Function *Callee = CS.getCalledFunction();
      if (!Callee || Callee == F || !Callee->doesNotRecurse())
        // Function calls a potentially recursive function.
        return false;
    }

  // Every call was to a non-recursive function other than this function, and
  // we have no indirect recursion as the SCC size is one. This function cannot
  // recurse.
  return setDoesNotRecurse(*F);
}

PreservedAnalyses PostOrderFunctionAttrsPass::run(LazyCallGraph::SCC &C,
                                                  CGSCCAnalysisManager &AM) {
  FunctionAnalysisManager &FAM =
      AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C).getManager();

  // We pass a lambda into functions to wire them up to the analysis manager
  // for getting function analyses.
  auto AARGetter = [&](Function &F) -> AAResults & {
    return FAM.getResult<AAManager>(F);
  };

  // Fill SCCNodes with the elements of the SCC. Also track whether there are
  // any external or opt-none nodes that will prevent us from optimizing any
  // part of the SCC.
  SCCNodeSet SCCNodes;
  bool HasUnknownCall = false;
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();
    if (F.hasFnAttribute(Attribute::OptimizeNone)) {
      // Treat any function we're trying not to optimize as if it were an
      // indirect call and omit it from the node set used below.
      HasUnknownCall = true;
      continue;
    }
    // Track whether any functions in this SCC have an unknown call edge.
    // Note: if this is ever a performance hit, we can common it with
    // subsequent routines which also do scans over the instructions of the
    // function.
    if (!HasUnknownCall)
      for (Instruction &I : instructions(F))
        if (auto CS = CallSite(&I))
          if (!CS.getCalledFunction()) {
            HasUnknownCall = true;
            break;
          }

    SCCNodes.insert(&F);
  }

  bool Changed = false;
  Changed |= addArgumentReturnedAttrs(SCCNodes);
  Changed |= addReadAttrs(SCCNodes, AARGetter);
  Changed |= addArgumentAttrs(SCCNodes);

  // If we have no external nodes participating in the SCC, we can deduce some
  // more precise attributes as well.
  if (!HasUnknownCall) {
    Changed |= addNoAliasAttrs(SCCNodes);
    Changed |= addNonNullAttrs(SCCNodes);
    Changed |= removeConvergentAttrs(SCCNodes);
    Changed |= addNoRecurseAttrs(SCCNodes);
  }

  return Changed ? PreservedAnalyses::none() : PreservedAnalyses::all();
}

namespace {
struct PostOrderFunctionAttrsLegacyPass : public CallGraphSCCPass {
  static char ID; // Pass identification, replacement for typeid
  PostOrderFunctionAttrsLegacyPass() : CallGraphSCCPass(ID) {
    initializePostOrderFunctionAttrsLegacyPassPass(
        *PassRegistry::getPassRegistry());
  }

  bool runOnSCC(CallGraphSCC &SCC) override;

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    AU.addRequired<AssumptionCacheTracker>();
    getAAResultsAnalysisUsage(AU);
    CallGraphSCCPass::getAnalysisUsage(AU);
  }
};
}

char PostOrderFunctionAttrsLegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(PostOrderFunctionAttrsLegacyPass, "functionattrs",
                      "Deduce function attributes", false, false)
INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
INITIALIZE_PASS_DEPENDENCY(CallGraphWrapperPass)
INITIALIZE_PASS_END(PostOrderFunctionAttrsLegacyPass, "functionattrs",
                    "Deduce function attributes", false, false)

Pass *llvm::createPostOrderFunctionAttrsLegacyPass() {
  return new PostOrderFunctionAttrsLegacyPass();
}

template <typename AARGetterT>
static bool runImpl(CallGraphSCC &SCC, AARGetterT AARGetter) {
  bool Changed = false;

  // Fill SCCNodes with the elements of the SCC. Used for quickly looking up
  // whether a given CallGraphNode is in this SCC. Also track whether there are
  // any external or opt-none nodes that will prevent us from optimizing any
  // part of the SCC.
  SCCNodeSet SCCNodes;
  bool ExternalNode = false;
  for (CallGraphNode *I : SCC) {
    Function *F = I->getFunction();
    if (!F || F->hasFnAttribute(Attribute::OptimizeNone)) {
      // External node or function we're trying not to optimize - we both avoid
      // transforming them and avoid leveraging information they provide.
      ExternalNode = true;
      continue;
    }

    SCCNodes.insert(F);
  }

  Changed |= addArgumentReturnedAttrs(SCCNodes);
  Changed |= addReadAttrs(SCCNodes, AARGetter);
  Changed |= addArgumentAttrs(SCCNodes);

  // If we have no external nodes participating in the SCC, we can deduce some
  // more precise attributes as well.
  if (!ExternalNode) {
    Changed |= addNoAliasAttrs(SCCNodes);
    Changed |= addNonNullAttrs(SCCNodes);
    Changed |= removeConvergentAttrs(SCCNodes);
    Changed |= addNoRecurseAttrs(SCCNodes);
  }

  return Changed;
}

bool PostOrderFunctionAttrsLegacyPass::runOnSCC(CallGraphSCC &SCC) {
  if (skipSCC(SCC))
    return false;

  // We compute dedicated AA results for each function in the SCC as needed. We
  // use a lambda referencing external objects so that they live long enough to
  // be queried, but we re-use them each time.
  Optional<BasicAAResult> BAR;
  Optional<AAResults> AAR;
  auto AARGetter = [&](Function &F) -> AAResults & {
    BAR.emplace(createLegacyPMBasicAAResult(*this, F));
    AAR.emplace(createLegacyPMAAResults(*this, F, *BAR));
    return *AAR;
  };

  return runImpl(SCC, AARGetter);
}

namespace {
struct ReversePostOrderFunctionAttrsLegacyPass : public ModulePass {
  static char ID; // Pass identification, replacement for typeid
  ReversePostOrderFunctionAttrsLegacyPass() : ModulePass(ID) {
    initializeReversePostOrderFunctionAttrsLegacyPassPass(
        *PassRegistry::getPassRegistry());
  }

  bool runOnModule(Module &M) override;

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    AU.addRequired<CallGraphWrapperPass>();
    AU.addPreserved<CallGraphWrapperPass>();
  }
};
}

char ReversePostOrderFunctionAttrsLegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(ReversePostOrderFunctionAttrsLegacyPass,
                      "rpo-functionattrs",
                      "Deduce function attributes in RPO", false, false)
INITIALIZE_PASS_DEPENDENCY(CallGraphWrapperPass)
INITIALIZE_PASS_END(ReversePostOrderFunctionAttrsLegacyPass,
                    "rpo-functionattrs",
                    "Deduce function attributes in RPO", false, false)

Pass *llvm::createReversePostOrderFunctionAttrsPass() {
  return new ReversePostOrderFunctionAttrsLegacyPass();
}

static bool addNoRecurseAttrsTopDown(Function &F) {
  // We check the preconditions for the function prior to calling this to avoid
  // the cost of building up a reversible post-order list. We assert them here
  // to make sure none of the invariants this relies on were violated.
  assert(!F.isDeclaration() && "Cannot deduce norecurse without a definition!");
  assert(!F.doesNotRecurse() &&
         "This function has already been deduced as norecurse!");
  assert(F.hasInternalLinkage() &&
         "Can only do top-down deduction for internal linkage functions!");

  // If F is internal and all of its uses are calls from non-recursive
  // functions, then none of its calls could in fact recurse without going
  // through a function marked norecurse, and so we can mark this function too
  // as norecurse. Note that the uses must actually be calls -- otherwise
  // a pointer to this function could be returned from a norecurse function but
  // this function could be recursively (indirectly) called. Note that this
  // also detects if F is directly recursive as F is not yet marked as
  // a norecurse function.
  for (auto *U : F.users()) {
    auto *I = dyn_cast<Instruction>(U);
    if (!I)
      return false;
    CallSite CS(I);
    if (!CS || !CS.getParent()->getParent()->doesNotRecurse())
      return false;
  }
  return setDoesNotRecurse(F);
}

static bool deduceFunctionAttributeInRPO(Module &M, CallGraph &CG) {
  // We only have a post-order SCC traversal (because SCCs are inherently
  // discovered in post-order), so we accumulate them in a vector and then walk
  // it in reverse. This is simpler than using the RPO iterator infrastructure
  // because we need to combine SCC detection and the PO walk of the call
  // graph. We can also cheat egregiously because we're primarily interested in
  // synthesizing norecurse and so we only save the singleton SCCs, as SCCs
  // with multiple functions in them will clearly be recursive.
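  //
  // Collect the single-function SCCs that are candidates for the top-down
  // deduction below: definitions with internal linkage that are not already
  // marked norecurse.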
  SmallVector<Function *, 16> Worklist;
  for (scc_iterator<CallGraph *> I = scc_begin(&CG); !I.isAtEnd(); ++I) {
    if (I->size() != 1)
      continue;

    Function *F = I->front()->getFunction();
    if (F && !F->isDeclaration() && !F->doesNotRecurse() &&
        F->hasInternalLinkage())
      Worklist.push_back(F);
  }

  bool Changed = false;
  for (auto *F : reverse(Worklist))
    Changed |= addNoRecurseAttrsTopDown(*F);

  return Changed;
}

bool ReversePostOrderFunctionAttrsLegacyPass::runOnModule(Module &M) {
  if (skipModule(M))
    return false;

  auto &CG = getAnalysis<CallGraphWrapperPass>().getCallGraph();

  return deduceFunctionAttributeInRPO(M, CG);
}

PreservedAnalyses
ReversePostOrderFunctionAttrsPass::run(Module &M, AnalysisManager<Module> &AM) {
  auto &CG = AM.getResult<CallGraphAnalysis>(M);

  bool Changed = deduceFunctionAttributeInRPO(M, CG);
  if (!Changed)
    return PreservedAnalyses::all();
  PreservedAnalyses PA;
  PA.preserve<CallGraphAnalysis>();
  return PA;
}