//===- FunctionAttrs.cpp - Pass which marks functions attributes ----------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file implements interprocedural passes which walk the
/// call-graph deducing and/or propagating function attributes.
///
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/FunctionAttrs.h"
#include "llvm/Transforms/IPO.h"
#include "llvm/ADT/SCCIterator.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BasicAliasAnalysis.h"
#include "llvm/Analysis/CallGraph.h"
#include "llvm/Analysis/CallGraphSCCPass.h"
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;

#define DEBUG_TYPE "functionattrs"

STATISTIC(NumReadNone, "Number of functions marked readnone");
STATISTIC(NumReadOnly, "Number of functions marked readonly");
STATISTIC(NumNoCapture, "Number of arguments marked nocapture");
STATISTIC(NumReturned, "Number of arguments marked returned");
STATISTIC(NumReadNoneArg, "Number of arguments marked readnone");
STATISTIC(NumReadOnlyArg, "Number of arguments marked readonly");
STATISTIC(NumNoAlias, "Number of function returns marked noalias");
STATISTIC(NumNonNullReturn, "Number of function returns marked nonnull");
STATISTIC(NumNoRecurse, "Number of functions marked as norecurse");

namespace {
typedef SmallSetVector<Function *, 8> SCCNodeSet;
}

namespace {
/// The three kinds of memory access relevant to 'readonly' and
/// 'readnone' attributes.
enum MemoryAccessKind {
  MAK_ReadNone = 0,
  MAK_ReadOnly = 1,
  MAK_MayWrite = 2
};
}

static MemoryAccessKind checkFunctionMemoryAccess(Function &F, AAResults &AAR,
                                                  const SCCNodeSet &SCCNodes) {
  FunctionModRefBehavior MRB = AAR.getModRefBehavior(&F);
  if (MRB == FMRB_DoesNotAccessMemory)
    // Already perfect!
    return MAK_ReadNone;

  // Non-exact function definitions may not be selected at link time, and an
  // alternative version that writes to memory may be selected.  See the
  // comment on GlobalValue::isDefinitionExact for more details.
  if (!F.hasExactDefinition()) {
    if (AliasAnalysis::onlyReadsMemory(MRB))
      return MAK_ReadOnly;

    // Conservatively assume it writes to memory.
    return MAK_MayWrite;
  }

  // Scan the function body for instructions that may read or write memory.
  bool ReadsMemory = false;
  for (inst_iterator II = inst_begin(F), E = inst_end(F); II != E; ++II) {
    Instruction *I = &*II;

    // Some instructions can be ignored even if they read or write memory.
    // Detect these now, skipping to the next instruction if one is found.
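    // Calls are handled first: calls back into this SCC are skipped (their
    // bodies are scanned separately), while other calls are classified via
    // the alias-analysis mod/ref information for the call site.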
    CallSite CS(cast<Value>(I));
    if (CS) {
      // Ignore calls to functions in the same SCC, as long as the call sites
      // don't have operand bundles.  Calls with operand bundles are allowed
      // to have memory effects not described by the memory effects of the
      // call target.
      if (!CS.hasOperandBundles() && CS.getCalledFunction() &&
          SCCNodes.count(CS.getCalledFunction()))
        continue;
      FunctionModRefBehavior MRB = AAR.getModRefBehavior(CS);

      // If the call doesn't access memory, we're done.
      if (!(MRB & MRI_ModRef))
        continue;

      if (!AliasAnalysis::onlyAccessesArgPointees(MRB)) {
        // The call could access any memory. If that includes writes, give up.
        if (MRB & MRI_Mod)
          return MAK_MayWrite;
        // If it reads, note it.
        if (MRB & MRI_Ref)
          ReadsMemory = true;
        continue;
      }

      // Check whether all pointer arguments point to local memory, and
      // ignore calls that only access local memory.
      for (CallSite::arg_iterator CI = CS.arg_begin(), CE = CS.arg_end();
           CI != CE; ++CI) {
        Value *Arg = *CI;
        if (!Arg->getType()->isPtrOrPtrVectorTy())
          continue;

        AAMDNodes AAInfo;
        I->getAAMetadata(AAInfo);
        MemoryLocation Loc(Arg, MemoryLocation::UnknownSize, AAInfo);

        // Skip accesses to local or constant memory as they don't impact the
        // externally visible mod/ref behavior.
        if (AAR.pointsToConstantMemory(Loc, /*OrLocal=*/true))
          continue;

        if (MRB & MRI_Mod)
          // Writes non-local memory.  Give up.
          return MAK_MayWrite;
        if (MRB & MRI_Ref)
          // Ok, it reads non-local memory.
          ReadsMemory = true;
      }
      continue;
    } else if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
      // Ignore non-volatile loads from local memory. (Atomic is okay here.)
      if (!LI->isVolatile()) {
        MemoryLocation Loc = MemoryLocation::get(LI);
        if (AAR.pointsToConstantMemory(Loc, /*OrLocal=*/true))
          continue;
      }
    } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
      // Ignore non-volatile stores to local memory. (Atomic is okay here.)
      if (!SI->isVolatile()) {
        MemoryLocation Loc = MemoryLocation::get(SI);
        if (AAR.pointsToConstantMemory(Loc, /*OrLocal=*/true))
          continue;
      }
    } else if (VAArgInst *VI = dyn_cast<VAArgInst>(I)) {
      // Ignore vaargs on local memory.
      MemoryLocation Loc = MemoryLocation::get(VI);
      if (AAR.pointsToConstantMemory(Loc, /*OrLocal=*/true))
        continue;
    }

    // Any remaining instructions need to be taken seriously!  Check if they
    // read or write memory.
    if (I->mayWriteToMemory())
      // Writes memory.  Just give up.
      return MAK_MayWrite;

    // If this instruction may read memory, remember that.
    ReadsMemory |= I->mayReadFromMemory();
  }

  return ReadsMemory ? MAK_ReadOnly : MAK_ReadNone;
}

/// Deduce readonly/readnone attributes for the SCC.
template <typename AARGetterT>
static bool addReadAttrs(const SCCNodeSet &SCCNodes, AARGetterT AARGetter) {
  // Check if any of the functions in the SCC read or write memory.  If they
  // write memory then they can't be marked readnone or readonly.
  bool ReadsMemory = false;
  for (Function *F : SCCNodes) {
    // Call the callable parameter to look up AA results for this function.
    AAResults &AAR = AARGetter(*F);

    switch (checkFunctionMemoryAccess(*F, AAR, SCCNodes)) {
    case MAK_MayWrite:
      return false;
    case MAK_ReadOnly:
      ReadsMemory = true;
      break;
    case MAK_ReadNone:
      // Nothing to do!
      break;
    }
  }

  // Success!  Functions in this SCC do not access memory, or only read
  // memory.  Give them the appropriate attribute.
  bool MadeChange = false;
  for (Function *F : SCCNodes) {
    if (F->doesNotAccessMemory())
      // Already perfect!
      continue;

    if (F->onlyReadsMemory() && ReadsMemory)
      // No change.
      continue;

    MadeChange = true;

    // Clear out any existing attributes.
    AttrBuilder B;
    B.addAttribute(Attribute::ReadOnly).addAttribute(Attribute::ReadNone);
    F->removeAttributes(
        AttributeSet::FunctionIndex,
        AttributeSet::get(F->getContext(), AttributeSet::FunctionIndex, B));

    // Add in the new attribute.
    F->addAttribute(AttributeSet::FunctionIndex,
                    ReadsMemory ? Attribute::ReadOnly : Attribute::ReadNone);

    if (ReadsMemory)
      ++NumReadOnly;
    else
      ++NumReadNone;
  }

  return MadeChange;
}

namespace {
/// For a given pointer Argument, this retains a list of Arguments of functions
/// in the same SCC that the pointer data flows into.  We use this to build an
/// SCC of the arguments.
struct ArgumentGraphNode {
  Argument *Definition;
  SmallVector<ArgumentGraphNode *, 4> Uses;
};

class ArgumentGraph {
  // We store pointers to ArgumentGraphNode objects, so it's important that
  // they not move around upon insert.
  typedef std::map<Argument *, ArgumentGraphNode> ArgumentMapTy;

  ArgumentMapTy ArgumentMap;

  // There is no root node for the argument graph, in fact:
  //   void f(int *x, int *y) { if (...) f(x, y); }
  // is an example where the graph is disconnected.  The SCCIterator requires
  // a single entry point, so we maintain a fake ("synthetic") root node that
  // uses every node.  Because the graph is directed and nothing points into
  // the root, it will not participate in any SCCs (except for its own).
  ArgumentGraphNode SyntheticRoot;

public:
  ArgumentGraph() { SyntheticRoot.Definition = nullptr; }

  typedef SmallVectorImpl<ArgumentGraphNode *>::iterator iterator;

  iterator begin() { return SyntheticRoot.Uses.begin(); }
  iterator end() { return SyntheticRoot.Uses.end(); }
  ArgumentGraphNode *getEntryNode() { return &SyntheticRoot; }

  ArgumentGraphNode *operator[](Argument *A) {
    ArgumentGraphNode &Node = ArgumentMap[A];
    Node.Definition = A;
    SyntheticRoot.Uses.push_back(&Node);
    return &Node;
  }
};

/// This tracker checks whether callees are in the SCC, and if so it does not
/// consider that a capture, instead adding it to the "Uses" list and
/// continuing with the analysis.
struct ArgumentUsesTracker : public CaptureTracker {
  ArgumentUsesTracker(const SCCNodeSet &SCCNodes)
      : Captured(false), SCCNodes(SCCNodes) {}

  void tooManyUses() override { Captured = true; }

  bool captured(const Use *U) override {
    CallSite CS(U->getUser());
    if (!CS.getInstruction()) {
      Captured = true;
      return true;
    }

    Function *F = CS.getCalledFunction();
    if (!F || !F->hasExactDefinition() || !SCCNodes.count(F)) {
      Captured = true;
      return true;
    }

    // Note: the callee and the two successor blocks *follow* the argument
    // operands.  This means there is no need to adjust UseIndex to account
    // for these.
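    // For example, in a call like
    //   call void @f(i8* %a, i8* %b) [ "deopt"(i8* %c) ]
    // the data operands are %a, %b and %c (in that order), and the callee
    // operand @f only comes after them in the use list.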

    unsigned UseIndex =
        std::distance(const_cast<const Use *>(CS.arg_begin()), U);

    assert(UseIndex < CS.data_operands_size() &&
           "Indirect function calls should have been filtered above!");

    if (UseIndex >= CS.getNumArgOperands()) {
      // Data operand, but not an argument operand -- must be a bundle operand
      assert(CS.hasOperandBundles() && "Must be!");

      // CaptureTracking told us that we're being captured by an operand
      // bundle use.  In this case it does not matter if the callee is within
      // our SCC or not -- we've been captured in some unknown way, and we
      // have to be conservative.
      Captured = true;
      return true;
    }

    if (UseIndex >= F->arg_size()) {
      assert(F->isVarArg() && "More params than args in non-varargs call");
      Captured = true;
      return true;
    }

    Uses.push_back(&*std::next(F->arg_begin(), UseIndex));
    return false;
  }

  bool Captured; // True only if certainly captured (used outside our SCC).
  SmallVector<Argument *, 4> Uses; // Uses within our SCC.

  const SCCNodeSet &SCCNodes;
};
}

namespace llvm {
template <> struct GraphTraits<ArgumentGraphNode *> {
  typedef ArgumentGraphNode *NodeRef;
  typedef SmallVectorImpl<ArgumentGraphNode *>::iterator ChildIteratorType;

  static NodeRef getEntryNode(NodeRef A) { return A; }
  static ChildIteratorType child_begin(NodeRef N) { return N->Uses.begin(); }
  static ChildIteratorType child_end(NodeRef N) { return N->Uses.end(); }
};
template <>
struct GraphTraits<ArgumentGraph *> : public GraphTraits<ArgumentGraphNode *> {
  static NodeRef getEntryNode(ArgumentGraph *AG) { return AG->getEntryNode(); }
  static ChildIteratorType nodes_begin(ArgumentGraph *AG) {
    return AG->begin();
  }
  static ChildIteratorType nodes_end(ArgumentGraph *AG) { return AG->end(); }
};
}

/// Returns Attribute::None, Attribute::ReadOnly or Attribute::ReadNone.
static Attribute::AttrKind
determinePointerReadAttrs(Argument *A,
                          const SmallPtrSet<Argument *, 8> &SCCNodes) {

  SmallVector<Use *, 32> Worklist;
  SmallSet<Use *, 32> Visited;

  // inalloca arguments are always clobbered by the call.
  if (A->hasInAllocaAttr())
    return Attribute::None;

  bool IsRead = false;
  // We don't need to track IsWritten. If A is written to, return immediately.

  for (Use &U : A->uses()) {
    Visited.insert(&U);
    Worklist.push_back(&U);
  }

  while (!Worklist.empty()) {
    Use *U = Worklist.pop_back_val();
    Instruction *I = cast<Instruction>(U->getUser());

    switch (I->getOpcode()) {
    case Instruction::BitCast:
    case Instruction::GetElementPtr:
    case Instruction::PHI:
    case Instruction::Select:
    case Instruction::AddrSpaceCast:
      // The original value is not read/written via this if the new value
      // isn't.
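      // e.g. a GEP or bitcast of the argument only counts as a read if some
      // transitive user of that GEP/bitcast reads through it.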
      for (Use &UU : I->uses())
        if (Visited.insert(&UU).second)
          Worklist.push_back(&UU);
      break;

    case Instruction::Call:
    case Instruction::Invoke: {
      bool Captures = true;

      if (I->getType()->isVoidTy())
        Captures = false;

      auto AddUsersToWorklistIfCapturing = [&] {
        if (Captures)
          for (Use &UU : I->uses())
            if (Visited.insert(&UU).second)
              Worklist.push_back(&UU);
      };

      CallSite CS(I);
      if (CS.doesNotAccessMemory()) {
        AddUsersToWorklistIfCapturing();
        continue;
      }

      Function *F = CS.getCalledFunction();
      if (!F) {
        if (CS.onlyReadsMemory()) {
          IsRead = true;
          AddUsersToWorklistIfCapturing();
          continue;
        }
        return Attribute::None;
      }

      // Note: the callee and the two successor blocks *follow* the argument
      // operands.  This means there is no need to adjust UseIndex to account
      // for these.

      unsigned UseIndex = std::distance(CS.arg_begin(), U);

      // U cannot be the callee operand use: since we're exploring the
      // transitive uses of an Argument, having such a use be a callee would
      // imply the CallSite is an indirect call or invoke; and we'd take the
      // early exit above.
      assert(UseIndex < CS.data_operands_size() &&
             "Data operand use expected!");

      bool IsOperandBundleUse = UseIndex >= CS.getNumArgOperands();

      if (UseIndex >= F->arg_size() && !IsOperandBundleUse) {
        assert(F->isVarArg() && "More params than args in non-varargs call");
        return Attribute::None;
      }

      Captures &= !CS.doesNotCapture(UseIndex);

      // Since the optimizer (by design) cannot see the data flow corresponding
      // to an operand bundle use, these cannot participate in the optimistic
      // SCC analysis.  Instead, we model the operand bundle uses as arguments
      // in a call to a function external to the SCC.
      if (IsOperandBundleUse ||
          !SCCNodes.count(&*std::next(F->arg_begin(), UseIndex))) {

        // The accessors used on CallSite here do the right thing for calls
        // and invokes with operand bundles.

        if (!CS.onlyReadsMemory() && !CS.onlyReadsMemory(UseIndex))
          return Attribute::None;
        if (!CS.doesNotAccessMemory(UseIndex))
          IsRead = true;
      }

      AddUsersToWorklistIfCapturing();
      break;
    }

    case Instruction::Load:
      // A volatile load has side effects beyond what readonly can be relied
      // upon to describe.
      if (cast<LoadInst>(I)->isVolatile())
        return Attribute::None;

      IsRead = true;
      break;

    case Instruction::ICmp:
    case Instruction::Ret:
      break;

    default:
      return Attribute::None;
    }
  }

  return IsRead ? Attribute::ReadOnly : Attribute::ReadNone;
}

/// Deduce returned attributes for the SCC.
static bool addArgumentReturnedAttrs(const SCCNodeSet &SCCNodes) {
  bool Changed = false;

  AttrBuilder B;
  B.addAttribute(Attribute::Returned);

  // Check each function in turn, determining if an argument is always
  // returned.
  for (Function *F : SCCNodes) {
    // We can infer and propagate function attributes only when we know that
    // the definition we'll get at link time is *exactly* the definition we
    // see now.  For more details, see GlobalValue::mayBeDerefined.
    if (!F->hasExactDefinition())
      continue;

    if (F->getReturnType()->isVoidTy())
      continue;

    auto FindRetArg = [&]() -> Value * {
      Value *RetArg = nullptr;
      for (BasicBlock &BB : *F)
        if (auto *Ret = dyn_cast<ReturnInst>(BB.getTerminator())) {
          // Note that stripPointerCasts should look through functions with
          // returned arguments.
          Value *RetVal = Ret->getReturnValue()->stripPointerCasts();
          if (!isa<Argument>(RetVal) ||
              RetVal->getType() != F->getReturnType())
            return nullptr;

          if (!RetArg)
            RetArg = RetVal;
          else if (RetArg != RetVal)
            return nullptr;
        }

      return RetArg;
    };

    if (Value *RetArg = FindRetArg()) {
      auto *A = cast<Argument>(RetArg);
      A->addAttr(AttributeSet::get(F->getContext(), A->getArgNo() + 1, B));
      ++NumReturned;
      Changed = true;
    }
  }

  return Changed;
}

/// Deduce nocapture attributes for the SCC.
static bool addArgumentAttrs(const SCCNodeSet &SCCNodes) {
  bool Changed = false;

  ArgumentGraph AG;

  AttrBuilder B;
  B.addAttribute(Attribute::NoCapture);

  // Check each function in turn, determining which pointer arguments are not
  // captured.
  for (Function *F : SCCNodes) {
    // We can infer and propagate function attributes only when we know that
    // the definition we'll get at link time is *exactly* the definition we
    // see now.  For more details, see GlobalValue::mayBeDerefined.
    if (!F->hasExactDefinition())
      continue;

    // Functions that are readonly (or readnone) and nounwind and don't return
    // a value can't capture arguments. Don't analyze them.
    if (F->onlyReadsMemory() && F->doesNotThrow() &&
        F->getReturnType()->isVoidTy()) {
      for (Function::arg_iterator A = F->arg_begin(), E = F->arg_end(); A != E;
           ++A) {
        if (A->getType()->isPointerTy() && !A->hasNoCaptureAttr()) {
          A->addAttr(AttributeSet::get(F->getContext(), A->getArgNo() + 1, B));
          ++NumNoCapture;
          Changed = true;
        }
      }
      continue;
    }

    for (Function::arg_iterator A = F->arg_begin(), E = F->arg_end(); A != E;
         ++A) {
      if (!A->getType()->isPointerTy())
        continue;
      bool HasNonLocalUses = false;
      if (!A->hasNoCaptureAttr()) {
        ArgumentUsesTracker Tracker(SCCNodes);
        PointerMayBeCaptured(&*A, &Tracker);
        if (!Tracker.Captured) {
          if (Tracker.Uses.empty()) {
            // If it's trivially not captured, mark it nocapture now.
            A->addAttr(
                AttributeSet::get(F->getContext(), A->getArgNo() + 1, B));
            ++NumNoCapture;
            Changed = true;
          } else {
            // If it's not trivially captured and not trivially not captured,
            // then it must be calling into another function in our SCC. Save
            // its particulars for Argument-SCC analysis later.
            ArgumentGraphNode *Node = AG[&*A];
            for (Argument *Use : Tracker.Uses) {
              Node->Uses.push_back(AG[Use]);
              if (Use != &*A)
                HasNonLocalUses = true;
            }
          }
        }
        // Otherwise, it's captured. Don't bother doing SCC analysis on it.
      }
      if (!HasNonLocalUses && !A->onlyReadsMemory()) {
        // Can we determine that it's readonly/readnone without doing an SCC?
        // Note that we don't allow any calls at all here, or else our result
        // will be dependent on the iteration order through the functions in
        // the SCC.
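        // Passing a singleton set containing only this argument means that
        // only data flow back into this same argument is treated
        // optimistically; flow into any other argument is handled as if it
        // were a call outside the SCC.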
        SmallPtrSet<Argument *, 8> Self;
        Self.insert(&*A);
        Attribute::AttrKind R = determinePointerReadAttrs(&*A, Self);
        if (R != Attribute::None) {
          AttrBuilder B;
          B.addAttribute(R);
          A->addAttr(AttributeSet::get(A->getContext(), A->getArgNo() + 1, B));
          Changed = true;
          R == Attribute::ReadOnly ? ++NumReadOnlyArg : ++NumReadNoneArg;
        }
      }
    }
  }

  // The graph we've collected is partial because we stopped scanning for
  // argument uses once we solved the argument trivially.  These partial nodes
  // show up as ArgumentGraphNode objects with an empty Uses list, and for
  // these nodes the final decision about whether they capture has already
  // been made.  If the definition doesn't have a 'nocapture' attribute by
  // now, it captures.

  for (scc_iterator<ArgumentGraph *> I = scc_begin(&AG); !I.isAtEnd(); ++I) {
    const std::vector<ArgumentGraphNode *> &ArgumentSCC = *I;
    if (ArgumentSCC.size() == 1) {
      if (!ArgumentSCC[0]->Definition)
        continue; // synthetic root node

      // eg. "void f(int* x) { if (...) f(x); }"
      if (ArgumentSCC[0]->Uses.size() == 1 &&
          ArgumentSCC[0]->Uses[0] == ArgumentSCC[0]) {
        Argument *A = ArgumentSCC[0]->Definition;
        A->addAttr(AttributeSet::get(A->getContext(), A->getArgNo() + 1, B));
        ++NumNoCapture;
        Changed = true;
      }
      continue;
    }

    bool SCCCaptured = false;
    for (auto I = ArgumentSCC.begin(), E = ArgumentSCC.end();
         I != E && !SCCCaptured; ++I) {
      ArgumentGraphNode *Node = *I;
      if (Node->Uses.empty()) {
        if (!Node->Definition->hasNoCaptureAttr())
          SCCCaptured = true;
      }
    }
    if (SCCCaptured)
      continue;

    SmallPtrSet<Argument *, 8> ArgumentSCCNodes;
    // Fill ArgumentSCCNodes with the elements of the ArgumentSCC.  Used for
    // quickly looking up whether a given Argument is in this ArgumentSCC.
    for (ArgumentGraphNode *I : ArgumentSCC) {
      ArgumentSCCNodes.insert(I->Definition);
    }

    for (auto I = ArgumentSCC.begin(), E = ArgumentSCC.end();
         I != E && !SCCCaptured; ++I) {
      ArgumentGraphNode *N = *I;
      for (ArgumentGraphNode *Use : N->Uses) {
        Argument *A = Use->Definition;
        if (A->hasNoCaptureAttr() || ArgumentSCCNodes.count(A))
          continue;
        SCCCaptured = true;
        break;
      }
    }
    if (SCCCaptured)
      continue;

    for (unsigned i = 0, e = ArgumentSCC.size(); i != e; ++i) {
      Argument *A = ArgumentSCC[i]->Definition;
      A->addAttr(AttributeSet::get(A->getContext(), A->getArgNo() + 1, B));
      ++NumNoCapture;
      Changed = true;
    }

    // We also want to compute readonly/readnone. With a small number of false
    // negatives, we can assume that any pointer which is captured isn't going
    // to be provably readonly or readnone, since by definition we can't
    // analyze all uses of a captured pointer.
    //
    // The false negatives happen when the pointer is captured by a function
    // that promises readonly/readnone behaviour on the pointer, then the
    // pointer's lifetime ends before anything that writes to arbitrary
    // memory.  Also, a readonly/readnone pointer may be returned, but
    // returning a pointer is capturing it.
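
    // Compute the meet over the whole argument SCC: start optimistically at
    // readnone, weaken to readonly if any member only reads, and give up
    // (Attribute::None) as soon as any member may be written through.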

    Attribute::AttrKind ReadAttr = Attribute::ReadNone;
    for (unsigned i = 0, e = ArgumentSCC.size(); i != e; ++i) {
      Argument *A = ArgumentSCC[i]->Definition;
      Attribute::AttrKind K = determinePointerReadAttrs(A, ArgumentSCCNodes);
      if (K == Attribute::ReadNone)
        continue;
      if (K == Attribute::ReadOnly) {
        ReadAttr = Attribute::ReadOnly;
        continue;
      }
      ReadAttr = K;
      break;
    }

    if (ReadAttr != Attribute::None) {
      AttrBuilder B, R;
      B.addAttribute(ReadAttr);
      R.addAttribute(Attribute::ReadOnly).addAttribute(Attribute::ReadNone);
      for (unsigned i = 0, e = ArgumentSCC.size(); i != e; ++i) {
        Argument *A = ArgumentSCC[i]->Definition;
        // Clear out existing readonly/readnone attributes.
        A->removeAttr(AttributeSet::get(A->getContext(), A->getArgNo() + 1, R));
        A->addAttr(AttributeSet::get(A->getContext(), A->getArgNo() + 1, B));
        ReadAttr == Attribute::ReadOnly ? ++NumReadOnlyArg : ++NumReadNoneArg;
        Changed = true;
      }
    }
  }

  return Changed;
}

/// Tests whether a function is "malloc-like".
///
/// A function is "malloc-like" if it returns either null or a pointer that
/// doesn't alias any other pointer visible to the caller.
static bool isFunctionMallocLike(Function *F, const SCCNodeSet &SCCNodes) {
  SmallSetVector<Value *, 8> FlowsToReturn;
  for (BasicBlock &BB : *F)
    if (ReturnInst *Ret = dyn_cast<ReturnInst>(BB.getTerminator()))
      FlowsToReturn.insert(Ret->getReturnValue());

  for (unsigned i = 0; i != FlowsToReturn.size(); ++i) {
    Value *RetVal = FlowsToReturn[i];

    if (Constant *C = dyn_cast<Constant>(RetVal)) {
      if (!C->isNullValue() && !isa<UndefValue>(C))
        return false;

      continue;
    }

    if (isa<Argument>(RetVal))
      return false;

    if (Instruction *RVI = dyn_cast<Instruction>(RetVal))
      switch (RVI->getOpcode()) {
      // Extend the analysis by looking upwards.
      case Instruction::BitCast:
      case Instruction::GetElementPtr:
      case Instruction::AddrSpaceCast:
        FlowsToReturn.insert(RVI->getOperand(0));
        continue;
      case Instruction::Select: {
        SelectInst *SI = cast<SelectInst>(RVI);
        FlowsToReturn.insert(SI->getTrueValue());
        FlowsToReturn.insert(SI->getFalseValue());
        continue;
      }
      case Instruction::PHI: {
        PHINode *PN = cast<PHINode>(RVI);
        for (Value *IncValue : PN->incoming_values())
          FlowsToReturn.insert(IncValue);
        continue;
      }

      // Check whether the pointer came from an allocation.
      case Instruction::Alloca:
        break;
      case Instruction::Call:
      case Instruction::Invoke: {
        CallSite CS(RVI);
        if (CS.paramHasAttr(0, Attribute::NoAlias))
          break;
        if (CS.getCalledFunction() && SCCNodes.count(CS.getCalledFunction()))
          break;
        LLVM_FALLTHROUGH;
      }
      default:
        return false; // Did not come from an allocation.
      }

    if (PointerMayBeCaptured(RetVal, false, /*StoreCaptures=*/false))
      return false;
  }

  return true;
}

/// Deduce noalias attributes for the SCC.
static bool addNoAliasAttrs(const SCCNodeSet &SCCNodes) {
  // Check each function in turn, determining which functions return noalias
  // pointers.
  for (Function *F : SCCNodes) {
    // Already noalias.
    if (F->doesNotAlias(0))
      continue;

    // We can infer and propagate function attributes only when we know that
    // the definition we'll get at link time is *exactly* the definition we
    // see now.
    // For more details, see GlobalValue::mayBeDerefined.
    if (!F->hasExactDefinition())
      return false;

    // We annotate noalias return values, which are only applicable to
    // pointer types.
    if (!F->getReturnType()->isPointerTy())
      continue;

    if (!isFunctionMallocLike(F, SCCNodes))
      return false;
  }

  bool MadeChange = false;
  for (Function *F : SCCNodes) {
    if (F->doesNotAlias(0) || !F->getReturnType()->isPointerTy())
      continue;

    F->setDoesNotAlias(0);
    ++NumNoAlias;
    MadeChange = true;
  }

  return MadeChange;
}

/// Tests whether this function is known to not return null.
///
/// Requires that the function returns a pointer.
///
/// Returns true if it believes the function will not return a null, and sets
/// \p Speculative based on whether the returned conclusion is a speculative
/// conclusion due to SCC calls.
static bool isReturnNonNull(Function *F, const SCCNodeSet &SCCNodes,
                            bool &Speculative) {
  assert(F->getReturnType()->isPointerTy() &&
         "nonnull only meaningful on pointer types");
  Speculative = false;

  SmallSetVector<Value *, 8> FlowsToReturn;
  for (BasicBlock &BB : *F)
    if (auto *Ret = dyn_cast<ReturnInst>(BB.getTerminator()))
      FlowsToReturn.insert(Ret->getReturnValue());

  for (unsigned i = 0; i != FlowsToReturn.size(); ++i) {
    Value *RetVal = FlowsToReturn[i];

    // If this value is locally known to be non-null, we're good.
    if (isKnownNonNull(RetVal))
      continue;

    // Otherwise, we need to look upwards since we can't make any local
    // conclusions.
    Instruction *RVI = dyn_cast<Instruction>(RetVal);
    if (!RVI)
      return false;
    switch (RVI->getOpcode()) {
    // Extend the analysis by looking upwards.
    case Instruction::BitCast:
    case Instruction::GetElementPtr:
    case Instruction::AddrSpaceCast:
      FlowsToReturn.insert(RVI->getOperand(0));
      continue;
    case Instruction::Select: {
      SelectInst *SI = cast<SelectInst>(RVI);
      FlowsToReturn.insert(SI->getTrueValue());
      FlowsToReturn.insert(SI->getFalseValue());
      continue;
    }
    case Instruction::PHI: {
      PHINode *PN = cast<PHINode>(RVI);
      for (int i = 0, e = PN->getNumIncomingValues(); i != e; ++i)
        FlowsToReturn.insert(PN->getIncomingValue(i));
      continue;
    }
    case Instruction::Call:
    case Instruction::Invoke: {
      CallSite CS(RVI);
      Function *Callee = CS.getCalledFunction();
      // A call to a node within the SCC is assumed to return null until
      // proven otherwise.
      if (Callee && SCCNodes.count(Callee)) {
        Speculative = true;
        continue;
      }
      return false;
    }
    default:
      return false; // Unknown source, may be null.
    };
    llvm_unreachable("should have either continued or returned");
  }

  return true;
}

/// Deduce nonnull attributes for the SCC.
static bool addNonNullAttrs(const SCCNodeSet &SCCNodes) {
  // Speculate that all functions in the SCC return only nonnull
  // pointers.  We may refute this as we analyze functions.
  bool SCCReturnsNonNull = true;

  bool MadeChange = false;

  // Check each function in turn, determining which functions return nonnull
  // pointers.
  for (Function *F : SCCNodes) {
    // Already nonnull.
    if (F->getAttributes().hasAttribute(AttributeSet::ReturnIndex,
                                        Attribute::NonNull))
      continue;

    // We can infer and propagate function attributes only when we know that
    // the definition we'll get at link time is *exactly* the definition we
    // see now.  For more details, see GlobalValue::mayBeDerefined.
    if (!F->hasExactDefinition())
      return false;

    // We annotate nonnull return values, which are only applicable to
    // pointer types.
    if (!F->getReturnType()->isPointerTy())
      continue;

    bool Speculative = false;
    if (isReturnNonNull(F, SCCNodes, Speculative)) {
      if (!Speculative) {
        // Mark the function eagerly since we may discover a function
        // which prevents us from speculating about the entire SCC.
        DEBUG(dbgs() << "Eagerly marking " << F->getName() << " as nonnull\n");
        F->addAttribute(AttributeSet::ReturnIndex, Attribute::NonNull);
        ++NumNonNullReturn;
        MadeChange = true;
      }
      continue;
    }
    // At least one function returns something which could be null, can't
    // speculate any more.
    SCCReturnsNonNull = false;
  }

  if (SCCReturnsNonNull) {
    for (Function *F : SCCNodes) {
      if (F->getAttributes().hasAttribute(AttributeSet::ReturnIndex,
                                          Attribute::NonNull) ||
          !F->getReturnType()->isPointerTy())
        continue;

      DEBUG(dbgs() << "SCC marking " << F->getName() << " as nonnull\n");
      F->addAttribute(AttributeSet::ReturnIndex, Attribute::NonNull);
      ++NumNonNullReturn;
      MadeChange = true;
    }
  }

  return MadeChange;
}

/// Remove the convergent attribute from all functions in the SCC if every
/// callsite within the SCC is not convergent (except for calls to functions
/// within the SCC).  Returns true if changes were made.
static bool removeConvergentAttrs(const SCCNodeSet &SCCNodes) {
  // For every function in SCC, ensure that either
  //  * it is not convergent, or
  //  * we can remove its convergent attribute.
  bool HasConvergentFn = false;
  for (Function *F : SCCNodes) {
    if (!F->isConvergent()) continue;
    HasConvergentFn = true;

    // Can't remove convergent from function declarations.
    if (F->isDeclaration()) return false;

    // Can't remove convergent if any of our functions has a convergent call
    // to a function not in the SCC.
    for (Instruction &I : instructions(*F)) {
      CallSite CS(&I);
      // Bail if CS is a convergent call to a function not in the SCC.
      if (CS && CS.isConvergent() &&
          SCCNodes.count(CS.getCalledFunction()) == 0)
        return false;
    }
  }

  // If the SCC doesn't have any convergent functions, we have nothing to do.
  if (!HasConvergentFn) return false;

  // If we got here, all of the calls the SCC makes to functions not in the
  // SCC are non-convergent.  Therefore all of the SCC's functions can also be
  // made non-convergent.  We'll remove the attr from the callsites in
  // InstCombineCalls.
  for (Function *F : SCCNodes) {
    if (!F->isConvergent()) continue;

    DEBUG(dbgs() << "Removing convergent attr from fn " << F->getName()
                 << "\n");
    F->setNotConvergent();
  }
  return true;
}

static bool setDoesNotRecurse(Function &F) {
  if (F.doesNotRecurse())
    return false;
  F.setDoesNotRecurse();
  ++NumNoRecurse;
  return true;
}

static bool addNoRecurseAttrs(const SCCNodeSet &SCCNodes) {
  // Try and identify functions that do not recurse.
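  // Only the bottom-up, single-node case is handled here; a separate top-down
  // pass (addNoRecurseAttrsTopDown below) handles internal-linkage functions
  // whose callers are all already known to be norecurse.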

  // If the SCC contains multiple nodes we know for sure there is recursion.
  if (SCCNodes.size() != 1)
    return false;

  Function *F = *SCCNodes.begin();
  if (!F || F->isDeclaration() || F->doesNotRecurse())
    return false;

  // If all of the calls in F are identifiable and are to norecurse functions,
  // F is norecurse.  This check also detects self-recursion as F is not
  // currently marked norecurse, so any call from F to F will not be marked
  // norecurse.
  for (Instruction &I : instructions(*F))
    if (auto CS = CallSite(&I)) {
      Function *Callee = CS.getCalledFunction();
      if (!Callee || Callee == F || !Callee->doesNotRecurse())
        // Function calls a potentially recursive function.
        return false;
    }

  // Every call was to a non-recursive function other than this function, and
  // we have no indirect recursion as the SCC size is one.  This function
  // cannot recurse.
  return setDoesNotRecurse(*F);
}

PreservedAnalyses PostOrderFunctionAttrsPass::run(LazyCallGraph::SCC &C,
                                                  CGSCCAnalysisManager &AM,
                                                  LazyCallGraph &CG,
                                                  CGSCCUpdateResult &) {
  FunctionAnalysisManager &FAM =
      AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, CG).getManager();

  // We pass a lambda into functions to wire them up to the analysis manager
  // for getting function analyses.
  auto AARGetter = [&](Function &F) -> AAResults & {
    return FAM.getResult<AAManager>(F);
  };

  // Fill SCCNodes with the elements of the SCC.  Also track whether there are
  // any external or opt-none nodes that will prevent us from optimizing any
  // part of the SCC.
  SCCNodeSet SCCNodes;
  bool HasUnknownCall = false;
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();
    if (F.hasFnAttribute(Attribute::OptimizeNone)) {
      // Treat any function we're trying not to optimize as if it were an
      // indirect call and omit it from the node set used below.
      HasUnknownCall = true;
      continue;
    }
    // Track whether any functions in this SCC have an unknown call edge.
    // Note: if this is ever a performance hit, we can common it with
    // subsequent routines which also do scans over the instructions of the
    // function.
    if (!HasUnknownCall)
      for (Instruction &I : instructions(F))
        if (auto CS = CallSite(&I))
          if (!CS.getCalledFunction()) {
            HasUnknownCall = true;
            break;
          }

    SCCNodes.insert(&F);
  }

  bool Changed = false;
  Changed |= addArgumentReturnedAttrs(SCCNodes);
  Changed |= addReadAttrs(SCCNodes, AARGetter);
  Changed |= addArgumentAttrs(SCCNodes);

  // If we have no external nodes participating in the SCC, we can deduce some
  // more precise attributes as well.
  if (!HasUnknownCall) {
    Changed |= addNoAliasAttrs(SCCNodes);
    Changed |= addNonNullAttrs(SCCNodes);
    Changed |= removeConvergentAttrs(SCCNodes);
    Changed |= addNoRecurseAttrs(SCCNodes);
  }

  return Changed ? PreservedAnalyses::none() : PreservedAnalyses::all();
}

namespace {
struct PostOrderFunctionAttrsLegacyPass : public CallGraphSCCPass {
  static char ID; // Pass identification, replacement for typeid
  PostOrderFunctionAttrsLegacyPass() : CallGraphSCCPass(ID) {
    initializePostOrderFunctionAttrsLegacyPassPass(
        *PassRegistry::getPassRegistry());
  }

  bool runOnSCC(CallGraphSCC &SCC) override;

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    AU.addRequired<AssumptionCacheTracker>();
    getAAResultsAnalysisUsage(AU);
    CallGraphSCCPass::getAnalysisUsage(AU);
  }
};
}

char PostOrderFunctionAttrsLegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(PostOrderFunctionAttrsLegacyPass, "functionattrs",
                      "Deduce function attributes", false, false)
INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
INITIALIZE_PASS_DEPENDENCY(CallGraphWrapperPass)
INITIALIZE_PASS_END(PostOrderFunctionAttrsLegacyPass, "functionattrs",
                    "Deduce function attributes", false, false)

Pass *llvm::createPostOrderFunctionAttrsLegacyPass() {
  return new PostOrderFunctionAttrsLegacyPass();
}

template <typename AARGetterT>
static bool runImpl(CallGraphSCC &SCC, AARGetterT AARGetter) {
  bool Changed = false;

  // Fill SCCNodes with the elements of the SCC.  Used for quickly looking up
  // whether a given CallGraphNode is in this SCC.  Also track whether there
  // are any external or opt-none nodes that will prevent us from optimizing
  // any part of the SCC.
  SCCNodeSet SCCNodes;
  bool ExternalNode = false;
  for (CallGraphNode *I : SCC) {
    Function *F = I->getFunction();
    if (!F || F->hasFnAttribute(Attribute::OptimizeNone)) {
      // External node or function we're trying not to optimize -- we both
      // avoid transforming it and avoid leveraging information it provides.
      ExternalNode = true;
      continue;
    }

    SCCNodes.insert(F);
  }

  Changed |= addArgumentReturnedAttrs(SCCNodes);
  Changed |= addReadAttrs(SCCNodes, AARGetter);
  Changed |= addArgumentAttrs(SCCNodes);

  // If we have no external nodes participating in the SCC, we can deduce some
  // more precise attributes as well.
  if (!ExternalNode) {
    Changed |= addNoAliasAttrs(SCCNodes);
    Changed |= addNonNullAttrs(SCCNodes);
    Changed |= removeConvergentAttrs(SCCNodes);
    Changed |= addNoRecurseAttrs(SCCNodes);
  }

  return Changed;
}

bool PostOrderFunctionAttrsLegacyPass::runOnSCC(CallGraphSCC &SCC) {
  if (skipSCC(SCC))
    return false;

  // We compute dedicated AA results for each function in the SCC as needed.
  // We use a lambda referencing external objects so that they live long
  // enough to be queried, but we re-use them each time.
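  // Note: BAR is declared before AAR so that it outlives the AAResults that
  // holds a reference to it.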
  Optional<BasicAAResult> BAR;
  Optional<AAResults> AAR;
  auto AARGetter = [&](Function &F) -> AAResults & {
    BAR.emplace(createLegacyPMBasicAAResult(*this, F));
    AAR.emplace(createLegacyPMAAResults(*this, F, *BAR));
    return *AAR;
  };

  return runImpl(SCC, AARGetter);
}

namespace {
struct ReversePostOrderFunctionAttrsLegacyPass : public ModulePass {
  static char ID; // Pass identification, replacement for typeid
  ReversePostOrderFunctionAttrsLegacyPass() : ModulePass(ID) {
    initializeReversePostOrderFunctionAttrsLegacyPassPass(
        *PassRegistry::getPassRegistry());
  }

  bool runOnModule(Module &M) override;

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    AU.addRequired<CallGraphWrapperPass>();
    AU.addPreserved<CallGraphWrapperPass>();
  }
};
}

char ReversePostOrderFunctionAttrsLegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(ReversePostOrderFunctionAttrsLegacyPass,
                      "rpo-functionattrs",
                      "Deduce function attributes in RPO", false, false)
INITIALIZE_PASS_DEPENDENCY(CallGraphWrapperPass)
INITIALIZE_PASS_END(ReversePostOrderFunctionAttrsLegacyPass,
                    "rpo-functionattrs",
                    "Deduce function attributes in RPO", false, false)

Pass *llvm::createReversePostOrderFunctionAttrsPass() {
  return new ReversePostOrderFunctionAttrsLegacyPass();
}

static bool addNoRecurseAttrsTopDown(Function &F) {
  // We check the preconditions for the function prior to calling this to
  // avoid the cost of building up a reversible post-order list.  We assert
  // them here to make sure none of the invariants this relies on were
  // violated.
  assert(!F.isDeclaration() && "Cannot deduce norecurse without a definition!");
  assert(!F.doesNotRecurse() &&
         "This function has already been deduced as norecurse!");
  assert(F.hasInternalLinkage() &&
         "Can only do top-down deduction for internal linkage functions!");

  // If F is internal and all of its uses are calls from non-recursive
  // functions, then none of its calls could in fact recurse without going
  // through a function marked norecurse, and so we can mark this function
  // too as norecurse.  Note that the uses must actually be calls -- otherwise
  // a pointer to this function could be returned from a norecurse function
  // but this function could be recursively (indirectly) called.  Note that
  // this also detects if F is directly recursive as F is not yet marked as
  // a norecurse function.
  for (auto *U : F.users()) {
    auto *I = dyn_cast<Instruction>(U);
    if (!I)
      return false;
    CallSite CS(I);
    if (!CS || !CS.getParent()->getParent()->doesNotRecurse())
      return false;
  }
  return setDoesNotRecurse(F);
}

static bool deduceFunctionAttributeInRPO(Module &M, CallGraph &CG) {
  // We only have a post-order SCC traversal (because SCCs are inherently
  // discovered in post-order), so we accumulate them in a vector and then
  // walk it in reverse.  This is simpler than using the RPO iterator
  // infrastructure because we need to combine SCC detection and the PO walk
  // of the call graph.  We can also cheat egregiously because we're primarily
  // interested in synthesizing norecurse and so we can only save the singular
  // SCCs as SCCs with multiple functions in them will clearly be recursive.
  SmallVector<Function *, 16> Worklist;
  for (scc_iterator<CallGraph *> I = scc_begin(&CG); !I.isAtEnd(); ++I) {
    if (I->size() != 1)
      continue;

    Function *F = I->front()->getFunction();
    if (F && !F->isDeclaration() && !F->doesNotRecurse() &&
        F->hasInternalLinkage())
      Worklist.push_back(F);
  }

  bool Changed = false;
  for (auto *F : reverse(Worklist))
    Changed |= addNoRecurseAttrsTopDown(*F);

  return Changed;
}

bool ReversePostOrderFunctionAttrsLegacyPass::runOnModule(Module &M) {
  if (skipModule(M))
    return false;

  auto &CG = getAnalysis<CallGraphWrapperPass>().getCallGraph();

  return deduceFunctionAttributeInRPO(M, CG);
}

PreservedAnalyses
ReversePostOrderFunctionAttrsPass::run(Module &M, ModuleAnalysisManager &AM) {
  auto &CG = AM.getResult<CallGraphAnalysis>(M);

  bool Changed = deduceFunctionAttributeInRPO(M, CG);

  // CallGraphAnalysis holds AssertingVH and must be invalidated eagerly so
  // that other passes don't delete stuff from under it.
  // FIXME: We need to invalidate this to avoid PR28400. Is there a better
  // solution?
  AM.invalidate<CallGraphAnalysis>(M);

  if (!Changed)
    return PreservedAnalyses::all();
  PreservedAnalyses PA;
  PA.preserve<CallGraphAnalysis>();
  return PA;
}