1 //===- Attributor.cpp - Module-wide attribute deduction -------------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
// This file implements an inter procedural pass that deduces and/or propagates
10 // attributes. This is done in an abstract interpretation style fixpoint
11 // iteration. See the Attributor.h file comment and the class descriptions in
12 // that file for more information.
13 //
14 //===----------------------------------------------------------------------===//
15 
16 #include "llvm/Transforms/IPO/Attributor.h"
17 
18 #include "llvm/ADT/DepthFirstIterator.h"
19 #include "llvm/ADT/STLExtras.h"
20 #include "llvm/ADT/SetVector.h"
21 #include "llvm/ADT/SmallPtrSet.h"
22 #include "llvm/ADT/SmallVector.h"
23 #include "llvm/ADT/Statistic.h"
24 #include "llvm/Analysis/CaptureTracking.h"
25 #include "llvm/Analysis/EHPersonalities.h"
26 #include "llvm/Analysis/GlobalsModRef.h"
27 #include "llvm/Analysis/Loads.h"
28 #include "llvm/Analysis/ValueTracking.h"
29 #include "llvm/IR/Argument.h"
30 #include "llvm/IR/Attributes.h"
31 #include "llvm/IR/CFG.h"
32 #include "llvm/IR/InstIterator.h"
33 #include "llvm/IR/IntrinsicInst.h"
34 #include "llvm/Support/CommandLine.h"
35 #include "llvm/Support/Debug.h"
36 #include "llvm/Support/raw_ostream.h"
37 #include "llvm/Transforms/Utils/BasicBlockUtils.h"
38 #include "llvm/Transforms/Utils/Local.h"
39 
40 #include <cassert>
41 
42 using namespace llvm;
43 
44 #define DEBUG_TYPE "attributor"
45 
// Pass-wide statistics (printed with -stats): how many functions could be
// analyzed precisely, and how the abstract attributes fared in the fixpoint
// iteration.
STATISTIC(NumFnWithExactDefinition,
          "Number of function with exact definitions");
STATISTIC(NumFnWithoutExactDefinition,
          "Number of function without exact definitions");
STATISTIC(NumAttributesTimedOut,
          "Number of abstract attributes timed out before fixpoint");
STATISTIC(NumAttributesValidFixpoint,
          "Number of abstract attributes in a valid fixpoint state");
STATISTIC(NumAttributesManifested,
          "Number of abstract attributes manifested in IR");
56 
57 // Some helper macros to deal with statistics tracking.
58 //
59 // Usage:
60 // For simple IR attribute tracking overload trackStatistics in the abstract
61 // attribute and choose the right STATS_DECL_AND_TRACK_********* macro,
62 // e.g.,:
63 //  void trackStatistics() const override {
64 //    STATS_DECL_AND_TRACK_ARG_ATTR(returned)
65 //  }
// If there is a single "increment" site one can use the macro
// STATS_DECL_AND_TRACK with a custom message. If there are multiple increment
// sites, STATS_DECL and STATS_TRACK can also be used separately.
69 //
// Builds the statistic message, e.g., "Number of functions marked 'nounwind'".
#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME)                                     \
  ("Number of " #TYPE " marked '" #NAME "'")
// Builds the statistic variable name, e.g., NumIRFunction_nounwind.
#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
// Declares the STATISTIC counter for a (name, position type) pair.
#define STATS_DECL(NAME, TYPE, MSG) STATISTIC(BUILD_STAT_NAME(NAME, TYPE), MSG);
// Increments the previously declared counter.
#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
// Declaration and increment in one step, for a single increment site.
#define STATS_DECL_AND_TRACK(NAME, TYPE, MSG)                                  \
  STATS_DECL(NAME, TYPE, MSG)                                                  \
  STATS_TRACK(NAME, TYPE)
// Convenience wrappers for the common IR positions: argument, call site
// argument, function, and function return.
#define STATS_DECL_AND_TRACK_ARG_ATTR(NAME)                                    \
  STATS_DECL_AND_TRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
#define STATS_DECL_AND_TRACK_CSARG_ATTR(NAME)                                  \
  STATS_DECL_AND_TRACK(NAME, CSArguments,                                      \
                       BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
#define STATS_DECL_AND_TRACK_FN_ATTR(NAME)                                     \
  STATS_DECL_AND_TRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
#define STATS_DECL_AND_TRACK_FNRET_ATTR(NAME)                                  \
  STATS_DECL_AND_TRACK(NAME, FunctionReturn,                                   \
                       BUILD_STAT_MSG_IR_ATTR(function returns, NAME));
88 
// TODO: Determine a good default value.
//
// In the LLVM-TS and SPEC2006, 32 seems to not induce compile time overheads
// (when run with the first 5 abstract attributes). The results also indicate
// that we never reach 32 iterations but always find a fixpoint sooner.
//
// This will become more evolved once we perform two interleaved fixpoint
// iterations: bottom-up and top-down.
static cl::opt<unsigned>
    MaxFixpointIterations("attributor-max-iterations", cl::Hidden,
                          cl::desc("Maximal number of fixpoint iterations."),
                          cl::init(32));

// NOTE: init(true) means the pass is OFF by default; it must be enabled
// explicitly via -attributor-disable=false.
static cl::opt<bool> DisableAttributor(
    "attributor-disable", cl::Hidden,
    cl::desc("Disable the attributor inter-procedural deduction pass."),
    cl::init(true));

// Optional extra verification of the deduction/manifestation; may report
// false positives (see the flag description).
static cl::opt<bool> VerifyAttributor(
    "attributor-verify", cl::Hidden,
    cl::desc("Verify the Attributor deduction and "
             "manifestation of attributes -- may issue false-positive errors"),
    cl::init(false));
112 
113 /// Logic operators for the change status enum class.
114 ///
115 ///{
116 ChangeStatus llvm::operator|(ChangeStatus l, ChangeStatus r) {
117   return l == ChangeStatus::CHANGED ? l : r;
118 }
119 ChangeStatus llvm::operator&(ChangeStatus l, ChangeStatus r) {
120   return l == ChangeStatus::UNCHANGED ? l : r;
121 }
122 ///}
123 
124 template <typename StateTy>
125 using followValueCB_t = std::function<bool(Value *, StateTy &State)>;
126 template <typename StateTy>
127 using visitValueCB_t = std::function<void(Value *, StateTy &State)>;
128 
129 /// Recursively visit all values that might become \p InitV at some point. This
130 /// will be done by looking through cast instructions, selects, phis, and calls
131 /// with the "returned" attribute. The callback \p FollowValueCB is asked before
132 /// a potential origin value is looked at. If no \p FollowValueCB is passed, a
133 /// default one is used that will make sure we visit every value only once. Once
134 /// we cannot look through the value any further, the callback \p VisitValueCB
135 /// is invoked and passed the current value and the \p State. To limit how much
136 /// effort is invested, we will never visit more than \p MaxValues values.
137 template <typename StateTy>
138 static bool genericValueTraversal(
139     Value *InitV, StateTy &State, visitValueCB_t<StateTy> &VisitValueCB,
140     followValueCB_t<StateTy> *FollowValueCB = nullptr, int MaxValues = 8) {
141 
142   SmallPtrSet<Value *, 16> Visited;
143   followValueCB_t<bool> DefaultFollowValueCB = [&](Value *Val, bool &) {
144     return Visited.insert(Val).second;
145   };
146 
147   if (!FollowValueCB)
148     FollowValueCB = &DefaultFollowValueCB;
149 
150   SmallVector<Value *, 16> Worklist;
151   Worklist.push_back(InitV);
152 
153   int Iteration = 0;
154   do {
155     Value *V = Worklist.pop_back_val();
156 
157     // Check if we should process the current value. To prevent endless
158     // recursion keep a record of the values we followed!
159     if (!(*FollowValueCB)(V, State))
160       continue;
161 
162     // Make sure we limit the compile time for complex expressions.
163     if (Iteration++ >= MaxValues)
164       return false;
165 
166     // Explicitly look through calls with a "returned" attribute if we do
167     // not have a pointer as stripPointerCasts only works on them.
168     if (V->getType()->isPointerTy()) {
169       V = V->stripPointerCasts();
170     } else {
171       CallSite CS(V);
172       if (CS && CS.getCalledFunction()) {
173         Value *NewV = nullptr;
174         for (Argument &Arg : CS.getCalledFunction()->args())
175           if (Arg.hasReturnedAttr()) {
176             NewV = CS.getArgOperand(Arg.getArgNo());
177             break;
178           }
179         if (NewV) {
180           Worklist.push_back(NewV);
181           continue;
182         }
183       }
184     }
185 
186     // Look through select instructions, visit both potential values.
187     if (auto *SI = dyn_cast<SelectInst>(V)) {
188       Worklist.push_back(SI->getTrueValue());
189       Worklist.push_back(SI->getFalseValue());
190       continue;
191     }
192 
193     // Look through phi nodes, visit all operands.
194     if (auto *PHI = dyn_cast<PHINode>(V)) {
195       Worklist.append(PHI->op_begin(), PHI->op_end());
196       continue;
197     }
198 
199     // Once a leaf is reached we inform the user through the callback.
200     VisitValueCB(V, State);
201   } while (!Worklist.empty());
202 
203   // All values have been visited.
204   return true;
205 }
206 
207 /// Return true if \p New is equal or worse than \p Old.
208 static bool isEqualOrWorse(const Attribute &New, const Attribute &Old) {
209   if (!Old.isIntAttribute())
210     return true;
211 
212   return Old.getValueAsInt() >= New.getValueAsInt();
213 }
214 
215 /// Return true if the information provided by \p Attr was added to the
216 /// attribute list \p Attrs. This is only the case if it was not already present
217 /// in \p Attrs at the position describe by \p PK and \p AttrIdx.
218 static bool addIfNotExistent(LLVMContext &Ctx, const Attribute &Attr,
219                              AttributeList &Attrs, int AttrIdx) {
220 
221   if (Attr.isEnumAttribute()) {
222     Attribute::AttrKind Kind = Attr.getKindAsEnum();
223     if (Attrs.hasAttribute(AttrIdx, Kind))
224       if (isEqualOrWorse(Attr, Attrs.getAttribute(AttrIdx, Kind)))
225         return false;
226     Attrs = Attrs.addAttribute(Ctx, AttrIdx, Attr);
227     return true;
228   }
229   if (Attr.isStringAttribute()) {
230     StringRef Kind = Attr.getKindAsString();
231     if (Attrs.hasAttribute(AttrIdx, Kind))
232       if (isEqualOrWorse(Attr, Attrs.getAttribute(AttrIdx, Kind)))
233         return false;
234     Attrs = Attrs.addAttribute(Ctx, AttrIdx, Attr);
235     return true;
236   }
237   if (Attr.isIntAttribute()) {
238     Attribute::AttrKind Kind = Attr.getKindAsEnum();
239     if (Attrs.hasAttribute(AttrIdx, Kind))
240       if (isEqualOrWorse(Attr, Attrs.getAttribute(AttrIdx, Kind)))
241         return false;
242     Attrs = Attrs.removeAttribute(Ctx, AttrIdx, Kind);
243     Attrs = Attrs.addAttribute(Ctx, AttrIdx, Attr);
244     return true;
245   }
246 
247   llvm_unreachable("Expected enum or string attribute!");
248 }
249 
250 ChangeStatus AbstractAttribute::update(Attributor &A) {
251   ChangeStatus HasChanged = ChangeStatus::UNCHANGED;
252   if (getState().isAtFixpoint())
253     return HasChanged;
254 
255   LLVM_DEBUG(dbgs() << "[Attributor] Update: " << *this << "\n");
256 
257   HasChanged = updateImpl(A);
258 
259   LLVM_DEBUG(dbgs() << "[Attributor] Update " << HasChanged << " " << *this
260                     << "\n");
261 
262   return HasChanged;
263 }
264 
ChangeStatus
IRAttributeManifest::manifestAttrs(Attributor &A, IRPosition &IRP,
                                   const ArrayRef<Attribute> &DeducedAttrs) {
  assert(IRP.getAssociatedValue() &&
         "Attempted to manifest an attribute without associated value!");

  ChangeStatus HasChanged = ChangeStatus::UNCHANGED;

  Function &ScopeFn = IRP.getAnchorScope();
  LLVMContext &Ctx = ScopeFn.getContext();
  IRPosition::Kind PK = IRP.getPositionKind();

  // In the following some generic code that will manifest attributes in
  // DeducedAttrs if they improve the current IR. Due to the different
  // annotation positions we use the underlying AttributeList interface.

  // Read the attribute list of the entity the position refers to: the
  // function for function/argument/return positions, the call site otherwise.
  // NOTE(review): the switch has no default; position kinds other than the
  // four listed would leave Attrs empty -- presumably these are the only
  // kinds, confirm against IRPosition::Kind.
  AttributeList Attrs;
  switch (PK) {
  case IRPosition::IRP_ARGUMENT:
  case IRPosition::IRP_FUNCTION:
  case IRPosition::IRP_RETURNED:
    Attrs = ScopeFn.getAttributes();
    break;
  case IRPosition::IRP_CALL_SITE_ARGUMENT:
    Attrs = ImmutableCallSite(&IRP.getAnchorValue()).getAttributes();
    break;
  }

  // Merge each deduced attribute; only count it as a change if it actually
  // improves on what is already present (see addIfNotExistent).
  for (const Attribute &Attr : DeducedAttrs) {
    if (!addIfNotExistent(Ctx, Attr, Attrs, IRP.getAttrIdx()))
      continue;

    HasChanged = ChangeStatus::CHANGED;
  }

  // Nothing improved; avoid touching the IR at all.
  if (HasChanged == ChangeStatus::UNCHANGED)
    return HasChanged;

  // Write the updated attribute list back to the same entity it was read from.
  switch (PK) {
  case IRPosition::IRP_ARGUMENT:
  case IRPosition::IRP_FUNCTION:
  case IRPosition::IRP_RETURNED:
    ScopeFn.setAttributes(Attrs);
    break;
  case IRPosition::IRP_CALL_SITE_ARGUMENT:
    CallSite(&IRP.getAnchorValue()).setAttributes(Attrs);
  }

  return HasChanged;
}
315 
316 /// -----------------------NoUnwind Function Attribute--------------------------
317 
/// Shared implementation state for the "nounwind" deduction; subclassed per
/// IR position (currently only the function position below).
struct AANoUnwindImpl : AANoUnwind {
  IRPositionConstructorForward(AANoUnwindImpl, AANoUnwind);

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr() const override {
    return getAssumed() ? "nounwind" : "may-unwind";
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;
};
328 
/// The "nounwind" deduction anchored at a function.
struct AANoUnwindFunction final : public AANoUnwindImpl {
  AANoUnwindFunction(Function &F) : AANoUnwindImpl(F, IRP_FUNCTION) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECL_AND_TRACK_FN_ATTR(nounwind)
  }
};
337 
338 ChangeStatus AANoUnwindImpl::updateImpl(Attributor &A) {
339   Function &F = getAnchorScope();
340 
341   // The map from instruction opcodes to those instructions in the function.
342   auto Opcodes = {
343       (unsigned)Instruction::Invoke,      (unsigned)Instruction::CallBr,
344       (unsigned)Instruction::Call,        (unsigned)Instruction::CleanupRet,
345       (unsigned)Instruction::CatchSwitch, (unsigned)Instruction::Resume};
346 
347   auto CheckForNoUnwind = [&](Instruction &I) {
348     if (!I.mayThrow())
349       return true;
350 
351     auto *NoUnwindAA = A.getAAFor<AANoUnwind>(*this, I);
352     return NoUnwindAA && NoUnwindAA->isAssumedNoUnwind();
353   };
354 
355   if (!A.checkForAllInstructions(F, CheckForNoUnwind, *this, Opcodes))
356     return indicatePessimisticFixpoint();
357 
358   return ChangeStatus::UNCHANGED;
359 }
360 
361 /// --------------------- Function Return Values -------------------------------
362 
363 /// "Attribute" that collects all potential returned values and the return
364 /// instructions that they arise from.
365 ///
366 /// If there is a unique returned value R, the manifest method will:
367 ///   - mark R with the "returned" attribute, if R is an argument.
368 ///
369 /// TODO: We should use liveness during construction of the returned values map
370 ///       and before we set HasOverdefinedReturnedCalls.
class AAReturnedValuesImpl : public AAReturnedValues, public AbstractState {

  /// Mapping of values potentially returned by the associated function to the
  /// return instructions that might return them.
  DenseMap<Value *, SmallPtrSet<ReturnInst *, 2>> ReturnedValues;

  /// State flags
  ///
  ///{
  bool IsFixed;                     // No further updates will change the state.
  bool IsValidState;                // False once we gave up (pessimistic fixpoint).
  bool HasOverdefinedReturnedCalls; // A returned call site could not be refined.
  ///}

  /// Collect values that could become \p V in the set \p Values, each mapped to
  /// \p ReturnInsts.
  void collectValuesRecursively(
      Attributor &A, Value *V, SmallPtrSetImpl<ReturnInst *> &ReturnInsts,
      DenseMap<Value *, SmallPtrSet<ReturnInst *, 2>> &Values) {

    // Every leaf the traversal reaches is associated with all return
    // instructions that could return it.
    visitValueCB_t<bool> VisitValueCB = [&](Value *Val, bool &) {
      assert(!isa<Instruction>(Val) ||
             &getAnchorScope() == cast<Instruction>(Val)->getFunction());
      Values[Val].insert(ReturnInsts.begin(), ReturnInsts.end());
    };

    bool UnusedBool;
    bool Success = genericValueTraversal(V, UnusedBool, VisitValueCB);

    // If we did abort the above traversal we haven't seen all the values.
    // Consequently, we cannot know if the information we would derive is
    // accurate so we give up early.
    if (!Success)
      indicatePessimisticFixpoint();
  }

public:
  IRPositionConstructorForward(AAReturnedValuesImpl, AAReturnedValues);

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    // Reset the state.
    setAssociatedValue(nullptr);
    IsFixed = false;
    IsValidState = true;
    HasOverdefinedReturnedCalls = false;
    ReturnedValues.clear();

    Function &F = getAnchorScope();

    // The map from instruction opcodes to those instructions in the function.
    auto &OpcodeInstMap = A.getInfoCache().getOpcodeInstMapForFunction(F);

    // Look through all arguments, if one is marked as returned we are done.
    for (Argument &Arg : F.args()) {
      if (Arg.hasReturnedAttr()) {
        // The argument is the unique return value: map it to every return
        // instruction and fix the state optimistically.
        auto &ReturnInstSet = ReturnedValues[&Arg];
        for (Instruction *RI : OpcodeInstMap[Instruction::Ret])
          ReturnInstSet.insert(cast<ReturnInst>(RI));

        indicateOptimisticFixpoint();
        return;
      }
    }

    // If no argument was marked as returned we look at all return instructions
    // and collect potentially returned values.
    for (Instruction *RI : OpcodeInstMap[Instruction::Ret]) {
      SmallPtrSet<ReturnInst *, 1> RISet({cast<ReturnInst>(RI)});
      collectValuesRecursively(A, cast<ReturnInst>(RI)->getReturnValue(), RISet,
                               ReturnedValues);
    }
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override;

  /// See AbstractAttribute::getState(...).
  AbstractState &getState() override { return *this; }

  /// See AbstractAttribute::getState(...).
  const AbstractState &getState() const override { return *this; }

  /// See AbstractAttribute::updateImpl(Attributor &A).
  ChangeStatus updateImpl(Attributor &A) override;

  /// Return the number of potential return values, -1 if unknown.
  /// NOTE(review): the return type is size_t, so the "-1" wraps to SIZE_MAX;
  /// callers must not compare against a signed -1.
  size_t getNumReturnValues() const {
    return isValidState() ? ReturnedValues.size() : -1;
  }

  /// Return an assumed unique return value if a single candidate is found. If
  /// there cannot be one, return a nullptr. If it is not clear yet, return the
  /// Optional::NoneType.
  Optional<Value *> getAssumedUniqueReturnValue(Attributor &A) const;

  /// See AbstractState::checkForAllReturnedValues(...).
  bool checkForAllReturnedValuesAndReturnInsts(
      const function_ref<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)>
          &Pred) const override;

  /// Pretty print the attribute similar to the IR representation.
  const std::string getAsStr() const override;

  /// See AbstractState::isAtFixpoint().
  bool isAtFixpoint() const override { return IsFixed; }

  /// See AbstractState::isValidState().
  bool isValidState() const override { return IsValidState; }

  /// See AbstractState::indicateOptimisticFixpoint(...).
  ChangeStatus indicateOptimisticFixpoint() override {
    IsFixed = true;
    IsValidState &= true; // No-op by construction; validity is preserved.
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractState::indicatePessimisticFixpoint(...).
  ChangeStatus indicatePessimisticFixpoint() override {
    IsFixed = true;
    IsValidState = false;
    return ChangeStatus::CHANGED;
  }
};
495 
/// The returned-values deduction anchored at a function.
struct AAReturnedValuesFunction final : public AAReturnedValuesImpl {
  AAReturnedValuesFunction(Function &F)
      : AAReturnedValuesImpl(F, IRP_FUNCTION) {}

  /// See AbstractAttribute::trackStatistics()
  /// Counted as an argument attribute because the manifested IR attribute
  /// ("returned") is placed on an argument (see manifest()).
  void trackStatistics() const override {
    STATS_DECL_AND_TRACK_ARG_ATTR(returned)
  }
};
505 
ChangeStatus AAReturnedValuesImpl::manifest(Attributor &A) {
  ChangeStatus Changed = ChangeStatus::UNCHANGED;

  // Bookkeeping. (STATS_DECL_AND_TRACK declares a function-local STATISTIC
  // and increments it.)
  assert(isValidState());
  STATS_DECL_AND_TRACK(KnownReturnValues, FunctionReturn,
                       "Number of function with known return values");

  // Check if we have an assumed unique return value that we could manifest.
  Optional<Value *> UniqueRV = getAssumedUniqueReturnValue(A);

  // No candidate yet, or provably no unique value: nothing to annotate.
  if (!UniqueRV.hasValue() || !UniqueRV.getValue())
    return Changed;

  // Bookkeeping.
  STATS_DECL_AND_TRACK(UniqueReturnValue, FunctionReturn,
                       "Number of function with unique return");

  // If the assumed unique return value is an argument, annotate it.
  // Retarget this attribute's IR position onto the argument slot before
  // delegating to the generic IRAttribute manifestation.
  if (auto *UniqueRVArg = dyn_cast<Argument>(UniqueRV.getValue())) {
    setAssociatedValue(UniqueRVArg);
    setAttributeIdx(UniqueRVArg->getArgNo() + AttributeList::FirstArgIndex);
    Changed = IRAttribute::manifest(A) | Changed;
  }

  return Changed;
}
533 
534 const std::string AAReturnedValuesImpl::getAsStr() const {
535   return (isAtFixpoint() ? "returns(#" : "may-return(#") +
536          (isValidState() ? std::to_string(getNumReturnValues()) : "?") +
537          ")[OD: " + std::to_string(HasOverdefinedReturnedCalls) + "]";
538 }
539 
540 Optional<Value *>
541 AAReturnedValuesImpl::getAssumedUniqueReturnValue(Attributor &A) const {
542   // If checkForAllReturnedValues provides a unique value, ignoring potential
543   // undef values that can also be present, it is assumed to be the actual
544   // return value and forwarded to the caller of this method. If there are
545   // multiple, a nullptr is returned indicating there cannot be a unique
546   // returned value.
547   Optional<Value *> UniqueRV;
548 
549   auto Pred = [&](Value &RV) -> bool {
550     // If we found a second returned value and neither the current nor the saved
551     // one is an undef, there is no unique returned value. Undefs are special
552     // since we can pretend they have any value.
553     if (UniqueRV.hasValue() && UniqueRV != &RV &&
554         !(isa<UndefValue>(RV) || isa<UndefValue>(UniqueRV.getValue()))) {
555       UniqueRV = nullptr;
556       return false;
557     }
558 
559     // Do not overwrite a value with an undef.
560     if (!UniqueRV.hasValue() || !isa<UndefValue>(RV))
561       UniqueRV = &RV;
562 
563     return true;
564   };
565 
566   if (!A.checkForAllReturnedValues(getAnchorScope(), Pred, *this))
567     UniqueRV = nullptr;
568 
569   return UniqueRV;
570 }
571 
572 bool AAReturnedValuesImpl::checkForAllReturnedValuesAndReturnInsts(
573     const function_ref<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)>
574         &Pred) const {
575   if (!isValidState())
576     return false;
577 
578   // Check all returned values but ignore call sites as long as we have not
579   // encountered an overdefined one during an update.
580   for (auto &It : ReturnedValues) {
581     Value *RV = It.first;
582     const SmallPtrSetImpl<ReturnInst *> &RetInsts = It.second;
583 
584     ImmutableCallSite ICS(RV);
585     if (ICS && !HasOverdefinedReturnedCalls)
586       continue;
587 
588     if (!Pred(*RV, RetInsts))
589       return false;
590   }
591 
592   return true;
593 }
594 
ChangeStatus AAReturnedValuesImpl::updateImpl(Attributor &A) {

  // Check if we know of any values returned by the associated function,
  // if not, we are done.
  if (getNumReturnValues() == 0) {
    indicateOptimisticFixpoint();
    return ChangeStatus::UNCHANGED;
  }

  // Check if any of the returned values is a call site we can refine.
  // New (value -> return insts) entries discovered this round; merged into
  // ReturnedValues after the loop to avoid mutating the map while iterating.
  decltype(ReturnedValues) AddRVs;
  bool HasCallSite = false;

  // Keep track of any change to trigger updates on dependent attributes.
  ChangeStatus Changed = ChangeStatus::UNCHANGED;

  auto *LivenessAA = A.getAAFor<AAIsDead>(*this, getAnchorScope());

  // Look at all returned call sites.
  for (auto &It : ReturnedValues) {
    SmallPtrSet<ReturnInst *, 2> &ReturnInsts = It.second;
    Value *RV = It.first;

    LLVM_DEBUG(dbgs() << "[AAReturnedValues] Potentially returned value " << *RV
                      << "\n");

    // Only call sites can change during an update, ignore the rest.
    CallSite RetCS(RV);
    if (!RetCS)
      continue;

    // For now, any call site we see will prevent us from directly fixing the
    // state. However, if the information on the callees is fixed, the call
    // sites will be removed and we will fix the information for this state.
    HasCallSite = true;

    // Ignore dead ReturnValues.
    if (LivenessAA &&
        !LivenessAA->isLiveInstSet(ReturnInsts.begin(), ReturnInsts.end())) {
      LLVM_DEBUG(dbgs() << "[AAReturnedValues] all returns are assumed dead, "
                           "skip it for now\n");
      continue;
    }

    // Try to find an assumed unique return value for the called function.
    // No attribute state for the callee: the call is overdefined and stays
    // a returned value as-is.
    auto *RetCSAA = A.getAAFor<AAReturnedValuesImpl>(*this, *RV);
    if (!RetCSAA) {
      if (!HasOverdefinedReturnedCalls)
        Changed = ChangeStatus::CHANGED;
      HasOverdefinedReturnedCalls = true;
      // NOTE(review): RetCSAA is always null here, so the ternary always
      // prints "no" -- the "invalid" arm looks unreachable; confirm intent.
      LLVM_DEBUG(dbgs() << "[AAReturnedValues] Returned call site (" << *RV
                        << ") with " << (RetCSAA ? "invalid" : "no")
                        << " associated state\n");
      continue;
    }

    // Try to find an assumed unique return value for the called function.
    Optional<Value *> AssumedUniqueRV = RetCSAA->getAssumedUniqueReturnValue(A);

    // If no assumed unique return value was found due to the lack of
    // candidates, we may need to resolve more calls (through more update
    // iterations) or the called function will not return. Either way, we simply
    // stick with the call sites as return values. Because there were not
    // multiple possibilities, we do not treat it as overdefined.
    if (!AssumedUniqueRV.hasValue())
      continue;

    // If multiple, non-refinable values were found, there cannot be a unique
    // return value for the called function. The returned call is overdefined!
    if (!AssumedUniqueRV.getValue()) {
      if (!HasOverdefinedReturnedCalls)
        Changed = ChangeStatus::CHANGED;
      HasOverdefinedReturnedCalls = true;
      LLVM_DEBUG(dbgs() << "[AAReturnedValues] Returned call site has multiple "
                           "potentially returned values\n");
      continue;
    }

    LLVM_DEBUG({
      bool UniqueRVIsKnown = RetCSAA->isAtFixpoint();
      dbgs() << "[AAReturnedValues] Returned call site "
             << (UniqueRVIsKnown ? "known" : "assumed")
             << " unique return value: " << *AssumedUniqueRV << "\n";
    });

    // The assumed unique return value.
    Value *AssumedRetVal = AssumedUniqueRV.getValue();

    // If the assumed unique return value is an argument, lookup the matching
    // call site operand and recursively collect new returned values.
    // If it is not an argument, it is just put into the set of returned values
    // as we would have already looked through casts, phis, and similar values.
    if (Argument *AssumedRetArg = dyn_cast<Argument>(AssumedRetVal))
      collectValuesRecursively(A,
                               RetCS.getArgOperand(AssumedRetArg->getArgNo()),
                               ReturnInsts, AddRVs);
    else
      AddRVs[AssumedRetVal].insert(ReturnInsts.begin(), ReturnInsts.end());
  }

  // Merge the newly discovered returned values; any genuinely new
  // (value, return inst) pair counts as a change.
  for (auto &It : AddRVs) {
    assert(!It.second.empty() && "Entry does not add anything.");
    auto &ReturnInsts = ReturnedValues[It.first];
    for (ReturnInst *RI : It.second)
      if (ReturnInsts.insert(RI).second) {
        LLVM_DEBUG(dbgs() << "[AAReturnedValues] Add new returned value "
                          << *It.first << " => " << *RI << "\n");
        Changed = ChangeStatus::CHANGED;
      }
  }

  // If there is no call site in the returned values we are done.
  if (!HasCallSite) {
    indicateOptimisticFixpoint();
    return ChangeStatus::CHANGED;
  }

  return Changed;
}
714 
715 /// ------------------------ NoSync Function Attribute -------------------------
716 
/// Shared implementation state for the "nosync" deduction; subclassed per
/// IR position (currently only the function position below).
struct AANoSyncImpl : AANoSync {
  IRPositionConstructorForward(AANoSyncImpl, AANoSync);

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr() const override {
    return getAssumed() ? "nosync" : "may-sync";
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;

  /// Helper function used to determine whether an instruction is non-relaxed
  /// atomic. In other words, if an atomic instruction does not have unordered
  /// or monotonic ordering
  static bool isNonRelaxedAtomic(Instruction *I);

  /// Helper function used to determine whether an instruction is volatile.
  static bool isVolatile(Instruction *I);

  /// Helper function used to check if an intrinsic is nosync (memcpy, memmove,
  /// memset).
  static bool isNoSyncIntrinsic(Instruction *I);
};
739 
/// The "nosync" deduction anchored at a function.
struct AANoSyncFunction final : public AANoSyncImpl {
  AANoSyncFunction(Function &F) : AANoSyncImpl(F, IRP_FUNCTION) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECL_AND_TRACK_FN_ATTR(nosync) }
};
746 
747 bool AANoSyncImpl::isNonRelaxedAtomic(Instruction *I) {
748   if (!I->isAtomic())
749     return false;
750 
751   AtomicOrdering Ordering;
752   switch (I->getOpcode()) {
753   case Instruction::AtomicRMW:
754     Ordering = cast<AtomicRMWInst>(I)->getOrdering();
755     break;
756   case Instruction::Store:
757     Ordering = cast<StoreInst>(I)->getOrdering();
758     break;
759   case Instruction::Load:
760     Ordering = cast<LoadInst>(I)->getOrdering();
761     break;
762   case Instruction::Fence: {
763     auto *FI = cast<FenceInst>(I);
764     if (FI->getSyncScopeID() == SyncScope::SingleThread)
765       return false;
766     Ordering = FI->getOrdering();
767     break;
768   }
769   case Instruction::AtomicCmpXchg: {
770     AtomicOrdering Success = cast<AtomicCmpXchgInst>(I)->getSuccessOrdering();
771     AtomicOrdering Failure = cast<AtomicCmpXchgInst>(I)->getFailureOrdering();
772     // Only if both are relaxed, than it can be treated as relaxed.
773     // Otherwise it is non-relaxed.
774     if (Success != AtomicOrdering::Unordered &&
775         Success != AtomicOrdering::Monotonic)
776       return true;
777     if (Failure != AtomicOrdering::Unordered &&
778         Failure != AtomicOrdering::Monotonic)
779       return true;
780     return false;
781   }
782   default:
783     llvm_unreachable(
784         "New atomic operations need to be known in the attributor.");
785   }
786 
787   // Relaxed.
788   if (Ordering == AtomicOrdering::Unordered ||
789       Ordering == AtomicOrdering::Monotonic)
790     return false;
791   return true;
792 }
793 
794 /// Checks if an intrinsic is nosync. Currently only checks mem* intrinsics.
795 /// FIXME: We should ipmrove the handling of intrinsics.
796 bool AANoSyncImpl::isNoSyncIntrinsic(Instruction *I) {
797   if (auto *II = dyn_cast<IntrinsicInst>(I)) {
798     switch (II->getIntrinsicID()) {
799     /// Element wise atomic memory intrinsics are can only be unordered,
800     /// therefore nosync.
801     case Intrinsic::memset_element_unordered_atomic:
802     case Intrinsic::memmove_element_unordered_atomic:
803     case Intrinsic::memcpy_element_unordered_atomic:
804       return true;
805     case Intrinsic::memset:
806     case Intrinsic::memmove:
807     case Intrinsic::memcpy:
808       if (!cast<MemIntrinsic>(II)->isVolatile())
809         return true;
810       return false;
811     default:
812       return false;
813     }
814   }
815   return false;
816 }
817 
818 bool AANoSyncImpl::isVolatile(Instruction *I) {
819   assert(!ImmutableCallSite(I) && !isa<CallBase>(I) &&
820          "Calls should not be checked here");
821 
822   switch (I->getOpcode()) {
823   case Instruction::AtomicRMW:
824     return cast<AtomicRMWInst>(I)->isVolatile();
825   case Instruction::Store:
826     return cast<StoreInst>(I)->isVolatile();
827   case Instruction::Load:
828     return cast<LoadInst>(I)->isVolatile();
829   case Instruction::AtomicCmpXchg:
830     return cast<AtomicCmpXchgInst>(I)->isVolatile();
831   default:
832     return false;
833   }
834 }
835 
ChangeStatus AANoSyncImpl::updateImpl(Attributor &A) {
  Function &F = getAnchorScope();

  // A read/write instruction keeps the nosync assumption alive if it is a
  // known nosync intrinsic, a call site that is (assumed or declared) nosync,
  // or a non-volatile, relaxed (or non-atomic) memory access.
  auto CheckRWInstForNoSync = [&](Instruction &I) {
    /// We are looking for volatile instructions or Non-Relaxed atomics.
    /// FIXME: We should improve the handling of intrinsics.

    ImmutableCallSite ICS(&I);
    // Query the nosync abstract attribute for I; it is consulted for call
    // sites below.
    auto *NoSyncAA = A.getAAFor<AANoSyncImpl>(*this, I);

    // Known nosync intrinsics (e.g., non-volatile mem* intrinsics) are fine.
    if (isa<IntrinsicInst>(&I) && isNoSyncIntrinsic(&I))
      return true;

    // A call site must be assumed nosync or carry the nosync attribute.
    if (ICS && (!NoSyncAA || !NoSyncAA->isAssumedNoSync()) &&
        !ICS.hasFnAttr(Attribute::NoSync))
      return false;

    if (ICS)
      return true;

    // Non-call instructions: volatile accesses and non-relaxed atomics may
    // synchronize with other threads; everything else cannot.
    if (!isVolatile(&I) && !isNonRelaxedAtomic(&I))
      return true;

    return false;
  };

  // Call-like instructions that do not touch memory still break nosync if
  // they are convergent.
  auto CheckForNoSync = [&](Instruction &I) {
    // At this point we handled all read/write effects and they are all
    // nosync, so they can be skipped.
    if (I.mayReadOrWriteMemory())
      return true;

    // non-convergent and readnone imply nosync.
    return !ImmutableCallSite(&I).isConvergent();
  };

  if (!A.checkForAllReadWriteInstructions(F, CheckRWInstForNoSync, *this) ||
      !A.checkForAllCallLikeInstructions(F, CheckForNoSync, *this))
    return indicatePessimisticFixpoint();

  return ChangeStatus::UNCHANGED;
}
878 
879 /// ------------------------ No-Free Attributes ----------------------------
880 
881 struct AANoFreeImpl : public AANoFree {
882   IRPositionConstructorForward(AANoFreeImpl, AANoFree);
883 
884   /// See AbstractAttribute::getAsStr().
885   const std::string getAsStr() const override {
886     return getAssumed() ? "nofree" : "may-free";
887   }
888 
889   /// See AbstractAttribute::updateImpl(...).
890   ChangeStatus updateImpl(Attributor &A) override;
891 };
892 
struct AANoFreeFunction final : public AANoFreeImpl {
  // NoFree deduction anchored at a whole function (IRP_FUNCTION position).
  AANoFreeFunction(Function &F) : AANoFreeImpl(F, IRP_FUNCTION) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECL_AND_TRACK_FN_ATTR(nofree) }
};
899 
900 ChangeStatus AANoFreeImpl::updateImpl(Attributor &A) {
901   Function &F = getAnchorScope();
902 
903   auto CheckForNoFree = [&](Instruction &I) {
904     if (ImmutableCallSite(&I).hasFnAttr(Attribute::NoFree))
905       return true;
906 
907     auto *NoFreeAA = A.getAAFor<AANoFreeImpl>(*this, I);
908     return NoFreeAA && NoFreeAA->isAssumedNoFree();
909   };
910 
911   if (!A.checkForAllCallLikeInstructions(F, CheckForNoFree, *this))
912     return indicatePessimisticFixpoint();
913   return ChangeStatus::UNCHANGED;
914 }
915 
916 /// ------------------------ NonNull Argument Attribute ------------------------
917 struct AANonNullImpl : AANonNull {
918   IRPositionConstructorForward(AANonNullImpl, AANonNull);
919 
920   /// See AbstractAttribute::getAsStr().
921   const std::string getAsStr() const override {
922     return getAssumed() ? "nonnull" : "may-null";
923   }
924 
925   /// Generate a predicate that checks if a given value is assumed nonnull.
926   /// The generated function returns true if a value satisfies any of
927   /// following conditions.
928   /// (i) A value is known nonZero(=nonnull).
929   /// (ii) A value is associated with AANonNull and its isAssumedNonNull() is
930   /// true.
931   std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)>
932   generatePredicate(Attributor &);
933 };
934 
935 std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)>
936 AANonNullImpl::generatePredicate(Attributor &A) {
937   // FIXME: The `AAReturnedValues` should provide the predicate with the
938   // `ReturnInst` vector as well such that we can use the control flow sensitive
939   // version of `isKnownNonZero`. This should fix `test11` in
940   // `test/Transforms/FunctionAttrs/nonnull.ll`
941 
942   std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)> Pred =
943       [&](Value &RV, const SmallPtrSetImpl<ReturnInst *> &RetInsts) -> bool {
944 
945     if (isKnownNonZero(&RV, A.getDataLayout()))
946       return true;
947 
948     auto *NonNullAA = A.getAAFor<AANonNull>(*this, RV);
949 
950     ImmutableCallSite ICS(&RV);
951 
952     if ((!NonNullAA || !NonNullAA->isAssumedNonNull()) &&
953         (!ICS || !ICS.hasRetAttr(Attribute::NonNull)))
954       return false;
955 
956     return true;
957   };
958 
959   return Pred;
960 }
961 
962 /// NonNull attribute for function return value.
963 struct AANonNullReturned final : AANonNullImpl {
964   AANonNullReturned(Function &F) : AANonNullImpl(F, IRP_RETURNED) {}
965 
966   /// See AbstractAttribute::initialize(...).
967   void initialize(Attributor &A) override {
968     Function &F = getAnchorScope();
969 
970     // Already nonnull.
971     if (F.getAttributes().hasAttribute(AttributeList::ReturnIndex,
972                                        Attribute::NonNull) ||
973         F.getAttributes().hasAttribute(AttributeList::ReturnIndex,
974                                        Attribute::Dereferenceable))
975       indicateOptimisticFixpoint();
976   }
977 
978   /// See AbstractAttribute::updateImpl(...).
979   ChangeStatus updateImpl(Attributor &A) override;
980 
981   /// See AbstractAttribute::trackStatistics()
982   void trackStatistics() const override {
983     STATS_DECL_AND_TRACK_FNRET_ATTR(nonnull)
984   }
985 };
986 
987 ChangeStatus AANonNullReturned::updateImpl(Attributor &A) {
988   Function &F = getAnchorScope();
989 
990   std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)> Pred =
991       this->generatePredicate(A);
992 
993   if (!A.checkForAllReturnedValuesAndReturnInsts(F, Pred, *this))
994     return indicatePessimisticFixpoint();
995   return ChangeStatus::UNCHANGED;
996 }
997 
998 /// NonNull attribute for function argument.
999 struct AANonNullArgument final : AANonNullImpl {
1000   AANonNullArgument(Argument &A) : AANonNullImpl(A) {}
1001 
1002   /// See AbstractAttriubute::initialize(...).
1003   void initialize(Attributor &A) override {
1004     Argument *Arg = cast<Argument>(getAssociatedValue());
1005     if (Arg->hasNonNullAttr())
1006       indicateOptimisticFixpoint();
1007   }
1008 
1009   /// See AbstractAttribute::updateImpl(...).
1010   ChangeStatus updateImpl(Attributor &A) override;
1011 
1012   /// See AbstractAttribute::trackStatistics()
1013   void trackStatistics() const override {
1014     STATS_DECL_AND_TRACK_ARG_ATTR(nonnull)
1015   }
1016 };
1017 
1018 /// NonNull attribute for a call site argument.
1019 struct AANonNullCallSiteArgument final : AANonNullImpl {
1020   AANonNullCallSiteArgument(Instruction &I, unsigned ArgNo)
1021       : AANonNullImpl(CallSite(&I).getArgOperand(ArgNo), I, ArgNo) {}
1022 
1023   /// See AbstractAttribute::initialize(...).
1024   void initialize(Attributor &A) override {
1025     CallSite CS(&getAnchorValue());
1026     if (CS.paramHasAttr(getArgNo(), getAttrKind()) ||
1027         CS.paramHasAttr(getArgNo(), Attribute::Dereferenceable) ||
1028         isKnownNonZero(getAssociatedValue(), A.getDataLayout()))
1029       indicateOptimisticFixpoint();
1030   }
1031 
1032   /// See AbstractAttribute::updateImpl(Attributor &A).
1033   ChangeStatus updateImpl(Attributor &A) override;
1034 
1035   /// See AbstractAttribute::trackStatistics()
1036   void trackStatistics() const override {
1037     STATS_DECL_AND_TRACK_CSARG_ATTR(nonnull)
1038   }
1039 };
1040 
ChangeStatus AANonNullArgument::updateImpl(Attributor &A) {
  Function &F = getAnchorScope();
  unsigned ArgNo = getArgNo();

  // An argument is nonnull only if the value passed at every call site is
  // nonnull (assumed via an abstract attribute, declared via an IR attribute,
  // or provably non-zero).
  // Callback function
  std::function<bool(CallSite)> CallSiteCheck = [&](CallSite CS) {
    assert(CS && "Sanity check: Call site was not initialized properly!");

    auto *NonNullAA =
        A.getAAFor<AANonNullImpl>(*this, *CS.getInstruction(), ArgNo);

    // Check that NonNullAA is AANonNullCallSiteArgument.
    if (NonNullAA) {
      ImmutableCallSite ICS(&NonNullAA->getAnchorValue());
      if (ICS && CS.getInstruction() == ICS.getInstruction())
        return NonNullAA->isAssumedNonNull();
      // NOTE(review): when an abstract attribute exists but is anchored at a
      // different position we give up for this call site without consulting
      // the IR attribute or isKnownNonZero below.
      return false;
    }

    // No abstract attribute: fall back to the IR attribute on the call site.
    if (CS.paramHasAttr(ArgNo, Attribute::NonNull))
      return true;

    // Last resort: value analysis on the actual argument operand.
    Value *V = CS.getArgOperand(ArgNo);
    if (isKnownNonZero(V, A.getDataLayout()))
      return true;

    return false;
  };
  if (!A.checkForAllCallSites(F, CallSiteCheck, *this, true))
    return indicatePessimisticFixpoint();
  return ChangeStatus::UNCHANGED;
}
1073 
1074 ChangeStatus AANonNullCallSiteArgument::updateImpl(Attributor &A) {
1075   // NOTE: Never look at the argument of the callee in this method.
1076   //       If we do this, "nonnull" is always deduced because of the assumption.
1077 
1078   Value &V = *getAssociatedValue();
1079 
1080   auto *NonNullAA = A.getAAFor<AANonNull>(*this, V);
1081 
1082   if (!NonNullAA || !NonNullAA->isAssumedNonNull())
1083     return indicatePessimisticFixpoint();
1084 
1085   return ChangeStatus::UNCHANGED;
1086 }
1087 
1088 /// ------------------------ Will-Return Attributes ----------------------------
1089 
1090 struct AAWillReturnImpl : public AAWillReturn {
1091   IRPositionConstructorForward(AAWillReturnImpl, AAWillReturn);
1092 
1093   /// See AbstractAttribute::getAsStr()
1094   const std::string getAsStr() const override {
1095     return getAssumed() ? "willreturn" : "may-noreturn";
1096   }
1097 };
1098 
struct AAWillReturnFunction final : AAWillReturnImpl {
  // WillReturn deduction anchored at a whole function (IRP_FUNCTION position).
  AAWillReturnFunction(Function &F) : AAWillReturnImpl(F, IRP_FUNCTION) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override;

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECL_AND_TRACK_FN_ATTR(willreturn)
  }
};
1113 
1114 // Helper function that checks whether a function has any cycle.
1115 // TODO: Replace with more efficent code
1116 bool containsCycle(Function &F) {
1117   SmallPtrSet<BasicBlock *, 32> Visited;
1118 
1119   // Traverse BB by dfs and check whether successor is already visited.
1120   for (BasicBlock *BB : depth_first(&F)) {
1121     Visited.insert(BB);
1122     for (auto *SuccBB : successors(BB)) {
1123       if (Visited.count(SuccBB))
1124         return true;
1125     }
1126   }
1127   return false;
1128 }
1129 
// Helper function that checks whether the function contains a loop that might
// not terminate (an endless loop).
// FIXME: Any cycle is regarded as endless loop for now.
//        We have to allow some patterns.
bool containsPossiblyEndlessLoop(Function &F) { return containsCycle(F); }
1135 
1136 void AAWillReturnFunction::initialize(Attributor &A) {
1137   Function &F = getAnchorScope();
1138 
1139   if (containsPossiblyEndlessLoop(F))
1140     indicatePessimisticFixpoint();
1141 }
1142 
ChangeStatus AAWillReturnFunction::updateImpl(Attributor &A) {
  const Function &F = getAnchorScope();

  // A call-like instruction keeps the willreturn assumption alive if the
  // callee is willreturn (declared or assumed) and — for now — also
  // norecurse (declared or assumed).
  auto CheckForWillReturn = [&](Instruction &I) {
    ImmutableCallSite ICS(&I);
    // A declared willreturn attribute on the call site is sufficient on its
    // own.
    if (ICS.hasFnAttr(Attribute::WillReturn))
      return true;

    auto *WillReturnAA = A.getAAFor<AAWillReturn>(*this, I);
    if (!WillReturnAA || !WillReturnAA->isAssumedWillReturn())
      return false;

    // FIXME: Prohibit any recursion for now.
    if (ICS.hasFnAttr(Attribute::NoRecurse))
      return true;

    auto *NoRecurseAA = A.getAAFor<AANoRecurse>(*this, I);
    return NoRecurseAA && NoRecurseAA->isAssumedNoRecurse();
  };

  if (!A.checkForAllCallLikeInstructions(F, CheckForWillReturn, *this))
    return indicatePessimisticFixpoint();

  return ChangeStatus::UNCHANGED;
}
1169 
1170 /// ------------------------ NoAlias Argument Attribute ------------------------
1171 
1172 struct AANoAliasImpl : AANoAlias {
1173   IRPositionConstructorForward(AANoAliasImpl, AANoAlias);
1174 
1175   const std::string getAsStr() const override {
1176     return getAssumed() ? "noalias" : "may-alias";
1177   }
1178 };
1179 
1180 /// NoAlias attribute for function return value.
1181 struct AANoAliasReturned final : AANoAliasImpl {
1182   AANoAliasReturned(Function &F) : AANoAliasImpl(F, IRP_RETURNED) {}
1183 
1184   /// See AbstractAttriubute::initialize(...).
1185   void initialize(Attributor &A) override {
1186     Function &F = getAnchorScope();
1187 
1188     // Already noalias.
1189     if (F.returnDoesNotAlias()) {
1190       indicateOptimisticFixpoint();
1191       return;
1192     }
1193   }
1194 
1195   /// See AbstractAttribute::updateImpl(...).
1196   virtual ChangeStatus updateImpl(Attributor &A) override;
1197 
1198   /// See AbstractAttribute::trackStatistics()
1199   void trackStatistics() const override {
1200     STATS_DECL_AND_TRACK_FNRET_ATTR(noalias)
1201   }
1202 };
1203 
1204 ChangeStatus AANoAliasReturned::updateImpl(Attributor &A) {
1205   Function &F = getAnchorScope();
1206 
1207   auto CheckReturnValue = [&](Value &RV) -> bool {
1208     if (Constant *C = dyn_cast<Constant>(&RV))
1209       if (C->isNullValue() || isa<UndefValue>(C))
1210         return true;
1211 
1212     /// For now, we can only deduce noalias if we have call sites.
1213     /// FIXME: add more support.
1214     ImmutableCallSite ICS(&RV);
1215     if (!ICS)
1216       return false;
1217 
1218     if (!ICS.returnDoesNotAlias()) {
1219       auto *NoAliasAA = A.getAAFor<AANoAlias>(*this, RV);
1220       if (!NoAliasAA || !NoAliasAA->isAssumedNoAlias())
1221         return false;
1222     }
1223 
1224     /// FIXME: We can improve capture check in two ways:
1225     /// 1. Use the AANoCapture facilities.
1226     /// 2. Use the location of return insts for escape queries.
1227     if (PointerMayBeCaptured(&RV, /* ReturnCaptures */ false,
1228                              /* StoreCaptures */ true))
1229       return false;
1230 
1231     return true;
1232   };
1233 
1234   if (!A.checkForAllReturnedValues(F, CheckReturnValue, *this))
1235     return indicatePessimisticFixpoint();
1236 
1237   return ChangeStatus::UNCHANGED;
1238 }
1239 
1240 /// -------------------AAIsDead Function Attribute-----------------------
1241 
struct AAIsDeadImpl : public AAIsDead {
  IRPositionConstructorForward(AAIsDeadImpl, AAIsDead);

  /// Seed liveness exploration with the entry block and record every
  /// (assumed) noreturn call discovered along assumed-live paths.
  void initialize(Attributor &A) override {
    const Function &F = getAnchorScope();

    ToBeExploredPaths.insert(&(F.getEntryBlock().front()));
    AssumedLiveBlocks.insert(&(F.getEntryBlock()));
    // findNextNoReturn may grow ToBeExploredPaths; re-reading the size each
    // iteration ensures newly discovered paths are explored as well.
    for (size_t i = 0; i < ToBeExploredPaths.size(); ++i)
      if (const Instruction *NextNoReturnI =
              findNextNoReturn(A, ToBeExploredPaths[i]))
        NoReturnCalls.insert(NextNoReturnI);
  }

  /// Find the next assumed noreturn instruction in the block of \p I starting
  /// from, thus including, \p I.
  ///
  /// The caller is responsible to monitor the ToBeExploredPaths set as new
  /// instructions discovered in other basic block will be placed in there.
  ///
  /// \returns The next assumed noreturn instructions in the block of \p I
  ///          starting from, thus including, \p I.
  const Instruction *findNextNoReturn(Attributor &A, const Instruction *I);

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr() const override {
    return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) + "/" +
           std::to_string(getAnchorScope().size()) + "][#NRI " +
           std::to_string(NoReturnCalls.size()) + "]";
  }

  /// See AbstractAttribute::manifest(...).
  ///
  /// Materialize the deduced liveness: after each (assumed) noreturn call the
  /// block is split and the tail is replaced by an unreachable terminator.
  ChangeStatus manifest(Attributor &A) override {
    assert(getState().isValidState() &&
           "Attempted to manifest an invalid state!");

    ChangeStatus HasChanged = ChangeStatus::UNCHANGED;
    const Function &F = getAnchorScope();

    // Flag to determine if we can change an invoke to a call assuming the
    // callee is nounwind. This is not possible if the personality of the
    // function allows to catch asynchronous exceptions.
    bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(F);

    for (const Instruction *NRC : NoReturnCalls) {
      Instruction *I = const_cast<Instruction *>(NRC);
      BasicBlock *BB = I->getParent();
      // Everything from SplitPos onwards is considered dead.
      Instruction *SplitPos = I->getNextNode();

      if (auto *II = dyn_cast<InvokeInst>(I)) {
        // If we keep the invoke the split position is at the beginning of the
        // normal destination block (it invokes a noreturn function after all).
        BasicBlock *NormalDestBB = II->getNormalDest();
        SplitPos = &NormalDestBB->front();

        /// Invoke is replaced with a call and unreachable is placed after it if
        /// the callee is nounwind and noreturn. Otherwise, we keep the invoke
        /// and only place an unreachable in the normal successor.
        if (Invoke2CallAllowed) {
          if (Function *Callee = II->getCalledFunction()) {
            auto *AANoUnw = A.getAAFor<AANoUnwind>(*this, *Callee);
            if (Callee->hasFnAttribute(Attribute::NoUnwind) ||
                (AANoUnw && AANoUnw->isAssumedNoUnwind())) {
              LLVM_DEBUG(dbgs()
                         << "[AAIsDead] Replace invoke with call inst\n");
              // We do not need an invoke (II) but instead want a call followed
              // by an unreachable. However, we do not remove II as other
              // abstract attributes might have it cached as part of their
              // results. Given that we modify the CFG anyway, we simply keep II
              // around but in a new dead block. To avoid II being live through
              // a different edge we have to ensure the block we place it in is
              // only reached from the current block of II and then not reached
              // at all when we insert the unreachable.
              SplitBlockPredecessors(NormalDestBB, {BB}, ".i2c");
              CallInst *CI = createCallMatchingInvoke(II);
              CI->insertBefore(II);
              CI->takeName(II);
              II->replaceAllUsesWith(CI);
              SplitPos = CI->getNextNode();
            }
          }
        }
      }

      // Split at the chosen position and turn the tail into unreachable code.
      BB = SplitPos->getParent();
      SplitBlock(BB, SplitPos);
      changeToUnreachable(BB->getTerminator(), /* UseLLVMTrap */ false);
      HasChanged = ChangeStatus::CHANGED;
    }

    return HasChanged;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;

  /// See AAIsDead::isAssumedDead(BasicBlock *).
  bool isAssumedDead(const BasicBlock *BB) const override {
    assert(BB->getParent() == &getAnchorScope() &&
           "BB must be in the same anchor scope function.");

    if (!getAssumed())
      return false;
    return !AssumedLiveBlocks.count(BB);
  }

  /// See AAIsDead::isKnownDead(BasicBlock *).
  bool isKnownDead(const BasicBlock *BB) const override {
    return getKnown() && isAssumedDead(BB);
  }

  /// See AAIsDead::isAssumed(Instruction *I).
  bool isAssumedDead(const Instruction *I) const override {
    assert(I->getParent()->getParent() == &getAnchorScope() &&
           "Instruction must be in the same anchor scope function.");

    if (!getAssumed())
      return false;

    // If it is not in AssumedLiveBlocks then it is for sure dead.
    // Otherwise, it can still be dead if it comes after a noreturn call in a
    // live block.
    if (!AssumedLiveBlocks.count(I->getParent()))
      return true;

    // If it is not after a noreturn call, then it is live.
    return isAfterNoReturn(I);
  }

  /// See AAIsDead::isKnownDead(Instruction *I).
  bool isKnownDead(const Instruction *I) const override {
    return getKnown() && isAssumedDead(I);
  }

  /// Check if instruction is after noreturn call, in other words, assumed dead.
  bool isAfterNoReturn(const Instruction *I) const;

  /// Determine if \p F might catch asynchronous exceptions.
  static bool mayCatchAsynchronousExceptions(const Function &F) {
    return F.hasPersonalityFn() && !canSimplifyInvokeNoUnwind(&F);
  }

  /// Collection of to be explored paths.
  SmallSetVector<const Instruction *, 8> ToBeExploredPaths;

  /// Collection of all assumed live BasicBlocks.
  DenseSet<const BasicBlock *> AssumedLiveBlocks;

  /// Collection of calls with noreturn attribute, assumed or known.
  SmallSetVector<const Instruction *, 4> NoReturnCalls;
};
1392 
struct AAIsDeadFunction final : public AAIsDeadImpl {
  // Liveness deduction anchored at a whole function (IRP_FUNCTION position).
  AAIsDeadFunction(Function &F) : AAIsDeadImpl(F, IRP_FUNCTION) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECL(DeadBlocks, Function,
               "Number of basic blocks classified as dead");
    BUILD_STAT_NAME(DeadBlocks, Function) +=
        getAnchorScope().size() - AssumedLiveBlocks.size();
    STATS_DECL(PartiallyDeadBlocks, Function,
               "Number of basic blocks classified as partially dead");
    BUILD_STAT_NAME(PartiallyDeadBlocks, Function) += NoReturnCalls.size();
  }
};
1407 
1408 bool AAIsDeadImpl::isAfterNoReturn(const Instruction *I) const {
1409   const Instruction *PrevI = I->getPrevNode();
1410   while (PrevI) {
1411     if (NoReturnCalls.count(PrevI))
1412       return true;
1413     PrevI = PrevI->getPrevNode();
1414   }
1415   return false;
1416 }
1417 
const Instruction *AAIsDeadImpl::findNextNoReturn(Attributor &A,
                                                  const Instruction *I) {
  const BasicBlock *BB = I->getParent();
  const Function &F = *BB->getParent();

  // Flag to determine if we can change an invoke to a call assuming the callee
  // is nounwind. This is not possible if the personality of the function allows
  // to catch asynchronous exceptions.
  bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(F);

  // TODO: We should have a function that determines if an "edge" is dead.
  //       Edges could be from an instruction to the next or from a terminator
  //       to the successor. For now, we need to special case the unwind block
  //       of InvokeInst below.

  // Walk forward through the block until an (assumed) noreturn call is found.
  while (I) {
    ImmutableCallSite ICS(I);

    if (ICS) {
      // Regardless of the no-return property of an invoke instruction we only
      // learn that the regular successor is not reachable through this
      // instruction but the unwind block might still be.
      if (auto *Invoke = dyn_cast<InvokeInst>(I)) {
        // Use nounwind to justify the unwind block is dead as well.
        auto *AANoUnw = A.getAAFor<AANoUnwind>(*this, *Invoke);
        if (!Invoke2CallAllowed ||
            (!AANoUnw || !AANoUnw->isAssumedNoUnwind())) {
          // The unwind destination stays reachable; queue it for exploration.
          AssumedLiveBlocks.insert(Invoke->getUnwindDest());
          ToBeExploredPaths.insert(&Invoke->getUnwindDest()->front());
        }
      }

      // Stop at the first call that is known or assumed to never return.
      auto *NoReturnAA = A.getAAFor<AANoReturn>(*this, *I);
      if (ICS.hasFnAttr(Attribute::NoReturn) ||
          (NoReturnAA && NoReturnAA->isAssumedNoReturn()))
        return I;
    }

    I = I->getNextNode();
  }

  // The block terminates normally; get new paths (reachable blocks).
  for (const BasicBlock *SuccBB : successors(BB)) {
    AssumedLiveBlocks.insert(SuccBB);
    ToBeExploredPaths.insert(&SuccBB->front());
  }

  // No noreturn instruction found.
  return nullptr;
}
1468 
ChangeStatus AAIsDeadImpl::updateImpl(Attributor &A) {
  // Temporary collection to iterate over existing noreturn instructions. This
  // will allow easier modification of the NoReturnCalls collection.
  SmallVector<const Instruction *, 8> NoReturnChanged;
  ChangeStatus Status = ChangeStatus::UNCHANGED;

  for (const Instruction *I : NoReturnCalls)
    NoReturnChanged.push_back(I);

  for (const Instruction *I : NoReturnChanged) {
    size_t Size = ToBeExploredPaths.size();

    // Rescan from the old noreturn position; the call may no longer be
    // assumed noreturn, in which case exploration continues past it.
    const Instruction *NextNoReturnI = findNextNoReturn(A, I);
    if (NextNoReturnI != I) {
      Status = ChangeStatus::CHANGED;
      NoReturnCalls.remove(I);
      if (NextNoReturnI)
        NoReturnCalls.insert(NextNoReturnI);
    }

    // Explore new paths discovered by the rescan above.
    while (Size != ToBeExploredPaths.size()) {
      Status = ChangeStatus::CHANGED;
      if (const Instruction *NextNoReturnI =
              findNextNoReturn(A, ToBeExploredPaths[Size++]))
        NoReturnCalls.insert(NextNoReturnI);
    }
  }

  LLVM_DEBUG(
      dbgs() << "[AAIsDead] AssumedLiveBlocks: " << AssumedLiveBlocks.size()
             << " Total number of blocks: " << getAnchorScope().size() << "\n");

  // If we know everything is live there is no need to query for liveness.
  if (NoReturnCalls.empty() &&
      getAnchorScope().size() == AssumedLiveBlocks.size()) {
    // Indicating a pessimistic fixpoint will cause the state to be "invalid"
    // which will cause the Attributor to not return the AAIsDead on request,
    // which will prevent us from querying isAssumedDead().
    indicatePessimisticFixpoint();
    assert(!isValidState() && "Expected an invalid state!");
  }

  return Status;
}
1514 
1515 /// -------------------- Dereferenceable Argument Attribute --------------------
1516 
1517 struct DerefState : AbstractState {
1518 
1519   /// State representing for dereferenceable bytes.
1520   IntegerState DerefBytesState;
1521 
1522   /// State representing that whether the value is nonnull or global.
1523   IntegerState NonNullGlobalState;
1524 
1525   /// Bits encoding for NonNullGlobalState.
1526   enum {
1527     DEREF_NONNULL = 1 << 0,
1528     DEREF_GLOBAL = 1 << 1,
1529   };
1530 
1531   /// See AbstractState::isValidState()
1532   bool isValidState() const override { return DerefBytesState.isValidState(); }
1533 
1534   /// See AbstractState::isAtFixpoint()
1535   bool isAtFixpoint() const override {
1536     return !isValidState() || (DerefBytesState.isAtFixpoint() &&
1537                                NonNullGlobalState.isAtFixpoint());
1538   }
1539 
1540   /// See AbstractState::indicateOptimisticFixpoint(...)
1541   ChangeStatus indicateOptimisticFixpoint() override {
1542     DerefBytesState.indicateOptimisticFixpoint();
1543     NonNullGlobalState.indicateOptimisticFixpoint();
1544     return ChangeStatus::UNCHANGED;
1545   }
1546 
1547   /// See AbstractState::indicatePessimisticFixpoint(...)
1548   ChangeStatus indicatePessimisticFixpoint() override {
1549     DerefBytesState.indicatePessimisticFixpoint();
1550     NonNullGlobalState.indicatePessimisticFixpoint();
1551     return ChangeStatus::CHANGED;
1552   }
1553 
1554   /// Update known dereferenceable bytes.
1555   void takeKnownDerefBytesMaximum(uint64_t Bytes) {
1556     DerefBytesState.takeKnownMaximum(Bytes);
1557   }
1558 
1559   /// Update assumed dereferenceable bytes.
1560   void takeAssumedDerefBytesMinimum(uint64_t Bytes) {
1561     DerefBytesState.takeAssumedMinimum(Bytes);
1562   }
1563 
1564   /// Update assumed NonNullGlobalState
1565   void updateAssumedNonNullGlobalState(bool IsNonNull, bool IsGlobal) {
1566     if (!IsNonNull)
1567       NonNullGlobalState.removeAssumedBits(DEREF_NONNULL);
1568     if (!IsGlobal)
1569       NonNullGlobalState.removeAssumedBits(DEREF_GLOBAL);
1570   }
1571 
1572   /// Equality for DerefState.
1573   bool operator==(const DerefState &R) {
1574     return this->DerefBytesState == R.DerefBytesState &&
1575            this->NonNullGlobalState == R.NonNullGlobalState;
1576   }
1577 };
1578 
struct AADereferenceableImpl : AADereferenceable, DerefState {
  IRPositionConstructorForward(AADereferenceableImpl, AADereferenceable);
  using StateType = DerefState;

  /// See AbstractAttribute::getState()
  /// {
  StateType &getState() override { return *this; }
  const StateType &getState() const override { return *this; }
  /// }

  /// See AADereferenceable::getAssumedDereferenceableBytes().
  uint32_t getAssumedDereferenceableBytes() const override {
    return DerefBytesState.getAssumed();
  }

  /// See AADereferenceable::getKnownDereferenceableBytes().
  uint32_t getKnownDereferenceableBytes() const override {
    return DerefBytesState.getKnown();
  }

  // Helper function for syncing nonnull state from an AANonNull attribute.
  // A missing attribute means nonnull can no longer be assumed.
  void syncNonNull(const AANonNull *NonNullAA) {
    if (!NonNullAA) {
      NonNullGlobalState.removeAssumedBits(DEREF_NONNULL);
      return;
    }

    if (NonNullAA->isKnownNonNull())
      NonNullGlobalState.addKnownBits(DEREF_NONNULL);

    if (!NonNullAA->isAssumedNonNull())
      NonNullGlobalState.removeAssumedBits(DEREF_NONNULL);
  }

  /// See AADereferenceable::isAssumedGlobal().
  bool isAssumedGlobal() const override {
    return NonNullGlobalState.isAssumed(DEREF_GLOBAL);
  }

  /// See AADereferenceable::isKnownGlobal().
  bool isKnownGlobal() const override {
    return NonNullGlobalState.isKnown(DEREF_GLOBAL);
  }

  /// See AADereferenceable::isAssumedNonNull().
  bool isAssumedNonNull() const override {
    return NonNullGlobalState.isAssumed(DEREF_NONNULL);
  }

  /// See AADereferenceable::isKnownNonNull().
  bool isKnownNonNull() const override {
    return NonNullGlobalState.isKnown(DEREF_NONNULL);
  }

  // Emit either dereferenceable(N) or dereferenceable_or_null(N) depending on
  // whether nonnull is assumed.
  void getDeducedAttributes(LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    // TODO: Add *_globally support
    if (isAssumedNonNull())
      Attrs.emplace_back(Attribute::getWithDereferenceableBytes(
          Ctx, getAssumedDereferenceableBytes()));
    else
      Attrs.emplace_back(Attribute::getWithDereferenceableOrNullBytes(
          Ctx, getAssumedDereferenceableBytes()));
  }
  uint64_t computeAssumedDerefenceableBytes(Attributor &A, Value &V,
                                            bool &IsNonNull, bool &IsGlobal);

  // Seed the known dereferenceable bytes from attributes already present in
  // the IR at this position.
  void initialize(Attributor &A) override {
    Function &F = getAnchorScope();
    unsigned AttrIdx = getIRPosition().getAttrIdx();

    for (Attribute::AttrKind AK :
         {Attribute::Dereferenceable, Attribute::DereferenceableOrNull})
      if (F.getAttributes().hasAttribute(AttrIdx, AK))
        takeKnownDerefBytesMaximum(F.getAttribute(AttrIdx, AK).getValueAsInt());
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr() const override {
    if (!getAssumedDereferenceableBytes())
      return "unknown-dereferenceable";
    return std::string("dereferenceable") +
           (isAssumedNonNull() ? "" : "_or_null") +
           (isAssumedGlobal() ? "_globally" : "") + "<" +
           std::to_string(getKnownDereferenceableBytes()) + "-" +
           std::to_string(getAssumedDereferenceableBytes()) + ">";
  }
};
1667 
/// Dereferenceable attribute deduction for the return value of a function.
struct AADereferenceableReturned final : AADereferenceableImpl {
  AADereferenceableReturned(Function &F)
      : AADereferenceableImpl(F, IRP_RETURNED) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECL_AND_TRACK_FNRET_ATTR(dereferenceable)
  }
};
1680 
// Helper that returns the dereferenceable bytes remaining after \p Offset,
// given \p DerefBytes known for the base. Yields 0 if the base is not known
// nonnull or the offset consumes all (or more than all) of the bytes.
static uint64_t calcDifferenceIfBaseIsNonNull(int64_t DerefBytes,
                                              int64_t Offset, bool IsNonNull) {
  if (IsNonNull) {
    const int64_t Remaining = DerefBytes - Offset;
    return Remaining > 0 ? (uint64_t)Remaining : 0;
  }
  return 0;
}
1688 
/// Compute the number of bytes \p V is assumed dereferenceable. \p IsNonNull
/// is narrowed based on what this query can show; \p IsGlobal is currently
/// always cleared because global tracking is not implemented yet.
uint64_t AADereferenceableImpl::computeAssumedDerefenceableBytes(
    Attributor &A, Value &V, bool &IsNonNull, bool &IsGlobal) {
  // TODO: Tracking the globally flag.
  IsGlobal = false;

  // First, we try to get information about V from Attributor.
  if (auto *DerefAA = A.getAAFor<AADereferenceable>(*this, V)) {
    IsNonNull &= DerefAA->isAssumedNonNull();
    return DerefAA->getAssumedDereferenceableBytes();
  }

  // Otherwise, we try to compute assumed bytes from base pointer.
  const DataLayout &DL = A.getDataLayout();
  unsigned IdxWidth =
      DL.getIndexSizeInBits(V.getType()->getPointerAddressSpace());
  APInt Offset(IdxWidth, 0);
  Value *Base = V.stripAndAccumulateInBoundsConstantOffsets(DL, Offset);

  if (auto *BaseDerefAA = A.getAAFor<AADereferenceable>(*this, *Base)) {
    // NOTE(review): This keeps IsNonNull only for a non-zero inbounds offset
    // from the base; an assumed-nonnull base at zero offset is not propagated
    // here (though it is used for the byte computation below) -- confirm that
    // asymmetry is intended.
    IsNonNull &= Offset != 0;
    return calcDifferenceIfBaseIsNonNull(
        BaseDerefAA->getAssumedDereferenceableBytes(), Offset.getSExtValue(),
        Offset != 0 || BaseDerefAA->isAssumedNonNull());
  }

  // Then, use IR information: a dereferenceable base pointer covers the store
  // size of its pointee type; nonnull-ness additionally requires that null is
  // not a defined pointer in this function/address space.
  if (isDereferenceablePointer(Base, Base->getType(), DL))
    return calcDifferenceIfBaseIsNonNull(
        DL.getTypeStoreSize(Base->getType()->getPointerElementType()),
        Offset.getSExtValue(),
        !NullPointerIsDefined(&getAnchorScope(),
                              V.getType()->getPointerAddressSpace()));

  // Nothing can be shown.
  IsNonNull = false;
  return 0;
}
1726 
1727 ChangeStatus AADereferenceableReturned::updateImpl(Attributor &A) {
1728   Function &F = getAnchorScope();
1729   auto BeforeState = static_cast<DerefState>(*this);
1730 
1731   syncNonNull(A.getAAFor<AANonNull>(*this, F));
1732 
1733   bool IsNonNull = isAssumedNonNull();
1734   bool IsGlobal = isAssumedGlobal();
1735 
1736   auto CheckReturnValue = [&](Value &RV) -> bool {
1737     takeAssumedDerefBytesMinimum(
1738         computeAssumedDerefenceableBytes(A, RV, IsNonNull, IsGlobal));
1739     return isValidState();
1740   };
1741 
1742   if (A.checkForAllReturnedValues(F, CheckReturnValue, *this)) {
1743     updateAssumedNonNullGlobalState(IsNonNull, IsGlobal);
1744     return BeforeState == static_cast<DerefState>(*this)
1745                ? ChangeStatus::UNCHANGED
1746                : ChangeStatus::CHANGED;
1747   }
1748   return indicatePessimisticFixpoint();
1749 }
1750 
/// Dereferenceable attribute deduction for a function argument.
struct AADereferenceableArgument final : AADereferenceableImpl {
  AADereferenceableArgument(Argument &A) : AADereferenceableImpl(A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECL_AND_TRACK_ARG_ATTR(dereferenceable)
  }
};
1762 
1763 ChangeStatus AADereferenceableArgument::updateImpl(Attributor &A) {
1764   Function &F = getAnchorScope();
1765   Argument &Arg = cast<Argument>(getAnchorValue());
1766 
1767   auto BeforeState = static_cast<DerefState>(*this);
1768 
1769   unsigned ArgNo = Arg.getArgNo();
1770 
1771   syncNonNull(A.getAAFor<AANonNull>(*this, F, ArgNo));
1772 
1773   bool IsNonNull = isAssumedNonNull();
1774   bool IsGlobal = isAssumedGlobal();
1775 
1776   // Callback function
1777   std::function<bool(CallSite)> CallSiteCheck = [&](CallSite CS) -> bool {
1778     assert(CS && "Sanity check: Call site was not initialized properly!");
1779 
1780     // Check that DereferenceableAA is AADereferenceableCallSiteArgument.
1781     if (auto *DereferenceableAA =
1782             A.getAAFor<AADereferenceable>(*this, *CS.getInstruction(), ArgNo)) {
1783       ImmutableCallSite ICS(
1784           &DereferenceableAA->getIRPosition().getAnchorValue());
1785       if (ICS && CS.getInstruction() == ICS.getInstruction()) {
1786         takeAssumedDerefBytesMinimum(
1787             DereferenceableAA->getAssumedDereferenceableBytes());
1788         IsNonNull &= DereferenceableAA->isAssumedNonNull();
1789         IsGlobal &= DereferenceableAA->isAssumedGlobal();
1790         return isValidState();
1791       }
1792     }
1793 
1794     takeAssumedDerefBytesMinimum(computeAssumedDerefenceableBytes(
1795         A, *CS.getArgOperand(ArgNo), IsNonNull, IsGlobal));
1796 
1797     return isValidState();
1798   };
1799 
1800   if (!A.checkForAllCallSites(F, CallSiteCheck, *this, true))
1801     return indicatePessimisticFixpoint();
1802 
1803   updateAssumedNonNullGlobalState(IsNonNull, IsGlobal);
1804 
1805   return BeforeState == static_cast<DerefState>(*this) ? ChangeStatus::UNCHANGED
1806                                                        : ChangeStatus::CHANGED;
1807 }
1808 
1809 /// Dereferenceable attribute for a call site argument.
1810 struct AADereferenceableCallSiteArgument final : AADereferenceableImpl {
1811   AADereferenceableCallSiteArgument(Instruction &I, unsigned ArgNo)
1812       : AADereferenceableImpl(CallSite(&I).getArgOperand(ArgNo), I, ArgNo) {}
1813 
1814   /// See AbstractAttribute::initialize(...).
1815   void initialize(Attributor &A) override {
1816     CallSite CS(&getAnchorValue());
1817     if (CS.paramHasAttr(getArgNo(), Attribute::Dereferenceable))
1818       takeKnownDerefBytesMaximum(CS.getDereferenceableBytes(getArgNo()));
1819 
1820     if (CS.paramHasAttr(getArgNo(), Attribute::DereferenceableOrNull))
1821       takeKnownDerefBytesMaximum(CS.getDereferenceableOrNullBytes(getArgNo()));
1822   }
1823 
1824   /// See AbstractAttribute::updateImpl(Attributor &A).
1825   ChangeStatus updateImpl(Attributor &A) override;
1826 
1827   /// See AbstractAttribute::trackStatistics()
1828   void trackStatistics() const override {
1829     STATS_DECL_AND_TRACK_CSARG_ATTR(dereferenceable)
1830   }
1831 };
1832 
1833 ChangeStatus AADereferenceableCallSiteArgument::updateImpl(Attributor &A) {
1834   // NOTE: Never look at the argument of the callee in this method.
1835   //       If we do this, "dereferenceable" is always deduced because of the
1836   //       assumption.
1837 
1838   Value &V = *getAssociatedValue();
1839 
1840   auto BeforeState = static_cast<DerefState>(*this);
1841 
1842   syncNonNull(A.getAAFor<AANonNull>(*this, getAnchorValue(), getArgNo()));
1843   bool IsNonNull = isAssumedNonNull();
1844   bool IsGlobal = isKnownGlobal();
1845 
1846   takeAssumedDerefBytesMinimum(
1847       computeAssumedDerefenceableBytes(A, V, IsNonNull, IsGlobal));
1848   updateAssumedNonNullGlobalState(IsNonNull, IsGlobal);
1849 
1850   return BeforeState == static_cast<DerefState>(*this) ? ChangeStatus::UNCHANGED
1851                                                        : ChangeStatus::CHANGED;
1852 }
1853 
1854 // ------------------------ Align Argument Attribute ------------------------
1855 
1856 struct AAAlignImpl : AAAlign {
1857   IRPositionConstructorForward(AAAlignImpl, AAAlign);
1858 
1859   // Max alignemnt value allowed in IR
1860   static const unsigned MAX_ALIGN = 1U << 29;
1861 
1862   const std::string getAsStr() const override {
1863     return getAssumedAlign() ? ("align<" + std::to_string(getKnownAlign()) +
1864                                 "-" + std::to_string(getAssumedAlign()) + ">")
1865                              : "unknown-align";
1866   }
1867 
1868   /// See AbstractAttriubute::initialize(...).
1869   void initialize(Attributor &A) override {
1870     takeAssumedMinimum(MAX_ALIGN);
1871 
1872     Function &F = getAnchorScope();
1873 
1874     unsigned AttrIdx = getAttrIdx();
1875     // Already the function has align attribute on return value or argument.
1876     if (F.getAttributes().hasAttribute(AttrIdx, Attribute::Alignment))
1877       addKnownBits(
1878           F.getAttribute(AttrIdx, Attribute::Alignment).getAlignment());
1879   }
1880 
1881   /// See AbstractAttribute::getDeducedAttributes
1882   virtual void
1883   getDeducedAttributes(LLVMContext &Ctx,
1884                        SmallVectorImpl<Attribute> &Attrs) const override {
1885     Attrs.emplace_back(Attribute::getWithAlignment(Ctx, getAssumedAlign()));
1886   }
1887 };
1888 
/// Align attribute for function return value.
struct AAAlignReturned final : AAAlignImpl {
  AAAlignReturned(Function &F) : AAAlignImpl(F, IRP_RETURNED) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECL_AND_TRACK_FNRET_ATTR(aligned)
  }
};
1901 
1902 ChangeStatus AAAlignReturned::updateImpl(Attributor &A) {
1903   Function &F = getAnchorScope();
1904 
1905   // Currently, align<n> is deduced if alignments in return values are assumed
1906   // as greater than n. We reach pessimistic fixpoint if any of the return value
1907   // wouldn't have align. If no assumed state was used for reasoning, an
1908   // optimistic fixpoint is reached earlier.
1909 
1910   base_t BeforeState = getAssumed();
1911   auto CheckReturnValue =
1912       [&](Value &RV, const SmallPtrSetImpl<ReturnInst *> &RetInsts) -> bool {
1913     auto *AlignAA = A.getAAFor<AAAlign>(*this, RV);
1914 
1915     if (AlignAA)
1916       takeAssumedMinimum(AlignAA->getAssumedAlign());
1917     else
1918       // Use IR information.
1919       takeAssumedMinimum(RV.getPointerAlignment(A.getDataLayout()));
1920 
1921     return isValidState();
1922   };
1923 
1924   if (!A.checkForAllReturnedValuesAndReturnInsts(F, CheckReturnValue, *this))
1925     return indicatePessimisticFixpoint();
1926 
1927   return (getAssumed() != BeforeState) ? ChangeStatus::CHANGED
1928                                        : ChangeStatus::UNCHANGED;
1929 }
1930 
1931 /// Align attribute for function argument.
1932 struct AAAlignArgument final : AAAlignImpl {
1933   AAAlignArgument(Argument &A) : AAAlignImpl(A) {}
1934 
1935   /// See AbstractAttribute::updateImpl(...).
1936   virtual ChangeStatus updateImpl(Attributor &A) override;
1937 
1938   /// See AbstractAttribute::trackStatistics()
1939   void trackStatistics() const override{STATS_DECL_AND_TRACK_ARG_ATTR(aligned)};
1940 };
1941 
1942 ChangeStatus AAAlignArgument::updateImpl(Attributor &A) {
1943 
1944   Function &F = getAnchorScope();
1945   Argument &Arg = cast<Argument>(getAnchorValue());
1946 
1947   unsigned ArgNo = Arg.getArgNo();
1948   const DataLayout &DL = A.getDataLayout();
1949 
1950   auto BeforeState = getAssumed();
1951 
1952   // Callback function
1953   std::function<bool(CallSite)> CallSiteCheck = [&](CallSite CS) {
1954     assert(CS && "Sanity check: Call site was not initialized properly!");
1955 
1956     auto *AlignAA = A.getAAFor<AAAlign>(*this, *CS.getInstruction(), ArgNo);
1957 
1958     // Check that AlignAA is AAAlignCallSiteArgument.
1959     if (AlignAA) {
1960       ImmutableCallSite ICS(&AlignAA->getIRPosition().getAnchorValue());
1961       if (ICS && CS.getInstruction() == ICS.getInstruction()) {
1962         takeAssumedMinimum(AlignAA->getAssumedAlign());
1963         return isValidState();
1964       }
1965     }
1966 
1967     Value *V = CS.getArgOperand(ArgNo);
1968     takeAssumedMinimum(V->getPointerAlignment(DL));
1969     return isValidState();
1970   };
1971 
1972   if (!A.checkForAllCallSites(F, CallSiteCheck, *this, true))
1973     indicatePessimisticFixpoint();
1974 
1975   return BeforeState == getAssumed() ? ChangeStatus::UNCHANGED
1976                                      : ChangeStatus ::CHANGED;
1977 }
1978 
1979 struct AAAlignCallSiteArgument final : AAAlignImpl {
1980   AAAlignCallSiteArgument(Instruction &I, unsigned ArgNo)
1981       : AAAlignImpl(CallSite(&I).getArgOperand(ArgNo), I, ArgNo) {}
1982 
1983   /// See AbstractAttribute::initialize(...).
1984   void initialize(Attributor &A) override {
1985     CallSite CS(&getAnchorValue());
1986     takeKnownMaximum(
1987         getAssociatedValue()->getPointerAlignment(A.getDataLayout()));
1988   }
1989 
1990   /// See AbstractAttribute::updateImpl(Attributor &A).
1991   ChangeStatus updateImpl(Attributor &A) override;
1992 
1993   /// See AbstractAttribute::trackStatistics()
1994   void trackStatistics() const override {
1995     STATS_DECL_AND_TRACK_CSARG_ATTR(aligned)
1996   }
1997 };
1998 
1999 ChangeStatus AAAlignCallSiteArgument::updateImpl(Attributor &A) {
2000   // NOTE: Never look at the argument of the callee in this method.
2001   //       If we do this, "align" is always deduced because of the assumption.
2002 
2003   auto BeforeState = getAssumed();
2004 
2005   Value &V = *getAssociatedValue();
2006 
2007   auto *AlignAA = A.getAAFor<AAAlign>(*this, V);
2008 
2009   if (AlignAA)
2010     takeAssumedMinimum(AlignAA->getAssumedAlign());
2011   else
2012     indicatePessimisticFixpoint();
2013 
2014   return BeforeState == getAssumed() ? ChangeStatus::UNCHANGED
2015                                      : ChangeStatus::CHANGED;
2016 }
2017 
2018 /// ------------------ Function No-Return Attribute ----------------------------
2019 struct AANoReturnImpl : public AANoReturn {
2020   IRPositionConstructorForward(AANoReturnImpl, AANoReturn);
2021 
2022   /// See AbstractAttribute::getAsStr().
2023   const std::string getAsStr() const override {
2024     return getAssumed() ? "noreturn" : "may-return";
2025   }
2026 
2027   /// See AbstractAttribute::initialize(...).
2028   void initialize(Attributor &A) override {
2029     Function &F = getAnchorScope();
2030     if (F.hasFnAttribute(getAttrKind()))
2031       indicateOptimisticFixpoint();
2032   }
2033 
2034   /// See AbstractAttribute::updateImpl(Attributor &A).
2035   virtual ChangeStatus updateImpl(Attributor &A) override {
2036     const Function &F = getAnchorScope();
2037     auto CheckForNoReturn = [](Instruction &) { return false; };
2038     if (!A.checkForAllInstructions(F, CheckForNoReturn, *this,
2039                                    {(unsigned)Instruction::Ret}))
2040       return indicatePessimisticFixpoint();
2041     return ChangeStatus::UNCHANGED;
2042   }
2043 };
2044 
/// NoReturn attribute deduction for a function.
struct AANoReturnFunction final : AANoReturnImpl {
  AANoReturnFunction(Function &F) : AANoReturnImpl(F, IRP_FUNCTION) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECL_AND_TRACK_FN_ATTR(noreturn)
  }
};
2053 
2054 /// ----------------------------------------------------------------------------
2055 ///                               Attributor
2056 /// ----------------------------------------------------------------------------
2057 
/// Apply \p Pred to every (live) call site of \p F. Returns false if the
/// predicate fails anywhere or, when \p RequireAllCallSites is set, if some
/// call sites may be unknown or unanalyzable.
bool Attributor::checkForAllCallSites(Function &F,
                                      std::function<bool(CallSite)> &Pred,
                                      const AbstractAttribute &QueryingAA,
                                      bool RequireAllCallSites) {
  // We can try to determine information from the call sites. However, this is
  // only possible if all call sites are known, hence the function has
  // internal linkage.
  // NOTE(review): private linkage would also keep all call sites visible;
  // confirm whether a broader local-linkage check was intended here.
  if (RequireAllCallSites && !F.hasInternalLinkage()) {
    LLVM_DEBUG(
        dbgs()
        << "Attributor: Function " << F.getName()
        << " has no internal linkage, hence not all call sites are known\n");
    return false;
  }

  for (const Use &U : F.uses()) {
    // NOTE(review): assumes every user of F is an instruction; a
    // constant-expression use would trip this cast -- confirm callers
    // guarantee this.
    Instruction *I = cast<Instruction>(U.getUser());
    Function *AnchorValue = I->getParent()->getParent();

    auto *LivenessAA = getAAFor<AAIsDead>(QueryingAA, *AnchorValue);

    // Skip dead calls.
    if (LivenessAA && LivenessAA->isAssumedDead(I))
      continue;

    // Uses that are not direct calls of F (e.g., F escaping as an operand)
    // cannot be reasoned about; the same holds for callers without an exact
    // definition.
    CallSite CS(U.getUser());
    if (!CS || !CS.isCallee(&U) || !CS.getCaller()->hasExactDefinition()) {
      if (!RequireAllCallSites)
        continue;

      LLVM_DEBUG(dbgs() << "Attributor: User " << *U.getUser()
                        << " is an invalid use of " << F.getName() << "\n");
      return false;
    }

    if (Pred(CS))
      continue;

    LLVM_DEBUG(dbgs() << "Attributor: Call site callback failed for "
                      << *CS.getInstruction() << "\n");
    return false;
  }

  return true;
}
2103 
2104 bool Attributor::checkForAllReturnedValuesAndReturnInsts(
2105     const Function &F,
2106     const function_ref<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)>
2107         &Pred,
2108     const AbstractAttribute &QueryingAA) {
2109 
2110   auto *AARetVal = getAAFor<AAReturnedValues>(QueryingAA, F);
2111   if (!AARetVal)
2112     return false;
2113 
2114   auto *LivenessAA = getAAFor<AAIsDead>(QueryingAA, F);
2115   if (!LivenessAA)
2116     return AARetVal->checkForAllReturnedValuesAndReturnInsts(Pred);
2117 
2118   auto LivenessFilter = [&](Value &RV,
2119                             const SmallPtrSetImpl<ReturnInst *> &ReturnInsts) {
2120     SmallPtrSet<ReturnInst *, 4> FilteredReturnInsts;
2121     for (ReturnInst *RI : ReturnInsts)
2122       if (!LivenessAA->isAssumedDead(RI))
2123         FilteredReturnInsts.insert(RI);
2124     if (!FilteredReturnInsts.empty())
2125       return Pred(RV, FilteredReturnInsts);
2126     return true;
2127   };
2128 
2129   return AARetVal->checkForAllReturnedValuesAndReturnInsts(LivenessFilter);
2130 }
2131 
2132 bool Attributor::checkForAllReturnedValues(
2133     const Function &F, const function_ref<bool(Value &)> &Pred,
2134     const AbstractAttribute &QueryingAA) {
2135 
2136   auto *AARetVal = getAAFor<AAReturnedValues>(QueryingAA, F);
2137   if (!AARetVal)
2138     return false;
2139 
2140   auto *LivenessAA = getAAFor<AAIsDead>(QueryingAA, F);
2141   if (!LivenessAA)
2142     return AARetVal->checkForAllReturnedValuesAndReturnInsts(
2143         [&](Value &RV, const SmallPtrSetImpl<ReturnInst *> &) {
2144           return Pred(RV);
2145         });
2146 
2147   auto LivenessFilter = [&](Value &RV,
2148                             const SmallPtrSetImpl<ReturnInst *> &ReturnInsts) {
2149     if (LivenessAA->isLiveInstSet(ReturnInsts.begin(), ReturnInsts.end()))
2150       return Pred(RV);
2151     return true;
2152   };
2153 
2154   return AARetVal->checkForAllReturnedValuesAndReturnInsts(LivenessFilter);
2155 }
2156 
2157 bool Attributor::checkForAllInstructions(
2158     const Function &F, const llvm::function_ref<bool(Instruction &)> &Pred,
2159     const AbstractAttribute &QueryingAA, const ArrayRef<unsigned> &Opcodes) {
2160 
2161   auto *LivenessAA = getAAFor<AAIsDead>(QueryingAA, F);
2162 
2163   auto &OpcodeInstMap = InfoCache.getOpcodeInstMapForFunction(F);
2164   for (unsigned Opcode : Opcodes) {
2165     for (Instruction *I : OpcodeInstMap[Opcode]) {
2166       // Skip dead instructions.
2167       if (LivenessAA && LivenessAA->isAssumedDead(I))
2168         continue;
2169 
2170       if (!Pred(*I))
2171         return false;
2172     }
2173   }
2174 
2175   return true;
2176 }
2177 
2178 bool Attributor::checkForAllReadWriteInstructions(
2179     const Function &F, const llvm::function_ref<bool(Instruction &)> &Pred,
2180     AbstractAttribute &QueryingAA) {
2181 
2182   auto *LivenessAA = getAAFor<AAIsDead>(QueryingAA, F);
2183 
2184   for (Instruction *I : InfoCache.getReadOrWriteInstsForFunction(F)) {
2185     // Skip dead instructions.
2186     if (LivenessAA && LivenessAA->isAssumedDead(I))
2187       continue;
2188 
2189     if (!Pred(*I))
2190       return false;
2191   }
2192 
2193   return true;
2194 }
2195 
/// Run the fixpoint iteration over all registered abstract attributes and
/// then manifest the deduced information in the IR. Returns whether the IR
/// was changed.
ChangeStatus Attributor::run() {
  // Initialize all abstract attributes.
  for (AbstractAttribute *AA : AllAbstractAttributes)
    AA->initialize(*this);

  LLVM_DEBUG(dbgs() << "[Attributor] Identified and initialized "
                    << AllAbstractAttributes.size()
                    << " abstract attributes.\n");

  // Now that all abstract attributes are collected and initialized we start
  // the abstract analysis.

  unsigned IterationCounter = 1;

  SmallVector<AbstractAttribute *, 64> ChangedAAs;
  SetVector<AbstractAttribute *> Worklist;
  Worklist.insert(AllAbstractAttributes.begin(), AllAbstractAttributes.end());

  do {
    LLVM_DEBUG(dbgs() << "\n\n[Attributor] #Iteration: " << IterationCounter
                      << ", Worklist size: " << Worklist.size() << "\n");

    // Add all abstract attributes that are potentially dependent on one that
    // changed to the work list. The dependences were recorded in QueryMap
    // when one attribute queried another.
    for (AbstractAttribute *ChangedAA : ChangedAAs) {
      auto &QuerriedAAs = QueryMap[ChangedAA];
      Worklist.insert(QuerriedAAs.begin(), QuerriedAAs.end());
    }

    // Reset the changed set.
    ChangedAAs.clear();

    // Update all abstract attribute in the work list and record the ones that
    // changed.
    for (AbstractAttribute *AA : Worklist)
      if (AA->update(*this) == ChangeStatus::CHANGED)
        ChangedAAs.push_back(AA);

    // Reset the work list and repopulate with the changed abstract attributes.
    // Note that dependent ones are added above.
    Worklist.clear();
    Worklist.insert(ChangedAAs.begin(), ChangedAAs.end());

    // Note the pre-increment: at most MaxFixpointIterations - 1 additional
    // iterations run after the first.
  } while (!Worklist.empty() && ++IterationCounter < MaxFixpointIterations);

  LLVM_DEBUG(dbgs() << "\n[Attributor] Fixpoint iteration done after: "
                    << IterationCounter << "/" << MaxFixpointIterations
                    << " iterations\n");

  bool FinishedAtFixpoint = Worklist.empty();

  // Reset abstract arguments not settled in a sound fixpoint by now. This
  // happens when we stopped the fixpoint iteration early. Note that only the
  // ones marked as "changed" *and* the ones transitively depending on them
  // need to be reverted to a pessimistic state. Others might not be in a
  // fixpoint state but we can use the optimistic results for them anyway.
  //
  // The loop below computes the transitive closure: ChangedAAs is appended to
  // while we iterate over it, which is why it is indexed instead of using
  // iterators (appends would invalidate them).
  SmallPtrSet<AbstractAttribute *, 32> Visited;
  for (unsigned u = 0; u < ChangedAAs.size(); u++) {
    AbstractAttribute *ChangedAA = ChangedAAs[u];
    if (!Visited.insert(ChangedAA).second)
      continue;

    AbstractState &State = ChangedAA->getState();
    if (!State.isAtFixpoint()) {
      State.indicatePessimisticFixpoint();

      NumAttributesTimedOut++;
    }

    auto &QuerriedAAs = QueryMap[ChangedAA];
    ChangedAAs.append(QuerriedAAs.begin(), QuerriedAAs.end());
  }

  LLVM_DEBUG({
    if (!Visited.empty())
      dbgs() << "\n[Attributor] Finalized " << Visited.size()
             << " abstract attributes.\n";
  });

  unsigned NumManifested = 0;
  unsigned NumAtFixpoint = 0;
  ChangeStatus ManifestChange = ChangeStatus::UNCHANGED;
  for (AbstractAttribute *AA : AllAbstractAttributes) {
    AbstractState &State = AA->getState();

    // If there is not already a fixpoint reached, we can now take the
    // optimistic state. This is correct because we enforced a pessimistic one
    // on abstract attributes that were transitively dependent on a changed one
    // already above.
    if (!State.isAtFixpoint())
      State.indicateOptimisticFixpoint();

    // If the state is invalid, we do not try to manifest it.
    if (!State.isValidState())
      continue;

    // Manifest the state and record if we changed the IR.
    ChangeStatus LocalChange = AA->manifest(*this);
    // Statistics are only tracked for manifested attributes, and only when
    // statistics collection is enabled.
    if (LocalChange == ChangeStatus::CHANGED && AreStatisticsEnabled())
      AA->trackStatistics();

    ManifestChange = ManifestChange | LocalChange;

    NumAtFixpoint++;
    NumManifested += (LocalChange == ChangeStatus::CHANGED);
  }

  // Silence -Wunused-variable in builds where LLVM_DEBUG compiles away.
  (void)NumManifested;
  (void)NumAtFixpoint;
  LLVM_DEBUG(dbgs() << "\n[Attributor] Manifested " << NumManifested
                    << " arguments while " << NumAtFixpoint
                    << " were in a valid fixpoint state\n");

  // If verification is requested, we finished this run at a fixpoint, and the
  // IR was changed, we re-run the whole fixpoint analysis, starting at
  // re-initialization of the arguments. This re-run should not result in an IR
  // change. Though, the (virtual) state of attributes at the end of the re-run
  // might be more optimistic than the known state or the IR state if the better
  // state cannot be manifested.
  if (VerifyAttributor && FinishedAtFixpoint &&
      ManifestChange == ChangeStatus::CHANGED) {
    VerifyAttributor = false;
    ChangeStatus VerifyStatus = run();
    if (VerifyStatus != ChangeStatus::UNCHANGED)
      llvm_unreachable(
          "Attributor verification failed, re-run did result in an IR change "
          "even after a fixpoint was reached in the original run. (False "
          "positives possible!)");
    VerifyAttributor = true;
  }

  NumAttributesManifested += NumManifested;
  NumAttributesValidFixpoint += NumAtFixpoint;

  return ManifestChange;
}
2332 
2333 /// Helper function that checks if an abstract attribute of type \p AAType
2334 /// should be created for \p V (with argument number \p ArgNo) and if so creates
2335 /// and registers it with the Attributor \p A.
2336 ///
2337 /// This method will look at the provided whitelist. If one is given and the
2338 /// kind \p AAType::ID is not contained, no abstract attribute is created.
2339 ///
2340 /// \returns The created abstract argument, or nullptr if none was created.
2341 template <typename AAType, typename ValueType, typename... ArgsTy>
2342 static AAType *checkAndRegisterAA(const Function &F, Attributor &A,
2343                                   DenseSet<const char *> *Whitelist,
2344                                   ValueType &V, int ArgNo, ArgsTy... Args) {
2345   if (Whitelist && !Whitelist->count(&AAType::ID))
2346     return nullptr;
2347 
2348   return &A.registerAA<AAType>(*new AAType(V, Args...), ArgNo);
2349 }
2350 
2351 void Attributor::identifyDefaultAbstractAttributes(
2352     Function &F, DenseSet<const char *> *Whitelist) {
2353 
2354   // Check for dead BasicBlocks in every function.
2355   // We need dead instruction detection because we do not want to deal with
2356   // broken IR in which SSA rules do not apply.
2357   checkAndRegisterAA<AAIsDeadFunction>(F, *this, /* Whitelist */ nullptr, F,
2358                                        -1);
2359 
2360   // Every function might be "will-return".
2361   checkAndRegisterAA<AAWillReturnFunction>(F, *this, Whitelist, F, -1);
2362 
2363   // Every function can be nounwind.
2364   checkAndRegisterAA<AANoUnwindFunction>(F, *this, Whitelist, F, -1);
2365 
2366   // Every function might be marked "nosync"
2367   checkAndRegisterAA<AANoSyncFunction>(F, *this, Whitelist, F, -1);
2368 
2369   // Every function might be "no-free".
2370   checkAndRegisterAA<AANoFreeFunction>(F, *this, Whitelist, F, -1);
2371 
2372   // Every function might be "no-return".
2373   checkAndRegisterAA<AANoReturnFunction>(F, *this, Whitelist, F, -1);
2374 
2375   // Return attributes are only appropriate if the return type is non void.
2376   Type *ReturnType = F.getReturnType();
2377   if (!ReturnType->isVoidTy()) {
2378     // Argument attribute "returned" --- Create only one per function even
2379     // though it is an argument attribute.
2380     checkAndRegisterAA<AAReturnedValuesFunction>(F, *this, Whitelist, F, -1);
2381 
2382     if (ReturnType->isPointerTy()) {
2383       // Every function with pointer return type might be marked align.
2384       checkAndRegisterAA<AAAlignReturned>(F, *this, Whitelist, F, -1);
2385 
2386       // Every function with pointer return type might be marked nonnull.
2387       checkAndRegisterAA<AANonNullReturned>(F, *this, Whitelist, F, -1);
2388 
2389       // Every function with pointer return type might be marked noalias.
2390       checkAndRegisterAA<AANoAliasReturned>(F, *this, Whitelist, F, -1);
2391 
2392       // Every function with pointer return type might be marked
2393       // dereferenceable.
2394       checkAndRegisterAA<AADereferenceableReturned>(F, *this, Whitelist, F, -1);
2395     }
2396   }
2397 
2398   for (Argument &Arg : F.args()) {
2399     if (Arg.getType()->isPointerTy()) {
2400       // Every argument with pointer type might be marked nonnull.
2401       checkAndRegisterAA<AANonNullArgument>(F, *this, Whitelist, Arg,
2402                                             Arg.getArgNo());
2403 
2404       // Every argument with pointer type might be marked dereferenceable.
2405       checkAndRegisterAA<AADereferenceableArgument>(F, *this, Whitelist, Arg,
2406                                                     Arg.getArgNo());
2407 
2408       // Every argument with pointer type might be marked align.
2409       checkAndRegisterAA<AAAlignArgument>(F, *this, Whitelist, Arg,
2410                                           Arg.getArgNo());
2411     }
2412   }
2413 
2414   // Walk all instructions to find more attribute opportunities and also
2415   // interesting instructions that might be queried by abstract attributes
2416   // during their initialization or update.
2417   auto &ReadOrWriteInsts = InfoCache.FuncRWInstsMap[&F];
2418   auto &InstOpcodeMap = InfoCache.FuncInstOpcodeMap[&F];
2419 
2420   for (Instruction &I : instructions(&F)) {
2421     bool IsInterestingOpcode = false;
2422 
2423     // To allow easy access to all instructions in a function with a given
2424     // opcode we store them in the InfoCache. As not all opcodes are interesting
2425     // to concrete attributes we only cache the ones that are as identified in
2426     // the following switch.
2427     // Note: There are no concrete attributes now so this is initially empty.
2428     switch (I.getOpcode()) {
2429     default:
2430       assert((!ImmutableCallSite(&I)) && (!isa<CallBase>(&I)) &&
2431              "New call site/base instruction type needs to be known int the "
2432              "attributor.");
2433       break;
2434     case Instruction::Call:
2435     case Instruction::CallBr:
2436     case Instruction::Invoke:
2437     case Instruction::CleanupRet:
2438     case Instruction::CatchSwitch:
2439     case Instruction::Resume:
2440     case Instruction::Ret:
2441       IsInterestingOpcode = true;
2442     }
2443     if (IsInterestingOpcode)
2444       InstOpcodeMap[I.getOpcode()].push_back(&I);
2445     if (I.mayReadOrWriteMemory())
2446       ReadOrWriteInsts.push_back(&I);
2447 
2448     CallSite CS(&I);
2449     if (CS && CS.getCalledFunction()) {
2450       for (int i = 0, e = CS.getCalledFunction()->arg_size(); i < e; i++) {
2451         if (!CS.getArgument(i)->getType()->isPointerTy())
2452           continue;
2453 
2454         // Call site argument attribute "non-null".
2455         checkAndRegisterAA<AANonNullCallSiteArgument>(F, *this, Whitelist, I, i,
2456                                                       i);
2457 
2458         // Call site argument attribute "dereferenceable".
2459         checkAndRegisterAA<AADereferenceableCallSiteArgument>(
2460             F, *this, Whitelist, I, i, i);
2461 
2462         // Call site argument attribute "align".
2463         checkAndRegisterAA<AAAlignCallSiteArgument>(F, *this, Whitelist, I, i,
2464                                                     i);
2465       }
2466     }
2467   }
2468 }
2469 
2470 /// Helpers to ease debugging through output streams and print calls.
2471 ///
2472 ///{
2473 raw_ostream &llvm::operator<<(raw_ostream &OS, ChangeStatus S) {
2474   return OS << (S == ChangeStatus::CHANGED ? "changed" : "unchanged");
2475 }
2476 
2477 raw_ostream &llvm::operator<<(raw_ostream &OS, IRPosition::Kind AP) {
2478   switch (AP) {
2479   case IRPosition::IRP_ARGUMENT:
2480     return OS << "arg";
2481   case IRPosition::IRP_CALL_SITE_ARGUMENT:
2482     return OS << "cs_arg";
2483   case IRPosition::IRP_FUNCTION:
2484     return OS << "fn";
2485   case IRPosition::IRP_RETURNED:
2486     return OS << "fn_ret";
2487   }
2488   llvm_unreachable("Unknown attribute position!");
2489 }
2490 
2491 raw_ostream &llvm::operator<<(raw_ostream &OS, const IRPosition &Pos) {
2492   const Value *AV = Pos.getAssociatedValue();
2493   return OS << "{" << Pos.getPositionKind() << ":"
2494             << (AV ? AV->getName() : "n/a") << " ["
2495             << Pos.getAnchorValue().getName() << "@" << Pos.getArgNo() << "]}";
2496 }
2497 
2498 raw_ostream &llvm::operator<<(raw_ostream &OS, const IntegerState &S) {
2499   return OS << "(" << S.getKnown() << "-" << S.getAssumed() << ")"
2500             << static_cast<const AbstractState &>(S);
2501 }
2502 
2503 raw_ostream &llvm::operator<<(raw_ostream &OS, const AbstractState &S) {
2504   return OS << (!S.isValidState() ? "top" : (S.isAtFixpoint() ? "fix" : ""));
2505 }
2506 
2507 raw_ostream &llvm::operator<<(raw_ostream &OS, const AbstractAttribute &AA) {
2508   AA.print(OS);
2509   return OS;
2510 }
2511 
2512 void AbstractAttribute::print(raw_ostream &OS) const {
2513   OS << "[P: " << getIRPosition() << "][" << getAsStr() << "][S: " << getState()
2514      << "]";
2515 }
2516 ///}
2517 
2518 /// ----------------------------------------------------------------------------
2519 ///                       Pass (Manager) Boilerplate
2520 /// ----------------------------------------------------------------------------
2521 
2522 static bool runAttributorOnModule(Module &M) {
2523   if (DisableAttributor)
2524     return false;
2525 
2526   LLVM_DEBUG(dbgs() << "[Attributor] Run on module with " << M.size()
2527                     << " functions.\n");
2528 
2529   // Create an Attributor and initially empty information cache that is filled
2530   // while we identify default attribute opportunities.
2531   InformationCache InfoCache(M.getDataLayout());
2532   Attributor A(InfoCache);
2533 
2534   for (Function &F : M) {
2535     // TODO: Not all attributes require an exact definition. Find a way to
2536     //       enable deduction for some but not all attributes in case the
2537     //       definition might be changed at runtime, see also
2538     //       http://lists.llvm.org/pipermail/llvm-dev/2018-February/121275.html.
2539     // TODO: We could always determine abstract attributes and if sufficient
2540     //       information was found we could duplicate the functions that do not
2541     //       have an exact definition.
2542     if (!F.hasExactDefinition()) {
2543       NumFnWithoutExactDefinition++;
2544       continue;
2545     }
2546 
2547     // For now we ignore naked and optnone functions.
2548     if (F.hasFnAttribute(Attribute::Naked) ||
2549         F.hasFnAttribute(Attribute::OptimizeNone))
2550       continue;
2551 
2552     NumFnWithExactDefinition++;
2553 
2554     // Populate the Attributor with abstract attribute opportunities in the
2555     // function and the information cache with IR information.
2556     A.identifyDefaultAbstractAttributes(F);
2557   }
2558 
2559   return A.run() == ChangeStatus::CHANGED;
2560 }
2561 
2562 PreservedAnalyses AttributorPass::run(Module &M, ModuleAnalysisManager &AM) {
2563   if (runAttributorOnModule(M)) {
2564     // FIXME: Think about passes we will preserve and add them here.
2565     return PreservedAnalyses::none();
2566   }
2567   return PreservedAnalyses::all();
2568 }
2569 
2570 namespace {
2571 
2572 struct AttributorLegacyPass : public ModulePass {
2573   static char ID;
2574 
2575   AttributorLegacyPass() : ModulePass(ID) {
2576     initializeAttributorLegacyPassPass(*PassRegistry::getPassRegistry());
2577   }
2578 
2579   bool runOnModule(Module &M) override {
2580     if (skipModule(M))
2581       return false;
2582     return runAttributorOnModule(M);
2583   }
2584 
2585   void getAnalysisUsage(AnalysisUsage &AU) const override {
2586     // FIXME: Think about passes we will preserve and add them here.
2587     AU.setPreservesCFG();
2588   }
2589 };
2590 
2591 } // end anonymous namespace
2592 
2593 Pass *llvm::createAttributorLegacyPass() { return new AttributorLegacyPass(); }
2594 
// Definition of the legacy pass identifier declared in the class above.
char AttributorLegacyPass::ID = 0;

// Out-of-line definitions of the static ID members of the abstract attribute
// classes. The initializer value is irrelevant; presumably each member's
// unique address is what identifies the attribute kind — confirm against the
// declarations in Attributor.h.
const char AAReturnedValues::ID = 0;
const char AANoUnwind::ID = 0;
const char AANoSync::ID = 0;
const char AANoFree::ID = 0;
const char AANonNull::ID = 0;
const char AANoRecurse::ID = 0;
const char AAWillReturn::ID = 0;
const char AANoAlias::ID = 0;
const char AANoReturn::ID = 0;
const char AAIsDead::ID = 0;
const char AADereferenceable::ID = 0;
const char AAAlign::ID = 0;
2609 
// Register the legacy pass with command line name "attributor"; no analysis
// dependencies are listed between BEGIN and END.
INITIALIZE_PASS_BEGIN(AttributorLegacyPass, "attributor",
                      "Deduce and propagate attributes", false, false)
INITIALIZE_PASS_END(AttributorLegacyPass, "attributor",
                    "Deduce and propagate attributes", false, false)
2614