1 //===- Attributor.cpp - Module-wide attribute deduction -------------------===// 2 // 3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4 // See https://llvm.org/LICENSE.txt for license information. 5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6 // 7 //===----------------------------------------------------------------------===// 8 // 9 // This file implements an inter procedural pass that deduces and/or propagating 10 // attributes. This is done in an abstract interpretation style fixpoint 11 // iteration. See the Attributor.h file comment and the class descriptions in 12 // that file for more information. 13 // 14 //===----------------------------------------------------------------------===// 15 16 #include "llvm/Transforms/IPO/Attributor.h" 17 18 #include "llvm/ADT/DepthFirstIterator.h" 19 #include "llvm/ADT/STLExtras.h" 20 #include "llvm/ADT/SetVector.h" 21 #include "llvm/ADT/SmallPtrSet.h" 22 #include "llvm/ADT/SmallVector.h" 23 #include "llvm/ADT/Statistic.h" 24 #include "llvm/Analysis/CaptureTracking.h" 25 #include "llvm/Analysis/GlobalsModRef.h" 26 #include "llvm/Analysis/Loads.h" 27 #include "llvm/Analysis/ValueTracking.h" 28 #include "llvm/IR/Argument.h" 29 #include "llvm/IR/Attributes.h" 30 #include "llvm/IR/CFG.h" 31 #include "llvm/IR/InstIterator.h" 32 #include "llvm/IR/IntrinsicInst.h" 33 #include "llvm/Support/CommandLine.h" 34 #include "llvm/Support/Debug.h" 35 #include "llvm/Support/raw_ostream.h" 36 #include "llvm/Transforms/Utils/BasicBlockUtils.h" 37 #include "llvm/Transforms/Utils/Local.h" 38 39 #include <cassert> 40 41 using namespace llvm; 42 43 #define DEBUG_TYPE "attributor" 44 45 STATISTIC(NumFnWithExactDefinition, 46 "Number of function with exact definitions"); 47 STATISTIC(NumFnWithoutExactDefinition, 48 "Number of function without exact definitions"); 49 STATISTIC(NumAttributesTimedOut, 50 "Number of abstract attributes timed out before fixpoint"); 51 
STATISTIC(NumAttributesValidFixpoint, 52 "Number of abstract attributes in a valid fixpoint state"); 53 STATISTIC(NumAttributesManifested, 54 "Number of abstract attributes manifested in IR"); 55 STATISTIC(NumFnNoUnwind, "Number of functions marked nounwind"); 56 57 STATISTIC(NumFnUniqueReturned, "Number of function with unique return"); 58 STATISTIC(NumFnKnownReturns, "Number of function with known return values"); 59 STATISTIC(NumFnArgumentReturned, 60 "Number of function arguments marked returned"); 61 STATISTIC(NumFnNoSync, "Number of functions marked nosync"); 62 STATISTIC(NumFnNoFree, "Number of functions marked nofree"); 63 STATISTIC(NumFnReturnedNonNull, 64 "Number of function return values marked nonnull"); 65 STATISTIC(NumFnArgumentNonNull, "Number of function arguments marked nonnull"); 66 STATISTIC(NumCSArgumentNonNull, "Number of call site arguments marked nonnull"); 67 STATISTIC(NumFnWillReturn, "Number of functions marked willreturn"); 68 STATISTIC(NumFnArgumentNoAlias, "Number of function arguments marked noalias"); 69 STATISTIC(NumFnReturnedDereferenceable, 70 "Number of function return values marked dereferenceable"); 71 STATISTIC(NumFnArgumentDereferenceable, 72 "Number of function arguments marked dereferenceable"); 73 STATISTIC(NumCSArgumentDereferenceable, 74 "Number of call site arguments marked dereferenceable"); 75 STATISTIC(NumFnReturnedAlign, "Number of function return values marked align"); 76 STATISTIC(NumFnArgumentAlign, "Number of function arguments marked align"); 77 STATISTIC(NumCSArgumentAlign, "Number of call site arguments marked align"); 78 79 // TODO: Determine a good default value. 80 // 81 // In the LLVM-TS and SPEC2006, 32 seems to not induce compile time overheads 82 // (when run with the first 5 abstract attributes). The results also indicate 83 // that we never reach 32 iterations but always find a fixpoint sooner. 
84 // 85 // This will become more evolved once we perform two interleaved fixpoint 86 // iterations: bottom-up and top-down. 87 static cl::opt<unsigned> 88 MaxFixpointIterations("attributor-max-iterations", cl::Hidden, 89 cl::desc("Maximal number of fixpoint iterations."), 90 cl::init(32)); 91 92 static cl::opt<bool> DisableAttributor( 93 "attributor-disable", cl::Hidden, 94 cl::desc("Disable the attributor inter-procedural deduction pass."), 95 cl::init(true)); 96 97 static cl::opt<bool> VerifyAttributor( 98 "attributor-verify", cl::Hidden, 99 cl::desc("Verify the Attributor deduction and " 100 "manifestation of attributes -- may issue false-positive errors"), 101 cl::init(false)); 102 103 /// Logic operators for the change status enum class. 104 /// 105 ///{ 106 ChangeStatus llvm::operator|(ChangeStatus l, ChangeStatus r) { 107 return l == ChangeStatus::CHANGED ? l : r; 108 } 109 ChangeStatus llvm::operator&(ChangeStatus l, ChangeStatus r) { 110 return l == ChangeStatus::UNCHANGED ? l : r; 111 } 112 ///} 113 114 /// Helper to adjust the statistics. 
/// Bump the STATISTIC counter matching attribute kind \p Attr manifested at
/// position \p MP. Only the kinds (and, for position-dependent kinds, the
/// positions) with a dedicated counter above are tracked; all others are
/// silently ignored.
static void bookkeeping(AbstractAttribute::ManifestPosition MP,
                        const Attribute &Attr) {
  // Avoid all work when statistics are not collected.
  if (!AreStatisticsEnabled())
    return;

  switch (Attr.getKindAsEnum()) {
  case Attribute::Alignment:
    // Alignment is position dependent, dispatch on the manifest position.
    switch (MP) {
    case AbstractAttribute::MP_RETURNED:
      NumFnReturnedAlign++;
      break;
    case AbstractAttribute::MP_ARGUMENT:
      NumFnArgumentAlign++;
      break;
    case AbstractAttribute::MP_CALL_SITE_ARGUMENT:
      NumCSArgumentAlign++;
      break;
    default:
      break;
    }
    break;
  case Attribute::Dereferenceable:
    // Dereferenceable is position dependent as well.
    switch (MP) {
    case AbstractAttribute::MP_RETURNED:
      NumFnReturnedDereferenceable++;
      break;
    case AbstractAttribute::MP_ARGUMENT:
      NumFnArgumentDereferenceable++;
      break;
    case AbstractAttribute::MP_CALL_SITE_ARGUMENT:
      NumCSArgumentDereferenceable++;
      break;
    default:
      break;
    }
    break;
  // The kinds below have a single counter each; `return` and `break` are
  // interchangeable here since nothing follows the outer switch.
  case Attribute::NoUnwind:
    NumFnNoUnwind++;
    return;
  case Attribute::Returned:
    NumFnArgumentReturned++;
    return;
  case Attribute::NoSync:
    NumFnNoSync++;
    break;
  case Attribute::NoFree:
    NumFnNoFree++;
    break;
  case Attribute::NonNull:
    // NonNull is position dependent as well.
    switch (MP) {
    case AbstractAttribute::MP_RETURNED:
      NumFnReturnedNonNull++;
      break;
    case AbstractAttribute::MP_ARGUMENT:
      NumFnArgumentNonNull++;
      break;
    case AbstractAttribute::MP_CALL_SITE_ARGUMENT:
      NumCSArgumentNonNull++;
      break;
    default:
      break;
    }
    break;
  case Attribute::WillReturn:
    NumFnWillReturn++;
    break;
  case Attribute::NoAlias:
    NumFnArgumentNoAlias++;
    return;
  default:
    return;
  }
}

/// Callback types used by genericValueTraversal (see below): the "follow"
/// callback decides whether a value should be processed at all, the "visit"
/// callback is invoked on leaf values that cannot be looked through further.
template <typename StateTy>
using followValueCB_t = std::function<bool(Value *, StateTy &State)>;
template <typename StateTy>
using visitValueCB_t = std::function<void(Value *, StateTy &State)>;

/// Recursively visit all values that might become \p InitV at some point.
/// This will be done by looking through cast instructions, selects, phis, and
/// calls with the "returned" attribute. The callback \p FollowValueCB is asked
/// before a potential origin value is looked at. If no \p FollowValueCB is
/// passed, a default one is used that will make sure we visit every value only
/// once. Once we cannot look through the value any further, the callback
/// \p VisitValueCB is invoked and passed the current value and the \p State. To
/// limit how much effort is invested, we will never visit more than
/// \p MaxValues values. Returns true if all origin values were visited, false
/// if the traversal was aborted because \p MaxValues was exceeded.
template <typename StateTy>
static bool genericValueTraversal(
    Value *InitV, StateTy &State, visitValueCB_t<StateTy> &VisitValueCB,
    followValueCB_t<StateTy> *FollowValueCB = nullptr, int MaxValues = 8) {

  SmallPtrSet<Value *, 16> Visited;
  // NOTE(review): the default callback is declared with `bool`, not
  // `StateTy`, so the pointer assignment below only compiles when the
  // template is instantiated with StateTy == bool -- the sole instantiation
  // in this file today. Confirm before adding callers with other state types.
  followValueCB_t<bool> DefaultFollowValueCB = [&](Value *Val, bool &) {
    return Visited.insert(Val).second;
  };

  if (!FollowValueCB)
    FollowValueCB = &DefaultFollowValueCB;

  SmallVector<Value *, 16> Worklist;
  Worklist.push_back(InitV);

  int Iteration = 0;
  do {
    Value *V = Worklist.pop_back_val();

    // Check if we should process the current value. To prevent endless
    // recursion keep a record of the values we followed!
    if (!(*FollowValueCB)(V, State))
      continue;

    // Make sure we limit the compile time for complex expressions.
    if (Iteration++ >= MaxValues)
      return false;

    // Explicitly look through calls with a "returned" attribute if we do
    // not have a pointer as stripPointerCasts only works on them.
    if (V->getType()->isPointerTy()) {
      V = V->stripPointerCasts();
    } else {
      CallSite CS(V);
      if (CS && CS.getCalledFunction()) {
        Value *NewV = nullptr;
        for (Argument &Arg : CS.getCalledFunction()->args())
          if (Arg.hasReturnedAttr()) {
            NewV = CS.getArgOperand(Arg.getArgNo());
            break;
          }
        if (NewV) {
          Worklist.push_back(NewV);
          continue;
        }
      }
    }

    // Look through select instructions, visit both potential values.
    if (auto *SI = dyn_cast<SelectInst>(V)) {
      Worklist.push_back(SI->getTrueValue());
      Worklist.push_back(SI->getFalseValue());
      continue;
    }

    // Look through phi nodes, visit all operands.
    if (auto *PHI = dyn_cast<PHINode>(V)) {
      Worklist.append(PHI->op_begin(), PHI->op_end());
      continue;
    }

    // Once a leaf is reached we inform the user through the callback.
    VisitValueCB(V, State);
  } while (!Worklist.empty());

  // All values have been visited.
  return true;
}

/// Helper to identify the correct offset into an attribute list for the
/// manifest position \p MP (and, for argument positions, argument \p ArgNo).
static unsigned getAttrIndex(AbstractAttribute::ManifestPosition MP,
                             unsigned ArgNo = 0) {
  switch (MP) {
  case AbstractAttribute::MP_ARGUMENT:
  case AbstractAttribute::MP_CALL_SITE_ARGUMENT:
    return ArgNo + AttributeList::FirstArgIndex;
  case AbstractAttribute::MP_FUNCTION:
    return AttributeList::FunctionIndex;
  case AbstractAttribute::MP_RETURNED:
    return AttributeList::ReturnIndex;
  }
  llvm_unreachable("Unknown manifest position!");
}

/// Return true if \p New is equal or worse than \p Old.
static bool isEqualOrWorse(const Attribute &New, const Attribute &Old) {
  // Only integer attributes carry a comparable value; all other kinds of the
  // same enum/string kind are considered equivalent.
  if (!Old.isIntAttribute())
    return true;

  return Old.getValueAsInt() >= New.getValueAsInt();
}

/// Return true if the information provided by \p Attr was added to the
/// attribute list \p Attrs.
/// This is only the case if it was not already present
/// in \p Attrs at the position described by \p MP and \p ArgNo.
static bool addIfNotExistent(LLVMContext &Ctx, const Attribute &Attr,
                             AttributeList &Attrs,
                             AbstractAttribute::ManifestPosition MP,
                             unsigned ArgNo = 0) {
  unsigned AttrIdx = getAttrIndex(MP, ArgNo);

  if (Attr.isEnumAttribute()) {
    Attribute::AttrKind Kind = Attr.getKindAsEnum();
    if (Attrs.hasAttribute(AttrIdx, Kind))
      if (isEqualOrWorse(Attr, Attrs.getAttribute(AttrIdx, Kind)))
        return false;
    Attrs = Attrs.addAttribute(Ctx, AttrIdx, Attr);
    return true;
  }
  if (Attr.isStringAttribute()) {
    StringRef Kind = Attr.getKindAsString();
    // NOTE(review): isEqualOrWorse always returns true for non-int
    // attributes, so an existing string attribute is never replaced here.
    if (Attrs.hasAttribute(AttrIdx, Kind))
      if (isEqualOrWorse(Attr, Attrs.getAttribute(AttrIdx, Kind)))
        return false;
    Attrs = Attrs.addAttribute(Ctx, AttrIdx, Attr);
    return true;
  }
  if (Attr.isIntAttribute()) {
    Attribute::AttrKind Kind = Attr.getKindAsEnum();
    if (Attrs.hasAttribute(AttrIdx, Kind))
      if (isEqualOrWorse(Attr, Attrs.getAttribute(AttrIdx, Kind)))
        return false;
    // Int attributes must be removed before re-adding with the better value.
    Attrs = Attrs.removeAttribute(Ctx, AttrIdx, Kind);
    Attrs = Attrs.addAttribute(Ctx, AttrIdx, Attr);
    return true;
  }

  llvm_unreachable("Expected enum or string attribute!");
}

ChangeStatus AbstractAttribute::update(Attributor &A) {
  ChangeStatus HasChanged = ChangeStatus::UNCHANGED;
  // Attributes at a fixpoint never change again; skip the update entirely.
  if (getState().isAtFixpoint())
    return HasChanged;

  LLVM_DEBUG(dbgs() << "[Attributor] Update: " << *this << "\n");

  HasChanged = updateImpl(A);

  LLVM_DEBUG(dbgs() << "[Attributor] Update " << HasChanged << " " << *this
                    << "\n");

  return HasChanged;
}

ChangeStatus AbstractAttribute::manifest(Attributor &A) {
  assert(getState().isValidState() &&
         "Attempted to manifest an invalid state!");
  assert(getAssociatedValue() &&
         "Attempted to manifest an attribute without associated value!");
  ChangeStatus HasChanged = ChangeStatus::UNCHANGED;
  SmallVector<Attribute, 4> DeducedAttrs;
  getDeducedAttributes(DeducedAttrs);

  Function &ScopeFn = getAnchorScope();
  LLVMContext &Ctx = ScopeFn.getContext();
  ManifestPosition MP = getManifestPosition();

  AttributeList Attrs;
  SmallVector<unsigned, 4> ArgNos;

  // In the following some generic code that will manifest attributes in
  // DeducedAttrs if they improve the current IR. Due to the different
  // annotation positions we use the underlying AttributeList interface.
  // Note that MP_CALL_SITE_ARGUMENT can annotate multiple locations.

  switch (MP) {
  case MP_ARGUMENT:
    ArgNos.push_back(cast<Argument>(getAssociatedValue())->getArgNo());
    Attrs = ScopeFn.getAttributes();
    break;
  case MP_FUNCTION:
  case MP_RETURNED:
    // Function and return positions use a fixed slot; the 0 is a placeholder
    // that getAttrIndex ignores for these positions.
    ArgNos.push_back(0);
    Attrs = ScopeFn.getAttributes();
    break;
  case MP_CALL_SITE_ARGUMENT: {
    // The associated value may appear as several operands of the anchored
    // call site; collect every matching operand position.
    CallSite CS(&getAnchoredValue());
    for (unsigned u = 0, e = CS.getNumArgOperands(); u != e; u++)
      if (CS.getArgOperand(u) == getAssociatedValue())
        ArgNos.push_back(u);
    Attrs = CS.getAttributes();
  }
  }

  for (const Attribute &Attr : DeducedAttrs) {
    for (unsigned ArgNo : ArgNos) {
      if (!addIfNotExistent(Ctx, Attr, Attrs, MP, ArgNo))
        continue;

      HasChanged = ChangeStatus::CHANGED;
      bookkeeping(MP, Attr);
    }
  }

  if (HasChanged == ChangeStatus::UNCHANGED)
    return HasChanged;

  // Write the (modified) attribute list back to the IR.
  switch (MP) {
  case MP_ARGUMENT:
  case MP_FUNCTION:
  case MP_RETURNED:
    ScopeFn.setAttributes(Attrs);
    break;
  case MP_CALL_SITE_ARGUMENT:
    CallSite(&getAnchoredValue()).setAttributes(Attrs);
  }

  return HasChanged;
}

Function &AbstractAttribute::getAnchorScope() {
  Value &V = getAnchoredValue();
  if (isa<Function>(V))
    return cast<Function>(V);
  if (isa<Argument>(V))
    return *cast<Argument>(V).getParent();
  if
      (isa<Instruction>(V))
    return *cast<Instruction>(V).getFunction();
  llvm_unreachable("No scope for anchored value found!");
}

const Function &AbstractAttribute::getAnchorScope() const {
  // Delegate to the non-const implementation; the getter does not mutate.
  return const_cast<AbstractAttribute *>(this)->getAnchorScope();
}

// Helper function that returns argument index of value.
// If the value is not an argument, this returns -1.
static int getArgNo(Value &V) {
  if (auto *Arg = dyn_cast<Argument>(&V))
    return Arg->getArgNo();
  return -1;
}

/// -----------------------NoUnwind Function Attribute--------------------------

/// Deduces the "nounwind" attribute for a function by checking that no
/// contained instruction may throw.
struct AANoUnwindFunction : AANoUnwind, BooleanState {

  AANoUnwindFunction(Function &F, InformationCache &InfoCache)
      : AANoUnwind(F, InfoCache) {}

  /// See AbstractAttribute::getState()
  /// {
  AbstractState &getState() override { return *this; }
  const AbstractState &getState() const override { return *this; }
  /// }

  /// See AbstractAttribute::getManifestPosition().
  ManifestPosition getManifestPosition() const override { return MP_FUNCTION; }

  const std::string getAsStr() const override {
    return getAssumed() ? "nounwind" : "may-unwind";
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;

  /// See AANoUnwind::isAssumedNoUnwind().
  bool isAssumedNoUnwind() const override { return getAssumed(); }

  /// See AANoUnwind::isKnownNoUnwind().
  bool isKnownNoUnwind() const override { return getKnown(); }
};

ChangeStatus AANoUnwindFunction::updateImpl(Attributor &A) {
  Function &F = getAnchorScope();

  // The map from instruction opcodes to those instructions in the function.
  auto &OpcodeInstMap = InfoCache.getOpcodeInstMapForFunction(F);
  // Only these opcodes can transfer control out of the function via unwinding.
  auto Opcodes = {
      (unsigned)Instruction::Invoke,      (unsigned)Instruction::CallBr,
      (unsigned)Instruction::Call,        (unsigned)Instruction::CleanupRet,
      (unsigned)Instruction::CatchSwitch, (unsigned)Instruction::Resume};

  auto *LivenessAA = A.getAAFor<AAIsDead>(*this, F);

  for (unsigned Opcode : Opcodes) {
    for (Instruction *I : OpcodeInstMap[Opcode]) {
      // Skip dead instructions.
      if (LivenessAA && LivenessAA->isAssumedDead(I))
        continue;

      if (!I->mayThrow())
        continue;

      // A throwing call site may still be nounwind if its callee is assumed
      // nounwind; otherwise we have to give up.
      auto *NoUnwindAA = A.getAAFor<AANoUnwind>(*this, *I);

      if (!NoUnwindAA || !NoUnwindAA->isAssumedNoUnwind()) {
        indicatePessimisticFixpoint();
        return ChangeStatus::CHANGED;
      }
    }
  }
  return ChangeStatus::UNCHANGED;
}

/// --------------------- Function Return Values -------------------------------

/// "Attribute" that collects all potential returned values and the return
/// instructions that they arise from.
///
/// If there is a unique returned value R, the manifest method will:
///   - mark R with the "returned" attribute, if R is an argument.
class AAReturnedValuesImpl final : public AAReturnedValues, AbstractState {

  /// Mapping of values potentially returned by the associated function to the
  /// return instructions that might return them.
  DenseMap<Value *, SmallPtrSet<ReturnInst *, 2>> ReturnedValues;

  /// State flags
  ///
  ///{
  bool IsFixed;
  bool IsValidState;
  bool HasOverdefinedReturnedCalls;
  ///}

  /// Collect values that could become \p V in the set \p Values, each mapped to
  /// \p ReturnInsts.
  void collectValuesRecursively(
      Attributor &A, Value *V, SmallPtrSetImpl<ReturnInst *> &ReturnInsts,
      DenseMap<Value *, SmallPtrSet<ReturnInst *, 2>> &Values) {

    visitValueCB_t<bool> VisitValueCB = [&](Value *Val, bool &) {
      // Leaf values from another function would indicate a traversal bug.
      assert(!isa<Instruction>(Val) ||
             &getAnchorScope() == cast<Instruction>(Val)->getFunction());
      Values[Val].insert(ReturnInsts.begin(), ReturnInsts.end());
    };

    bool UnusedBool;
    bool Success = genericValueTraversal(V, UnusedBool, VisitValueCB);

    // If we did abort the above traversal we haven't seen all the values.
    // Consequently, we cannot know if the information we would derive is
    // accurate so we give up early.
    if (!Success)
      indicatePessimisticFixpoint();
  }

public:
  /// See AbstractAttribute::AbstractAttribute(...).
  AAReturnedValuesImpl(Function &F, InformationCache &InfoCache)
      : AAReturnedValues(F, InfoCache) {
    // We do not have an associated argument yet.
    AssociatedVal = nullptr;
  }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    // Reset the state.
    AssociatedVal = nullptr;
    IsFixed = false;
    IsValidState = true;
    HasOverdefinedReturnedCalls = false;
    ReturnedValues.clear();

    Function &F = cast<Function>(getAnchoredValue());

    // The map from instruction opcodes to those instructions in the function.
    auto &OpcodeInstMap = InfoCache.getOpcodeInstMapForFunction(F);

    // Look through all arguments, if one is marked as returned we are done.
    for (Argument &Arg : F.args()) {
      if (Arg.hasReturnedAttr()) {

        auto &ReturnInstSet = ReturnedValues[&Arg];
        for (Instruction *RI : OpcodeInstMap[Instruction::Ret])
          ReturnInstSet.insert(cast<ReturnInst>(RI));

        indicateOptimisticFixpoint();
        return;
      }
    }

    // If no argument was marked as returned we look at all return instructions
    // and collect potentially returned values.
    for (Instruction *RI : OpcodeInstMap[Instruction::Ret]) {
      SmallPtrSet<ReturnInst *, 1> RISet({cast<ReturnInst>(RI)});
      collectValuesRecursively(A, cast<ReturnInst>(RI)->getReturnValue(), RISet,
                               ReturnedValues);
    }
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override;

  /// See AbstractAttribute::getState(...).
  AbstractState &getState() override { return *this; }

  /// See AbstractAttribute::getState(...).
  const AbstractState &getState() const override { return *this; }

  /// See AbstractAttribute::getManifestPosition().
  ManifestPosition getManifestPosition() const override { return MP_ARGUMENT; }

  /// See AbstractAttribute::updateImpl(Attributor &A).
  ChangeStatus updateImpl(Attributor &A) override;

  /// Return the number of potential return values, -1 if unknown.
  /// NOTE(review): the return type is size_t, so the -1 sentinel wraps to
  /// SIZE_MAX; callers currently only compare the result against 0.
  size_t getNumReturnValues() const {
    return isValidState() ? ReturnedValues.size() : -1;
  }

  /// Return an assumed unique return value if a single candidate is found. If
  /// there cannot be one, return a nullptr. If it is not clear yet, return the
  /// Optional::NoneType.
  Optional<Value *>
  getAssumedUniqueReturnValue(const AAIsDead *LivenessAA) const;

  /// See AbstractState::checkForallReturnedValues(...).
  bool checkForallReturnedValues(
      std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)> &Pred)
      const override;

  /// Pretty print the attribute similar to the IR representation.
  const std::string getAsStr() const override;

  /// See AbstractState::isAtFixpoint().
  bool isAtFixpoint() const override { return IsFixed; }

  /// See AbstractState::isValidState().
  bool isValidState() const override { return IsValidState; }

  /// See AbstractState::indicateOptimisticFixpoint(...).
  void indicateOptimisticFixpoint() override {
    IsFixed = true;
    // NOTE(review): `&= true` is a no-op; validity is deliberately left
    // untouched when the state is fixed optimistically.
    IsValidState &= true;
  }

  /// See AbstractState::indicatePessimisticFixpoint(...).
  void indicatePessimisticFixpoint() override {
    IsFixed = true;
    IsValidState = false;
  }
};

ChangeStatus AAReturnedValuesImpl::manifest(Attributor &A) {
  ChangeStatus Changed = ChangeStatus::UNCHANGED;

  // Bookkeeping.
  assert(isValidState());
  NumFnKnownReturns++;

  auto *LivenessAA = A.getAAFor<AAIsDead>(*this, getAnchorScope());

  // Check if we have an assumed unique return value that we could manifest.
  Optional<Value *> UniqueRV = getAssumedUniqueReturnValue(LivenessAA);

  if (!UniqueRV.hasValue() || !UniqueRV.getValue())
    return Changed;

  // Bookkeeping.
  NumFnUniqueReturned++;

  // If the assumed unique return value is an argument, annotate it.
  if (auto *UniqueRVArg = dyn_cast<Argument>(UniqueRV.getValue())) {
    // Temporarily adopt the argument as the associated value so that the
    // generic manifest logic places the "returned" attribute on it.
    AssociatedVal = UniqueRVArg;
    Changed = AbstractAttribute::manifest(A) | Changed;
  }

  return Changed;
}

const std::string AAReturnedValuesImpl::getAsStr() const {
  return (isAtFixpoint() ? "returns(#" : "may-return(#") +
         (isValidState() ? std::to_string(getNumReturnValues()) : "?") + ")";
}

Optional<Value *> AAReturnedValuesImpl::getAssumedUniqueReturnValue(
    const AAIsDead *LivenessAA) const {
  // If checkForallReturnedValues provides a unique value, ignoring potential
  // undef values that can also be present, it is assumed to be the actual
  // return value and forwarded to the caller of this method. If there are
  // multiple, a nullptr is returned indicating there cannot be a unique
  // returned value.
  Optional<Value *> UniqueRV;

  std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)> Pred =
      [&](Value &RV, const SmallPtrSetImpl<ReturnInst *> &RetInsts) -> bool {
    // If all ReturnInsts are dead, then ReturnValue is dead as well
    // and can be ignored.
    if (LivenessAA &&
        !LivenessAA->isLiveInstSet(RetInsts.begin(), RetInsts.end()))
      return true;

    // If we found a second returned value and neither the current nor the saved
    // one is an undef, there is no unique returned value. Undefs are special
    // since we can pretend they have any value.
    if (UniqueRV.hasValue() && UniqueRV != &RV &&
        !(isa<UndefValue>(RV) || isa<UndefValue>(UniqueRV.getValue()))) {
      UniqueRV = nullptr;
      return false;
    }

    // Do not overwrite a value with an undef.
    if (!UniqueRV.hasValue() || !isa<UndefValue>(RV))
      UniqueRV = &RV;

    return true;
  };

  if (!checkForallReturnedValues(Pred))
    UniqueRV = nullptr;

  return UniqueRV;
}

bool AAReturnedValuesImpl::checkForallReturnedValues(
    std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)> &Pred)
    const {
  if (!isValidState())
    return false;

  // Check all returned values but ignore call sites as long as we have not
  // encountered an overdefined one during an update.
  for (auto &It : ReturnedValues) {
    Value *RV = It.first;
    const SmallPtrSetImpl<ReturnInst *> &RetInsts = It.second;

    ImmutableCallSite ICS(RV);
    if (ICS && !HasOverdefinedReturnedCalls)
      continue;

    if (!Pred(*RV, RetInsts))
      return false;
  }

  return true;
}

ChangeStatus AAReturnedValuesImpl::updateImpl(Attributor &A) {

  // Check if we know of any values returned by the associated function,
  // if not, we are done.
  if (getNumReturnValues() == 0) {
    indicateOptimisticFixpoint();
    return ChangeStatus::UNCHANGED;
  }

  // Check if any of the returned values is a call site we can refine.
  decltype(ReturnedValues) AddRVs;
  bool HasCallSite = false;

  // Keep track of any change to trigger updates on dependent attributes.
  ChangeStatus Changed = ChangeStatus::UNCHANGED;

  auto *LivenessAA = A.getAAFor<AAIsDead>(*this, getAnchorScope());

  // Look at all returned call sites.
  for (auto &It : ReturnedValues) {
    SmallPtrSet<ReturnInst *, 2> &ReturnInsts = It.second;
    Value *RV = It.first;

    // Ignore dead ReturnValues.
    if (LivenessAA &&
        !LivenessAA->isLiveInstSet(ReturnInsts.begin(), ReturnInsts.end()))
      continue;

    LLVM_DEBUG(dbgs() << "[AAReturnedValues] Potentially returned value " << *RV
                      << "\n");

    // Only call sites can change during an update, ignore the rest.
    CallSite RetCS(RV);
    if (!RetCS)
      continue;

    // For now, any call site we see will prevent us from directly fixing the
    // state. However, if the information on the callees is fixed, the call
    // sites will be removed and we will fix the information for this state.
    HasCallSite = true;

    // Try to find an assumed unique return value for the called function.
    auto *RetCSAA = A.getAAFor<AAReturnedValuesImpl>(*this, *RV);
    if (!RetCSAA) {
      if (!HasOverdefinedReturnedCalls)
        Changed = ChangeStatus::CHANGED;
      HasOverdefinedReturnedCalls = true;
      LLVM_DEBUG(dbgs() << "[AAReturnedValues] Returned call site (" << *RV
                        << ") with " << (RetCSAA ? "invalid" : "no")
                        << " associated state\n");
      continue;
    }

    auto *LivenessCSAA = A.getAAFor<AAIsDead>(*this, RetCSAA->getAnchorScope());

    // Try to find an assumed unique return value for the called function.
    Optional<Value *> AssumedUniqueRV =
        RetCSAA->getAssumedUniqueReturnValue(LivenessCSAA);

    // If no assumed unique return value was found due to the lack of
    // candidates, we may need to resolve more calls (through more update
    // iterations) or the called function will not return. Either way, we simply
    // stick with the call sites as return values. Because there were not
    // multiple possibilities, we do not treat it as overdefined.
    if (!AssumedUniqueRV.hasValue())
      continue;

    // If multiple, non-refinable values were found, there cannot be a unique
    // return value for the called function. The returned call is overdefined!
    if (!AssumedUniqueRV.getValue()) {
      if (!HasOverdefinedReturnedCalls)
        Changed = ChangeStatus::CHANGED;
      HasOverdefinedReturnedCalls = true;
      LLVM_DEBUG(dbgs() << "[AAReturnedValues] Returned call site has multiple "
                           "potentially returned values\n");
      continue;
    }

    LLVM_DEBUG({
      bool UniqueRVIsKnown = RetCSAA->isAtFixpoint();
      dbgs() << "[AAReturnedValues] Returned call site "
             << (UniqueRVIsKnown ? "known" : "assumed")
             << " unique return value: " << *AssumedUniqueRV << "\n";
    });

    // The assumed unique return value.
    Value *AssumedRetVal = AssumedUniqueRV.getValue();

    // If the assumed unique return value is an argument, lookup the matching
    // call site operand and recursively collect new returned values.
    // If it is not an argument, it is just put into the set of returned values
    // as we would have already looked through casts, phis, and similar values.
    if (Argument *AssumedRetArg = dyn_cast<Argument>(AssumedRetVal))
      collectValuesRecursively(A,
                               RetCS.getArgOperand(AssumedRetArg->getArgNo()),
                               ReturnInsts, AddRVs);
    else
      AddRVs[AssumedRetVal].insert(ReturnInsts.begin(), ReturnInsts.end());
  }

  // Merge the newly discovered returned values into our state and record
  // whether anything actually changed.
  for (auto &It : AddRVs) {
    assert(!It.second.empty() && "Entry does not add anything.");
    auto &ReturnInsts = ReturnedValues[It.first];
    for (ReturnInst *RI : It.second)
      if (ReturnInsts.insert(RI).second) {
        LLVM_DEBUG(dbgs() << "[AAReturnedValues] Add new returned value "
                          << *It.first << " => " << *RI << "\n");
        Changed = ChangeStatus::CHANGED;
      }
  }

  // If there is no call site in the returned values we are done.
  if (!HasCallSite) {
    indicateOptimisticFixpoint();
    return ChangeStatus::CHANGED;
  }

  return Changed;
}

/// ------------------------ NoSync Function Attribute -------------------------

/// Deduces the "nosync" attribute: the function does not synchronize with
/// other threads (no volatile accesses, no non-relaxed atomics, no convergent
/// calls, and only nosync callees).
struct AANoSyncFunction : AANoSync, BooleanState {

  AANoSyncFunction(Function &F, InformationCache &InfoCache)
      : AANoSync(F, InfoCache) {}

  /// See AbstractAttribute::getState()
  /// {
  AbstractState &getState() override { return *this; }
  const AbstractState &getState() const override { return *this; }
  /// }

  /// See AbstractAttribute::getManifestPosition().
  ManifestPosition getManifestPosition() const override { return MP_FUNCTION; }

  const std::string getAsStr() const override {
    return getAssumed() ? "nosync" : "may-sync";
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;

  /// See AANoSync::isAssumedNoSync()
  bool isAssumedNoSync() const override { return getAssumed(); }

  /// See AANoSync::isKnownNoSync()
  bool isKnownNoSync() const override { return getKnown(); }

  /// Helper function used to determine whether an instruction is non-relaxed
  /// atomic. In other words, if an atomic instruction does not have unordered
  /// or monotonic ordering
  static bool isNonRelaxedAtomic(Instruction *I);

  /// Helper function used to determine whether an instruction is volatile.
  static bool isVolatile(Instruction *I);

  /// Helper function used to check if an intrinsic is volatile (memcpy,
  /// memmove, memset).
  static bool isNoSyncIntrinsic(Instruction *I);
};

bool AANoSyncFunction::isNonRelaxedAtomic(Instruction *I) {
  if (!I->isAtomic())
    return false;

  AtomicOrdering Ordering;
  switch (I->getOpcode()) {
  case Instruction::AtomicRMW:
    Ordering = cast<AtomicRMWInst>(I)->getOrdering();
    break;
  case Instruction::Store:
    Ordering = cast<StoreInst>(I)->getOrdering();
    break;
  case Instruction::Load:
    Ordering = cast<LoadInst>(I)->getOrdering();
    break;
  case Instruction::Fence: {
    auto *FI = cast<FenceInst>(I);
    // A single-thread fence does not synchronize with other threads.
    if (FI->getSyncScopeID() == SyncScope::SingleThread)
      return false;
    Ordering = FI->getOrdering();
    break;
  }
  case Instruction::AtomicCmpXchg: {
    AtomicOrdering Success = cast<AtomicCmpXchgInst>(I)->getSuccessOrdering();
    AtomicOrdering Failure = cast<AtomicCmpXchgInst>(I)->getFailureOrdering();
    // Only if both are relaxed, then it can be treated as relaxed.
    // Otherwise it is non-relaxed.
    if (Success != AtomicOrdering::Unordered &&
        Success != AtomicOrdering::Monotonic)
      return true;
    if (Failure != AtomicOrdering::Unordered &&
        Failure != AtomicOrdering::Monotonic)
      return true;
    return false;
  }
  default:
    llvm_unreachable(
        "New atomic operations need to be known in the attributor.");
  }

  // Relaxed.
  if (Ordering == AtomicOrdering::Unordered ||
      Ordering == AtomicOrdering::Monotonic)
    return false;
  return true;
}

/// Checks if an intrinsic is nosync. Currently only checks mem* intrinsics.
/// FIXME: We should improve the handling of intrinsics.
bool AANoSyncFunction::isNoSyncIntrinsic(Instruction *I) {
  if (auto *II = dyn_cast<IntrinsicInst>(I)) {
    switch (II->getIntrinsicID()) {
    /// Element wise atomic memory intrinsics can only be unordered,
    /// therefore nosync.
949 case Intrinsic::memset_element_unordered_atomic: 950 case Intrinsic::memmove_element_unordered_atomic: 951 case Intrinsic::memcpy_element_unordered_atomic: 952 return true; 953 case Intrinsic::memset: 954 case Intrinsic::memmove: 955 case Intrinsic::memcpy: 956 if (!cast<MemIntrinsic>(II)->isVolatile()) 957 return true; 958 return false; 959 default: 960 return false; 961 } 962 } 963 return false; 964 } 965 966 bool AANoSyncFunction::isVolatile(Instruction *I) { 967 assert(!ImmutableCallSite(I) && !isa<CallBase>(I) && 968 "Calls should not be checked here"); 969 970 switch (I->getOpcode()) { 971 case Instruction::AtomicRMW: 972 return cast<AtomicRMWInst>(I)->isVolatile(); 973 case Instruction::Store: 974 return cast<StoreInst>(I)->isVolatile(); 975 case Instruction::Load: 976 return cast<LoadInst>(I)->isVolatile(); 977 case Instruction::AtomicCmpXchg: 978 return cast<AtomicCmpXchgInst>(I)->isVolatile(); 979 default: 980 return false; 981 } 982 } 983 984 ChangeStatus AANoSyncFunction::updateImpl(Attributor &A) { 985 Function &F = getAnchorScope(); 986 987 auto *LivenessAA = A.getAAFor<AAIsDead>(*this, F); 988 989 /// We are looking for volatile instructions or Non-Relaxed atomics. 990 /// FIXME: We should ipmrove the handling of intrinsics. 991 for (Instruction *I : InfoCache.getReadOrWriteInstsForFunction(F)) { 992 // Skip assumed dead instructions. 
993 if (LivenessAA && LivenessAA->isAssumedDead(I)) 994 continue; 995 996 ImmutableCallSite ICS(I); 997 auto *NoSyncAA = A.getAAFor<AANoSyncFunction>(*this, *I); 998 999 if (isa<IntrinsicInst>(I) && isNoSyncIntrinsic(I)) 1000 continue; 1001 1002 if (ICS && (!NoSyncAA || !NoSyncAA->isAssumedNoSync()) && 1003 !ICS.hasFnAttr(Attribute::NoSync)) { 1004 indicatePessimisticFixpoint(); 1005 return ChangeStatus::CHANGED; 1006 } 1007 1008 if (ICS) 1009 continue; 1010 1011 if (!isVolatile(I) && !isNonRelaxedAtomic(I)) 1012 continue; 1013 1014 indicatePessimisticFixpoint(); 1015 return ChangeStatus::CHANGED; 1016 } 1017 1018 auto &OpcodeInstMap = InfoCache.getOpcodeInstMapForFunction(F); 1019 auto Opcodes = {(unsigned)Instruction::Invoke, (unsigned)Instruction::CallBr, 1020 (unsigned)Instruction::Call}; 1021 1022 for (unsigned Opcode : Opcodes) { 1023 for (Instruction *I : OpcodeInstMap[Opcode]) { 1024 // Skip assumed dead instructions. 1025 if (LivenessAA && LivenessAA->isAssumedDead(I)) 1026 continue; 1027 // At this point we handled all read/write effects and they are all 1028 // nosync, so they can be skipped. 1029 if (I->mayReadOrWriteMemory()) 1030 continue; 1031 1032 ImmutableCallSite ICS(I); 1033 1034 // non-convergent and readnone imply nosync. 1035 if (!ICS.isConvergent()) 1036 continue; 1037 1038 indicatePessimisticFixpoint(); 1039 return ChangeStatus::CHANGED; 1040 } 1041 } 1042 1043 return ChangeStatus::UNCHANGED; 1044 } 1045 1046 /// ------------------------ No-Free Attributes ---------------------------- 1047 1048 struct AANoFreeFunction : AbstractAttribute, BooleanState { 1049 1050 /// See AbstractAttribute::AbstractAttribute(...). 
  AANoFreeFunction(Function &F, InformationCache &InfoCache)
      : AbstractAttribute(F, InfoCache) {}

  /// See AbstractAttribute::getState()
  ///{
  AbstractState &getState() override { return *this; }
  const AbstractState &getState() const override { return *this; }
  ///}

  /// See AbstractAttribute::getManifestPosition().
  ManifestPosition getManifestPosition() const override { return MP_FUNCTION; }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr() const override {
    return getAssumed() ? "nofree" : "may-free";
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;

  /// See AbstractAttribute::getAttrKind().
  Attribute::AttrKind getAttrKind() const override { return ID; }

  /// Return true if "nofree" is assumed.
  bool isAssumedNoFree() const { return getAssumed(); }

  /// Return true if "nofree" is known.
  bool isKnownNoFree() const { return getKnown(); }

  /// The identifier used by the Attributor for this class of attributes.
  static constexpr Attribute::AttrKind ID = Attribute::NoFree;
};

ChangeStatus AANoFreeFunction::updateImpl(Attributor &A) {
  Function &F = getAnchorScope();

  auto *LivenessAA = A.getAAFor<AAIsDead>(*this, F);

  // The map from instruction opcodes to those instructions in the function.
  auto &OpcodeInstMap = InfoCache.getOpcodeInstMapForFunction(F);

  for (unsigned Opcode :
       {(unsigned)Instruction::Invoke, (unsigned)Instruction::CallBr,
        (unsigned)Instruction::Call}) {
    for (Instruction *I : OpcodeInstMap[Opcode]) {
      // Skip assumed dead instructions.
      if (LivenessAA && LivenessAA->isAssumedDead(I))
        continue;
      auto ICS = ImmutableCallSite(I);
      auto *NoFreeAA = A.getAAFor<AANoFreeFunction>(*this, *I);

      // A call may free memory unless the callee is assumed or declared
      // nofree.
      if ((!NoFreeAA || !NoFreeAA->isAssumedNoFree()) &&
          !ICS.hasFnAttr(Attribute::NoFree)) {
        indicatePessimisticFixpoint();
        return ChangeStatus::CHANGED;
      }
    }
  }
  return ChangeStatus::UNCHANGED;
}

/// ------------------------ NonNull Argument Attribute ------------------------
struct AANonNullImpl : AANonNull, BooleanState {

  AANonNullImpl(Value &V, InformationCache &InfoCache)
      : AANonNull(V, InfoCache) {}

  AANonNullImpl(Value *AssociatedVal, Value &AnchoredValue,
                InformationCache &InfoCache)
      : AANonNull(AssociatedVal, AnchoredValue, InfoCache) {}

  /// See AbstractAttribute::getState()
  /// {
  AbstractState &getState() override { return *this; }
  const AbstractState &getState() const override { return *this; }
  /// }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr() const override {
    return getAssumed() ? "nonnull" : "may-null";
  }

  /// See AANonNull::isAssumedNonNull().
  bool isAssumedNonNull() const override { return getAssumed(); }

  /// See AANonNull::isKnownNonNull().
  bool isKnownNonNull() const override { return getKnown(); }

  /// Generate a predicate that checks if a given value is assumed nonnull.
  /// The generated function returns true if a value satisfies any of
  /// following conditions.
  /// (i) A value is known nonZero(=nonnull).
  /// (ii) A value is associated with AANonNull and its isAssumedNonNull() is
  /// true.
  std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)>
  generatePredicate(Attributor &);
};

std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)>
AANonNullImpl::generatePredicate(Attributor &A) {
  // FIXME: The `AAReturnedValues` should provide the predicate with the
  // `ReturnInst` vector as well such that we can use the control flow sensitive
  // version of `isKnownNonZero`. This should fix `test11` in
  // `test/Transforms/FunctionAttrs/nonnull.ll`

  std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)> Pred =
      [&](Value &RV, const SmallPtrSetImpl<ReturnInst *> &RetInsts) -> bool {
    Function &F = getAnchorScope();

    // (i) The value is statically known to be non-zero.
    if (isKnownNonZero(&RV, F.getParent()->getDataLayout()))
      return true;

    auto *NonNullAA = A.getAAFor<AANonNull>(*this, RV);

    ImmutableCallSite ICS(&RV);

    // (ii) Assumed nonnull via the abstract attribute, or the call site
    // carries a nonnull return attribute.
    if ((!NonNullAA || !NonNullAA->isAssumedNonNull()) &&
        (!ICS || !ICS.hasRetAttr(Attribute::NonNull)))
      return false;

    return true;
  };

  return Pred;
}

/// NonNull attribute for function return value.
struct AANonNullReturned : AANonNullImpl {

  AANonNullReturned(Function &F, InformationCache &InfoCache)
      : AANonNullImpl(F, InfoCache) {}

  /// See AbstractAttribute::getManifestPosition().
  ManifestPosition getManifestPosition() const override { return MP_RETURNED; }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Function &F = getAnchorScope();

    // Already nonnull.
    if (F.getAttributes().hasAttribute(AttributeList::ReturnIndex,
                                       Attribute::NonNull) ||
        F.getAttributes().hasAttribute(AttributeList::ReturnIndex,
                                       Attribute::Dereferenceable))
      indicateOptimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;
};

ChangeStatus AANonNullReturned::updateImpl(Attributor &A) {
  Function &F = getAnchorScope();

  auto *AARetVal = A.getAAFor<AAReturnedValues>(*this, F);
  if (!AARetVal) {
    indicatePessimisticFixpoint();
    return ChangeStatus::CHANGED;
  }

  std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)> Pred =
      this->generatePredicate(A);

  if (!AARetVal->checkForallReturnedValues(Pred)) {
    indicatePessimisticFixpoint();
    return ChangeStatus::CHANGED;
  }
  return ChangeStatus::UNCHANGED;
}

/// NonNull attribute for function argument.
struct AANonNullArgument : AANonNullImpl {

  AANonNullArgument(Argument &A, InformationCache &InfoCache)
      : AANonNullImpl(A, InfoCache) {}

  /// See AbstractAttribute::getManifestPosition().
  ManifestPosition getManifestPosition() const override { return MP_ARGUMENT; }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Argument *Arg = cast<Argument>(getAssociatedValue());
    if (Arg->hasNonNullAttr())
      indicateOptimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;
};

/// NonNull attribute for a call site argument.
struct AANonNullCallSiteArgument : AANonNullImpl {

  /// See AANonNullImpl::AANonNullImpl(...).
  AANonNullCallSiteArgument(CallSite CS, unsigned ArgNo,
                            InformationCache &InfoCache)
      : AANonNullImpl(CS.getArgOperand(ArgNo), *CS.getInstruction(), InfoCache),
        ArgNo(ArgNo) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    CallSite CS(&getAnchoredValue());
    // Already known nonnull via an attribute, dereferenceability, or static
    // non-zero analysis of the operand.
    if (CS.paramHasAttr(ArgNo, getAttrKind()) ||
        CS.paramHasAttr(ArgNo, Attribute::Dereferenceable) ||
        isKnownNonZero(getAssociatedValue(),
                       getAnchorScope().getParent()->getDataLayout()))
      indicateOptimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(Attributor &A).
  ChangeStatus updateImpl(Attributor &A) override;

  /// See AbstractAttribute::getManifestPosition().
  ManifestPosition getManifestPosition() const override {
    return MP_CALL_SITE_ARGUMENT;
  };

  // Return argument index of associated value.
  int getArgNo() const { return ArgNo; }

private:
  unsigned ArgNo;
};
ChangeStatus AANonNullArgument::updateImpl(Attributor &A) {
  Function &F = getAnchorScope();
  Argument &Arg = cast<Argument>(getAnchoredValue());

  unsigned ArgNo = Arg.getArgNo();

  // Callback function
  std::function<bool(CallSite)> CallSiteCheck = [&](CallSite CS) {
    assert(CS && "Sanity check: Call site was not initialized properly!");

    auto *NonNullAA = A.getAAFor<AANonNull>(*this, *CS.getInstruction(), ArgNo);

    // Check that NonNullAA is AANonNullCallSiteArgument.
    if (NonNullAA) {
      ImmutableCallSite ICS(&NonNullAA->getAnchoredValue());
      if (ICS && CS.getInstruction() == ICS.getInstruction())
        return NonNullAA->isAssumedNonNull();
      return false;
    }

    if (CS.paramHasAttr(ArgNo, Attribute::NonNull))
      return true;

    Value *V = CS.getArgOperand(ArgNo);
    if (isKnownNonZero(V, getAnchorScope().getParent()->getDataLayout()))
      return true;

    return false;
  };
  // The argument is nonnull only if every call site passes a nonnull value.
  if (!A.checkForAllCallSites(F, CallSiteCheck, true, *this)) {
    indicatePessimisticFixpoint();
    return ChangeStatus::CHANGED;
  }
  return ChangeStatus::UNCHANGED;
}

ChangeStatus AANonNullCallSiteArgument::updateImpl(Attributor &A) {
  // NOTE: Never look at the argument of the callee in this method.
  // If we do this, "nonnull" is always deduced because of the assumption.

  Value &V = *getAssociatedValue();

  auto *NonNullAA = A.getAAFor<AANonNull>(*this, V);

  if (!NonNullAA || !NonNullAA->isAssumedNonNull()) {
    indicatePessimisticFixpoint();
    return ChangeStatus::CHANGED;
  }

  return ChangeStatus::UNCHANGED;
}

/// ------------------------ Will-Return Attributes ----------------------------

struct AAWillReturnImpl : public AAWillReturn, BooleanState {

  /// See AbstractAttribute::AbstractAttribute(...).
  AAWillReturnImpl(Function &F, InformationCache &InfoCache)
      : AAWillReturn(F, InfoCache) {}

  /// See AAWillReturn::isKnownWillReturn().
  bool isKnownWillReturn() const override { return getKnown(); }

  /// See AAWillReturn::isAssumedWillReturn().
  bool isAssumedWillReturn() const override { return getAssumed(); }

  /// See AbstractAttribute::getState(...).
  AbstractState &getState() override { return *this; }

  /// See AbstractAttribute::getState(...).
  const AbstractState &getState() const override { return *this; }

  /// See AbstractAttribute::getAsStr()
  const std::string getAsStr() const override {
    return getAssumed() ? "willreturn" : "may-noreturn";
  }
};

struct AAWillReturnFunction final : AAWillReturnImpl {

  /// See AbstractAttribute::AbstractAttribute(...).
  AAWillReturnFunction(Function &F, InformationCache &InfoCache)
      : AAWillReturnImpl(F, InfoCache) {}

  /// See AbstractAttribute::getManifestPosition().
  ManifestPosition getManifestPosition() const override { return MP_FUNCTION; }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override;

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;
};

// Helper function that checks whether a function has any cycle.
// TODO: Replace with more efficient code
bool containsCycle(Function &F) {
  SmallPtrSet<BasicBlock *, 32> Visited;

  // Traverse BB by dfs and check whether successor is already visited.
  // NOTE(review): this also flags forward/cross edges as "cycles", which is
  // conservative but safe for the endless-loop check below.
  for (BasicBlock *BB : depth_first(&F)) {
    Visited.insert(BB);
    for (auto *SuccBB : successors(BB)) {
      if (Visited.count(SuccBB))
        return true;
    }
  }
  return false;
}

// Helper function that checks whether the function has a loop which might
// become an endless loop.
// FIXME: Any cycle is regarded as endless loop for now.
//        We have to allow some patterns.
bool containsPossiblyEndlessLoop(Function &F) { return containsCycle(F); }

void AAWillReturnFunction::initialize(Attributor &A) {
  Function &F = getAnchorScope();

  if (containsPossiblyEndlessLoop(F))
    indicatePessimisticFixpoint();
}

ChangeStatus AAWillReturnFunction::updateImpl(Attributor &A) {
  Function &F = getAnchorScope();

  // The map from instruction opcodes to those instructions in the function.
  auto &OpcodeInstMap = InfoCache.getOpcodeInstMapForFunction(F);

  auto *LivenessAA = A.getAAFor<AAIsDead>(*this, F);

  for (unsigned Opcode :
       {(unsigned)Instruction::Invoke, (unsigned)Instruction::CallBr,
        (unsigned)Instruction::Call}) {
    for (Instruction *I : OpcodeInstMap[Opcode]) {
      // Skip assumed dead instructions.
      if (LivenessAA && LivenessAA->isAssumedDead(I))
        continue;

      auto ICS = ImmutableCallSite(I);

      if (ICS.hasFnAttr(Attribute::WillReturn))
        continue;

      auto *WillReturnAA = A.getAAFor<AAWillReturn>(*this, *I);
      if (!WillReturnAA || !WillReturnAA->isAssumedWillReturn()) {
        indicatePessimisticFixpoint();
        return ChangeStatus::CHANGED;
      }

      auto *NoRecurseAA = A.getAAFor<AANoRecurse>(*this, *I);

      // FIXME: (i) Prohibit any recursion for now.
      //        (ii) AANoRecurse isn't implemented yet so currently any call is
      //        regarded as having recursion.
      //        Code below should be
      //        if ((!NoRecurseAA || !NoRecurseAA->isAssumedNoRecurse()) &&
      if (!NoRecurseAA && !ICS.hasFnAttr(Attribute::NoRecurse)) {
        indicatePessimisticFixpoint();
        return ChangeStatus::CHANGED;
      }
    }
  }

  return ChangeStatus::UNCHANGED;
}

/// ------------------------ NoAlias Argument Attribute ------------------------

struct AANoAliasImpl : AANoAlias, BooleanState {

  AANoAliasImpl(Value &V, InformationCache &InfoCache)
      : AANoAlias(V, InfoCache) {}

  /// See AbstractAttribute::getState()
  /// {
  AbstractState &getState() override { return *this; }
  const AbstractState &getState() const override { return *this; }
  /// }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr() const override {
    return getAssumed() ? "noalias" : "may-alias";
  }

  /// See AANoAlias::isAssumedNoAlias().
  bool isAssumedNoAlias() const override { return getAssumed(); }

  /// See AANoAlias::isKnownNoAlias().
  bool isKnownNoAlias() const override { return getKnown(); }
};

/// NoAlias attribute for function return value.
struct AANoAliasReturned : AANoAliasImpl {

  AANoAliasReturned(Function &F, InformationCache &InfoCache)
      : AANoAliasImpl(F, InfoCache) {}

  /// See AbstractAttribute::getManifestPosition().
  virtual ManifestPosition getManifestPosition() const override {
    return MP_RETURNED;
  }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Function &F = getAnchorScope();

    // Already noalias.
    if (F.returnDoesNotAlias()) {
      indicateOptimisticFixpoint();
      return;
    }
  }

  /// See AbstractAttribute::updateImpl(...).
1488 virtual ChangeStatus updateImpl(Attributor &A) override; 1489 }; 1490 1491 ChangeStatus AANoAliasReturned::updateImpl(Attributor &A) { 1492 Function &F = getAnchorScope(); 1493 1494 auto *AARetValImpl = A.getAAFor<AAReturnedValuesImpl>(*this, F); 1495 if (!AARetValImpl) { 1496 indicatePessimisticFixpoint(); 1497 return ChangeStatus::CHANGED; 1498 } 1499 1500 std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)> Pred = 1501 [&](Value &RV, const SmallPtrSetImpl<ReturnInst *> &RetInsts) -> bool { 1502 if (Constant *C = dyn_cast<Constant>(&RV)) 1503 if (C->isNullValue() || isa<UndefValue>(C)) 1504 return true; 1505 1506 /// For now, we can only deduce noalias if we have call sites. 1507 /// FIXME: add more support. 1508 ImmutableCallSite ICS(&RV); 1509 if (!ICS) 1510 return false; 1511 1512 auto *NoAliasAA = A.getAAFor<AANoAlias>(*this, RV); 1513 1514 if (!ICS.returnDoesNotAlias() && 1515 (!NoAliasAA || !NoAliasAA->isAssumedNoAlias())) 1516 return false; 1517 1518 /// FIXME: We can improve capture check in two ways: 1519 /// 1. Use the AANoCapture facilities. 1520 /// 2. Use the location of return insts for escape queries. 1521 if (PointerMayBeCaptured(&RV, /* ReturnCaptures */ false, 1522 /* StoreCaptures */ true)) 1523 return false; 1524 1525 return true; 1526 }; 1527 1528 if (!AARetValImpl->checkForallReturnedValues(Pred)) { 1529 indicatePessimisticFixpoint(); 1530 return ChangeStatus::CHANGED; 1531 } 1532 1533 return ChangeStatus::UNCHANGED; 1534 } 1535 1536 /// -------------------AAIsDead Function Attribute----------------------- 1537 1538 struct AAIsDeadFunction : AAIsDead, BooleanState { 1539 1540 AAIsDeadFunction(Function &F, InformationCache &InfoCache) 1541 : AAIsDead(F, InfoCache) {} 1542 1543 /// See AbstractAttribute::getState() 1544 /// { 1545 AbstractState &getState() override { return *this; } 1546 const AbstractState &getState() const override { return *this; } 1547 /// } 1548 1549 /// See AbstractAttribute::getManifestPosition(). 
1550 ManifestPosition getManifestPosition() const override { return MP_FUNCTION; } 1551 1552 void initialize(Attributor &A) override { 1553 Function &F = getAnchorScope(); 1554 1555 ToBeExploredPaths.insert(&(F.getEntryBlock().front())); 1556 AssumedLiveBlocks.insert(&(F.getEntryBlock())); 1557 for (size_t i = 0; i < ToBeExploredPaths.size(); ++i) 1558 explorePath(A, ToBeExploredPaths[i]); 1559 } 1560 1561 /// Explores new instructions starting from \p I. If instruction is dead, stop 1562 /// and return true if it discovered a new instruction. 1563 bool explorePath(Attributor &A, Instruction *I); 1564 1565 const std::string getAsStr() const override { 1566 return "LiveBBs(" + std::to_string(AssumedLiveBlocks.size()) + "/" + 1567 std::to_string(getAnchorScope().size()) + ")"; 1568 } 1569 1570 /// See AbstractAttribute::manifest(...). 1571 ChangeStatus manifest(Attributor &A) override { 1572 assert(getState().isValidState() && 1573 "Attempted to manifest an invalid state!"); 1574 1575 ChangeStatus HasChanged = ChangeStatus::UNCHANGED; 1576 1577 for (Instruction *I : NoReturnCalls) { 1578 BasicBlock *BB = I->getParent(); 1579 1580 /// Invoke is replaced with a call and unreachable is placed after it. 1581 if (auto *II = dyn_cast<InvokeInst>(I)) { 1582 changeToCall(II); 1583 changeToUnreachable(BB->getTerminator(), /* UseLLVMTrap */ false); 1584 LLVM_DEBUG(dbgs() << "[AAIsDead] Replaced invoke with call inst\n"); 1585 continue; 1586 } 1587 1588 SplitBlock(BB, I->getNextNode()); 1589 changeToUnreachable(BB->getTerminator(), /* UseLLVMTrap */ false); 1590 HasChanged = ChangeStatus::CHANGED; 1591 } 1592 1593 return HasChanged; 1594 } 1595 1596 /// See AbstractAttribute::updateImpl(...). 1597 ChangeStatus updateImpl(Attributor &A) override; 1598 1599 /// See AAIsDead::isAssumedDead(BasicBlock *). 
1600 bool isAssumedDead(BasicBlock *BB) const override { 1601 assert(BB->getParent() == &getAnchorScope() && 1602 "BB must be in the same anchor scope function."); 1603 1604 if (!getAssumed()) 1605 return false; 1606 return !AssumedLiveBlocks.count(BB); 1607 } 1608 1609 /// See AAIsDead::isKnownDead(BasicBlock *). 1610 bool isKnownDead(BasicBlock *BB) const override { 1611 return getKnown() && isAssumedDead(BB); 1612 } 1613 1614 /// See AAIsDead::isAssumed(Instruction *I). 1615 bool isAssumedDead(Instruction *I) const override { 1616 assert(I->getParent()->getParent() == &getAnchorScope() && 1617 "Instruction must be in the same anchor scope function."); 1618 1619 if (!getAssumed()) 1620 return false; 1621 1622 // If it is not in AssumedLiveBlocks then it for sure dead. 1623 // Otherwise, it can still be after noreturn call in a live block. 1624 if (!AssumedLiveBlocks.count(I->getParent())) 1625 return true; 1626 1627 // If it is not after a noreturn call, than it is live. 1628 if (!isAfterNoReturn(I)) 1629 return false; 1630 1631 // Definitely dead. 1632 return true; 1633 } 1634 1635 /// See AAIsDead::isKnownDead(Instruction *I). 1636 bool isKnownDead(Instruction *I) const override { 1637 return getKnown() && isAssumedDead(I); 1638 } 1639 1640 /// Check if instruction is after noreturn call, in other words, assumed dead. 1641 bool isAfterNoReturn(Instruction *I) const; 1642 1643 /// Collection of to be explored paths. 1644 SmallSetVector<Instruction *, 8> ToBeExploredPaths; 1645 1646 /// Collection of all assumed live BasicBlocks. 1647 DenseSet<BasicBlock *> AssumedLiveBlocks; 1648 1649 /// Collection of calls with noreturn attribute, assumed or knwon. 
1650 SmallSetVector<Instruction *, 4> NoReturnCalls; 1651 }; 1652 1653 bool AAIsDeadFunction::isAfterNoReturn(Instruction *I) const { 1654 Instruction *PrevI = I->getPrevNode(); 1655 while (PrevI) { 1656 if (NoReturnCalls.count(PrevI)) 1657 return true; 1658 PrevI = PrevI->getPrevNode(); 1659 } 1660 return false; 1661 } 1662 1663 bool AAIsDeadFunction::explorePath(Attributor &A, Instruction *I) { 1664 BasicBlock *BB = I->getParent(); 1665 1666 while (I) { 1667 ImmutableCallSite ICS(I); 1668 1669 if (ICS) { 1670 auto *NoReturnAA = A.getAAFor<AANoReturn>(*this, *I); 1671 1672 if (NoReturnAA && NoReturnAA->isAssumedNoReturn()) { 1673 if (!NoReturnCalls.insert(I)) 1674 // If I is already in the NoReturnCalls set, then it stayed noreturn 1675 // and we didn't discover any new instructions. 1676 return false; 1677 1678 // Discovered new noreturn call, return true to indicate that I is not 1679 // noreturn anymore and should be deleted from NoReturnCalls. 1680 return true; 1681 } 1682 1683 if (ICS.hasFnAttr(Attribute::NoReturn)) { 1684 if (!NoReturnCalls.insert(I)) 1685 return false; 1686 1687 return true; 1688 } 1689 } 1690 1691 I = I->getNextNode(); 1692 } 1693 1694 // get new paths (reachable blocks). 1695 for (BasicBlock *SuccBB : successors(BB)) { 1696 Instruction *Inst = &(SuccBB->front()); 1697 AssumedLiveBlocks.insert(SuccBB); 1698 ToBeExploredPaths.insert(Inst); 1699 } 1700 1701 return true; 1702 } 1703 1704 ChangeStatus AAIsDeadFunction::updateImpl(Attributor &A) { 1705 // Temporary collection to iterate over existing noreturn instructions. This 1706 // will alow easier modification of NoReturnCalls collection 1707 SmallVector<Instruction *, 8> NoReturnChanged; 1708 ChangeStatus Status = ChangeStatus::UNCHANGED; 1709 1710 for (Instruction *I : NoReturnCalls) 1711 NoReturnChanged.push_back(I); 1712 1713 for (Instruction *I : NoReturnChanged) { 1714 size_t Size = ToBeExploredPaths.size(); 1715 1716 // Still noreturn. 
1717 if (!explorePath(A, I)) 1718 continue; 1719 1720 NoReturnCalls.remove(I); 1721 1722 // At least one new path. 1723 Status = ChangeStatus::CHANGED; 1724 1725 // No new paths. 1726 if (Size == ToBeExploredPaths.size()) 1727 continue; 1728 1729 // explore new paths. 1730 while (Size != ToBeExploredPaths.size()) 1731 explorePath(A, ToBeExploredPaths[Size++]); 1732 } 1733 1734 LLVM_DEBUG( 1735 dbgs() << "[AAIsDead] AssumedLiveBlocks: " << AssumedLiveBlocks.size() 1736 << " Total number of blocks: " << getAnchorScope().size() << "\n"); 1737 1738 return Status; 1739 } 1740 1741 /// -------------------- Dereferenceable Argument Attribute -------------------- 1742 1743 struct DerefState : AbstractState { 1744 1745 /// State representing for dereferenceable bytes. 1746 IntegerState DerefBytesState; 1747 1748 /// State representing that whether the value is nonnull or global. 1749 IntegerState NonNullGlobalState; 1750 1751 /// Bits encoding for NonNullGlobalState. 1752 enum { 1753 DEREF_NONNULL = 1 << 0, 1754 DEREF_GLOBAL = 1 << 1, 1755 }; 1756 1757 /// See AbstractState::isValidState() 1758 bool isValidState() const override { return DerefBytesState.isValidState(); } 1759 1760 // See AbstractState::isAtFixpoint() 1761 bool isAtFixpoint() const override { 1762 return DerefBytesState.isAtFixpoint() && NonNullGlobalState.isAtFixpoint(); 1763 } 1764 1765 /// See AbstractState::indicateOptimisticFixpoint(...) 1766 void indicateOptimisticFixpoint() override { 1767 DerefBytesState.indicateOptimisticFixpoint(); 1768 NonNullGlobalState.indicateOptimisticFixpoint(); 1769 } 1770 1771 /// See AbstractState::indicatePessimisticFixpoint(...) 1772 void indicatePessimisticFixpoint() override { 1773 DerefBytesState.indicatePessimisticFixpoint(); 1774 NonNullGlobalState.indicatePessimisticFixpoint(); 1775 } 1776 1777 /// Update known dereferenceable bytes. 
1778 void takeKnownDerefBytesMaximum(uint64_t Bytes) { 1779 DerefBytesState.takeKnownMaximum(Bytes); 1780 } 1781 1782 /// Update assumed dereferenceable bytes. 1783 void takeAssumedDerefBytesMinimum(uint64_t Bytes) { 1784 DerefBytesState.takeAssumedMinimum(Bytes); 1785 } 1786 1787 /// Update assumed NonNullGlobalState 1788 void updateAssumedNonNullGlobalState(bool IsNonNull, bool IsGlobal) { 1789 if (!IsNonNull) 1790 NonNullGlobalState.removeAssumedBits(DEREF_NONNULL); 1791 if (!IsGlobal) 1792 NonNullGlobalState.removeAssumedBits(DEREF_GLOBAL); 1793 } 1794 1795 /// Equality for DerefState. 1796 bool operator==(const DerefState &R) { 1797 return this->DerefBytesState == R.DerefBytesState && 1798 this->NonNullGlobalState == R.NonNullGlobalState; 1799 } 1800 }; 1801 struct AADereferenceableImpl : AADereferenceable, DerefState { 1802 1803 AADereferenceableImpl(Value &V, InformationCache &InfoCache) 1804 : AADereferenceable(V, InfoCache) {} 1805 1806 AADereferenceableImpl(Value *AssociatedVal, Value &AnchoredValue, 1807 InformationCache &InfoCache) 1808 : AADereferenceable(AssociatedVal, AnchoredValue, InfoCache) {} 1809 1810 /// See AbstractAttribute::getState() 1811 /// { 1812 AbstractState &getState() override { return *this; } 1813 const AbstractState &getState() const override { return *this; } 1814 /// } 1815 1816 /// See AADereferenceable::getAssumedDereferenceableBytes(). 1817 uint32_t getAssumedDereferenceableBytes() const override { 1818 return DerefBytesState.getAssumed(); 1819 } 1820 1821 /// See AADereferenceable::getKnownDereferenceableBytes(). 1822 uint32_t getKnownDereferenceableBytes() const override { 1823 return DerefBytesState.getKnown(); 1824 } 1825 1826 // Helper function for syncing nonnull state. 
1827 void syncNonNull(const AANonNull *NonNullAA) { 1828 if (!NonNullAA) { 1829 NonNullGlobalState.removeAssumedBits(DEREF_NONNULL); 1830 return; 1831 } 1832 1833 if (NonNullAA->isKnownNonNull()) 1834 NonNullGlobalState.addKnownBits(DEREF_NONNULL); 1835 1836 if (!NonNullAA->isAssumedNonNull()) 1837 NonNullGlobalState.removeAssumedBits(DEREF_NONNULL); 1838 } 1839 1840 /// See AADereferenceable::isAssumedGlobal(). 1841 bool isAssumedGlobal() const override { 1842 return NonNullGlobalState.isAssumed(DEREF_GLOBAL); 1843 } 1844 1845 /// See AADereferenceable::isKnownGlobal(). 1846 bool isKnownGlobal() const override { 1847 return NonNullGlobalState.isKnown(DEREF_GLOBAL); 1848 } 1849 1850 /// See AADereferenceable::isAssumedNonNull(). 1851 bool isAssumedNonNull() const override { 1852 return NonNullGlobalState.isAssumed(DEREF_NONNULL); 1853 } 1854 1855 /// See AADereferenceable::isKnownNonNull(). 1856 bool isKnownNonNull() const override { 1857 return NonNullGlobalState.isKnown(DEREF_NONNULL); 1858 } 1859 1860 void getDeducedAttributes(SmallVectorImpl<Attribute> &Attrs) const override { 1861 LLVMContext &Ctx = AnchoredVal.getContext(); 1862 1863 // TODO: Add *_globally support 1864 if (isAssumedNonNull()) 1865 Attrs.emplace_back(Attribute::getWithDereferenceableBytes( 1866 Ctx, getAssumedDereferenceableBytes())); 1867 else 1868 Attrs.emplace_back(Attribute::getWithDereferenceableOrNullBytes( 1869 Ctx, getAssumedDereferenceableBytes())); 1870 } 1871 uint64_t computeAssumedDerefenceableBytes(Attributor &A, Value &V, 1872 bool &IsNonNull, bool &IsGlobal); 1873 1874 void initialize(Attributor &A) override { 1875 Function &F = getAnchorScope(); 1876 unsigned AttrIdx = 1877 getAttrIndex(getManifestPosition(), getArgNo(getAnchoredValue())); 1878 1879 for (Attribute::AttrKind AK : 1880 {Attribute::Dereferenceable, Attribute::DereferenceableOrNull}) 1881 if (F.getAttributes().hasAttribute(AttrIdx, AK)) 1882 takeKnownDerefBytesMaximum(F.getAttribute(AttrIdx, AK).getValueAsInt()); 
1883 } 1884 1885 /// See AbstractAttribute::getAsStr(). 1886 const std::string getAsStr() const override { 1887 if (!getAssumedDereferenceableBytes()) 1888 return "unknown-dereferenceable"; 1889 return std::string("dereferenceable") + 1890 (isAssumedNonNull() ? "" : "_or_null") + 1891 (isAssumedGlobal() ? "_globally" : "") + "<" + 1892 std::to_string(getKnownDereferenceableBytes()) + "-" + 1893 std::to_string(getAssumedDereferenceableBytes()) + ">"; 1894 } 1895 }; 1896 1897 struct AADereferenceableReturned : AADereferenceableImpl { 1898 AADereferenceableReturned(Function &F, InformationCache &InfoCache) 1899 : AADereferenceableImpl(F, InfoCache) {} 1900 1901 /// See AbstractAttribute::getManifestPosition(). 1902 ManifestPosition getManifestPosition() const override { return MP_RETURNED; } 1903 1904 /// See AbstractAttribute::updateImpl(...). 1905 ChangeStatus updateImpl(Attributor &A) override; 1906 }; 1907 1908 // Helper function that returns dereferenceable bytes. 1909 static uint64_t calcDifferenceIfBaseIsNonNull(int64_t DerefBytes, 1910 int64_t Offset, bool IsNonNull) { 1911 if (!IsNonNull) 1912 return 0; 1913 return std::max((int64_t)0, DerefBytes - Offset); 1914 } 1915 1916 uint64_t AADereferenceableImpl::computeAssumedDerefenceableBytes( 1917 Attributor &A, Value &V, bool &IsNonNull, bool &IsGlobal) { 1918 // TODO: Tracking the globally flag. 1919 IsGlobal = false; 1920 1921 // First, we try to get information about V from Attributor. 1922 if (auto *DerefAA = A.getAAFor<AADereferenceable>(*this, V)) { 1923 IsNonNull &= DerefAA->isAssumedNonNull(); 1924 return DerefAA->getAssumedDereferenceableBytes(); 1925 } 1926 1927 // Otherwise, we try to compute assumed bytes from base pointer. 
  const DataLayout &DL = getAnchorScope().getParent()->getDataLayout();
  unsigned IdxWidth =
      DL.getIndexSizeInBits(V.getType()->getPointerAddressSpace());
  APInt Offset(IdxWidth, 0);
  Value *Base = V.stripAndAccumulateInBoundsConstantOffsets(DL, Offset);

  if (auto *BaseDerefAA = A.getAAFor<AADereferenceable>(*this, *Base)) {
    // NOTE(review): non-null-ness of the derived pointer is only kept when the
    // accumulated offset is non-zero; the base's own non-null assumption is
    // used solely for the byte computation below -- confirm this is intended.
    IsNonNull &= Offset != 0;
    return calcDifferenceIfBaseIsNonNull(
        BaseDerefAA->getAssumedDereferenceableBytes(), Offset.getSExtValue(),
        Offset != 0 || BaseDerefAA->isAssumedNonNull());
  }

  // Then, use IR information.

  if (isDereferenceablePointer(Base, Base->getType(), DL))
    return calcDifferenceIfBaseIsNonNull(
        DL.getTypeStoreSize(Base->getType()->getPointerElementType()),
        Offset.getSExtValue(),
        !NullPointerIsDefined(&getAnchorScope(),
                              V.getType()->getPointerAddressSpace()));

  // Nothing known: neither bytes nor non-null-ness.
  IsNonNull = false;
  return 0;
}

/// See AbstractAttribute::updateImpl(...).
ChangeStatus AADereferenceableReturned::updateImpl(Attributor &A) {
  Function &F = getAnchorScope();
  auto BeforeState = static_cast<DerefState>(*this);

  syncNonNull(A.getAAFor<AANonNull>(*this, F));

  auto *AARetVal = A.getAAFor<AAReturnedValues>(*this, F);
  if (!AARetVal) {
    indicatePessimisticFixpoint();
    return ChangeStatus::CHANGED;
  }

  bool IsNonNull = isAssumedNonNull();
  bool IsGlobal = isAssumedGlobal();

  // Intersect the assumed dereferenceable bytes over all potentially returned
  // values.
  std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)> Pred =
      [&](Value &RV, const SmallPtrSetImpl<ReturnInst *> &RetInsts) -> bool {
    takeAssumedDerefBytesMinimum(
        computeAssumedDerefenceableBytes(A, RV, IsNonNull, IsGlobal));
    return isValidState();
  };

  if (AARetVal->checkForallReturnedValues(Pred)) {
    updateAssumedNonNullGlobalState(IsNonNull, IsGlobal);
    return BeforeState == static_cast<DerefState>(*this)
               ? ChangeStatus::UNCHANGED
               : ChangeStatus::CHANGED;
  }
  indicatePessimisticFixpoint();
  return ChangeStatus::CHANGED;
}

/// Dereferenceable attribute for a function argument.
struct AADereferenceableArgument : AADereferenceableImpl {
  AADereferenceableArgument(Argument &A, InformationCache &InfoCache)
      : AADereferenceableImpl(A, InfoCache) {}

  /// See AbstractAttribute::getManifestPosition().
  ManifestPosition getManifestPosition() const override { return MP_ARGUMENT; }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;
};

ChangeStatus AADereferenceableArgument::updateImpl(Attributor &A) {
  Function &F = getAnchorScope();
  Argument &Arg = cast<Argument>(getAnchoredValue());

  auto BeforeState = static_cast<DerefState>(*this);

  unsigned ArgNo = Arg.getArgNo();

  syncNonNull(A.getAAFor<AANonNull>(*this, F, ArgNo));

  bool IsNonNull = isAssumedNonNull();
  bool IsGlobal = isAssumedGlobal();

  // Callback function executed for every call site of the function.
  std::function<bool(CallSite)> CallSiteCheck = [&](CallSite CS) -> bool {
    assert(CS && "Sanity check: Call site was not initialized properly!");

    // Check that DereferenceableAA is AADereferenceableCallSiteArgument.
    if (auto *DereferenceableAA =
            A.getAAFor<AADereferenceable>(*this, *CS.getInstruction(), ArgNo)) {
      ImmutableCallSite ICS(&DereferenceableAA->getAnchoredValue());
      if (ICS && CS.getInstruction() == ICS.getInstruction()) {
        takeAssumedDerefBytesMinimum(
            DereferenceableAA->getAssumedDereferenceableBytes());
        IsNonNull &= DereferenceableAA->isAssumedNonNull();
        IsGlobal &= DereferenceableAA->isAssumedGlobal();
        return isValidState();
      }
    }

    // No matching call site attribute: compute the bytes from the actual call
    // site operand instead.
    takeAssumedDerefBytesMinimum(computeAssumedDerefenceableBytes(
        A, *CS.getArgOperand(ArgNo), IsNonNull, IsGlobal));

    return isValidState();
  };

  if (!A.checkForAllCallSites(F, CallSiteCheck, true, *this)) {
    indicatePessimisticFixpoint();
    return ChangeStatus::CHANGED;
  }

  updateAssumedNonNullGlobalState(IsNonNull, IsGlobal);

  return BeforeState == static_cast<DerefState>(*this) ? ChangeStatus::UNCHANGED
                                                       : ChangeStatus::CHANGED;
}

/// Dereferenceable attribute for a call site argument.
struct AADereferenceableCallSiteArgument : AADereferenceableImpl {

  /// See AADereferenceableImpl::AADereferenceableImpl(...).
  AADereferenceableCallSiteArgument(CallSite CS, unsigned ArgNo,
                                    InformationCache &InfoCache)
      : AADereferenceableImpl(CS.getArgOperand(ArgNo), *CS.getInstruction(),
                              InfoCache),
        ArgNo(ArgNo) {}

  /// See AbstractAttribute::initialize(...).
  /// Seed the known state from attributes already present at the call site.
  void initialize(Attributor &A) override {
    CallSite CS(&getAnchoredValue());
    if (CS.paramHasAttr(ArgNo, Attribute::Dereferenceable))
      takeKnownDerefBytesMaximum(
          CS.getDereferenceableBytes(ArgNo + AttributeList::FirstArgIndex));

    if (CS.paramHasAttr(ArgNo, Attribute::DereferenceableOrNull))
      takeKnownDerefBytesMaximum(CS.getDereferenceableOrNullBytes(
          ArgNo + AttributeList::FirstArgIndex));
  }

  /// See AbstractAttribute::updateImpl(Attributor &A).
  ChangeStatus updateImpl(Attributor &A) override;

  /// See AbstractAttribute::getManifestPosition().
  ManifestPosition getManifestPosition() const override {
    return MP_CALL_SITE_ARGUMENT;
  };

  // Return argument index of associated value.
  int getArgNo() const { return ArgNo; }

private:
  unsigned ArgNo;
};

ChangeStatus AADereferenceableCallSiteArgument::updateImpl(Attributor &A) {
  // NOTE: Never look at the argument of the callee in this method.
  //       If we do this, "dereferenceable" is always deduced because of the
  //       assumption.

  Value &V = *getAssociatedValue();

  auto BeforeState = static_cast<DerefState>(*this);

  syncNonNull(A.getAAFor<AANonNull>(*this, getAnchoredValue(), ArgNo));
  bool IsNonNull = isAssumedNonNull();
  // NOTE(review): unlike the other updateImpl methods this reads the *known*
  // global state here, not the assumed one -- confirm that is intended.
  bool IsGlobal = isKnownGlobal();

  takeAssumedDerefBytesMinimum(
      computeAssumedDerefenceableBytes(A, V, IsNonNull, IsGlobal));
  updateAssumedNonNullGlobalState(IsNonNull, IsGlobal);

  return BeforeState == static_cast<DerefState>(*this) ? ChangeStatus::UNCHANGED
                                                       : ChangeStatus::CHANGED;
}

// ------------------------ Align Argument Attribute ------------------------

struct AAAlignImpl : AAAlign, IntegerState {

  // Max alignment value allowed in IR
  static const unsigned MAX_ALIGN = 1U << 29;

  AAAlignImpl(Value *AssociatedVal, Value &AnchoredValue,
              InformationCache &InfoCache)
      : AAAlign(AssociatedVal, AnchoredValue, InfoCache),
        IntegerState(MAX_ALIGN) {}

  AAAlignImpl(Value &V, InformationCache &InfoCache)
      : AAAlignImpl(&V, V, InfoCache) {}

  /// See AbstractAttribute::getState()
  /// {
  AbstractState &getState() override { return *this; }
  const AbstractState &getState() const override { return *this; }
  /// }

  /// See AbstractAttribute::getAsStr().
  virtual const std::string getAsStr() const override {
    return getAssumedAlign() ? ("align<" + std::to_string(getKnownAlign()) +
                                "-" + std::to_string(getAssumedAlign()) + ">")
                             : "unknown-align";
  }

  /// See AAAlign::getAssumedAlign().
  unsigned getAssumedAlign() const override { return getAssumed(); }

  /// See AAAlign::getKnownAlign().
  unsigned getKnownAlign() const override { return getKnown(); }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Function &F = getAnchorScope();

    unsigned AttrIdx =
        getAttrIndex(getManifestPosition(), getArgNo(getAnchoredValue()));

    // Already the function has align attribute on return value or argument.
    if (F.getAttributes().hasAttribute(AttrIdx, ID))
      addKnownBits(F.getAttribute(AttrIdx, ID).getAlignment());
  }

  /// See AbstractAttribute::getDeducedAttributes
  virtual void
  getDeducedAttributes(SmallVectorImpl<Attribute> &Attrs) const override {
    LLVMContext &Ctx = AnchoredVal.getContext();

    Attrs.emplace_back(Attribute::getWithAlignment(Ctx, getAssumedAlign()));
  }
};

/// Align attribute for function return value.
struct AAAlignReturned : AAAlignImpl {

  AAAlignReturned(Function &F, InformationCache &InfoCache)
      : AAAlignImpl(F, InfoCache) {}

  /// See AbstractAttribute::getManifestPosition().
  virtual ManifestPosition getManifestPosition() const override {
    return MP_RETURNED;
  }

  /// See AbstractAttribute::updateImpl(...).
  virtual ChangeStatus updateImpl(Attributor &A) override;
};

ChangeStatus AAAlignReturned::updateImpl(Attributor &A) {
  Function &F = getAnchorScope();
  auto *AARetValImpl = A.getAAFor<AAReturnedValuesImpl>(*this, F);
  if (!AARetValImpl) {
    indicatePessimisticFixpoint();
    return ChangeStatus::CHANGED;
  }

  // Currently, align<n> is deduced if alignments in return values are assumed
  // as greater than n.
  // We reach a pessimistic fixpoint if any of the return values
  // wouldn't have align. If no assumed state was used for reasoning, an
  // optimistic fixpoint is reached earlier.

  base_t BeforeState = getAssumed();
  std::function<bool(Value &, const SmallPtrSetImpl<ReturnInst *> &)> Pred =
      [&](Value &RV, const SmallPtrSetImpl<ReturnInst *> &RetInsts) -> bool {
    auto *AlignAA = A.getAAFor<AAAlign>(*this, RV);

    if (AlignAA)
      takeAssumedMinimum(AlignAA->getAssumedAlign());
    else
      // Use IR information.
      takeAssumedMinimum(RV.getPointerAlignment(
          getAnchorScope().getParent()->getDataLayout()));

    return isValidState();
  };

  if (!AARetValImpl->checkForallReturnedValues(Pred)) {
    indicatePessimisticFixpoint();
    return ChangeStatus::CHANGED;
  }

  return (getAssumed() != BeforeState) ? ChangeStatus::CHANGED
                                       : ChangeStatus::UNCHANGED;
}

/// Align attribute for function argument.
struct AAAlignArgument : AAAlignImpl {

  AAAlignArgument(Argument &A, InformationCache &InfoCache)
      : AAAlignImpl(A, InfoCache) {}

  /// See AbstractAttribute::getManifestPosition().
  virtual ManifestPosition getManifestPosition() const override {
    return MP_ARGUMENT;
  }

  /// See AbstractAttribute::updateImpl(...).
  virtual ChangeStatus updateImpl(Attributor &A) override;
};

ChangeStatus AAAlignArgument::updateImpl(Attributor &A) {

  Function &F = getAnchorScope();
  Argument &Arg = cast<Argument>(getAnchoredValue());

  unsigned ArgNo = Arg.getArgNo();
  const DataLayout &DL = F.getParent()->getDataLayout();

  auto BeforeState = getAssumed();

  // Callback function executed for every call site of the function.
  std::function<bool(CallSite)> CallSiteCheck = [&](CallSite CS) {
    assert(CS && "Sanity check: Call site was not initialized properly!");

    auto *AlignAA = A.getAAFor<AAAlign>(*this, *CS.getInstruction(), ArgNo);

    // Check that AlignAA is AAAlignCallSiteArgument.
    if (AlignAA) {
      ImmutableCallSite ICS(&AlignAA->getAnchoredValue());
      if (ICS && CS.getInstruction() == ICS.getInstruction()) {
        takeAssumedMinimum(AlignAA->getAssumedAlign());
        return isValidState();
      }
    }

    // Fall back to the IR alignment of the actual call site operand.
    Value *V = CS.getArgOperand(ArgNo);
    takeAssumedMinimum(V->getPointerAlignment(DL));
    return isValidState();
  };

  if (!A.checkForAllCallSites(F, CallSiteCheck, true, *this))
    indicatePessimisticFixpoint();

  return BeforeState == getAssumed() ? ChangeStatus::UNCHANGED
                                     : ChangeStatus::CHANGED;
}

/// Align attribute for a call site argument.
struct AAAlignCallSiteArgument : AAAlignImpl {

  /// See AANonNullImpl::AANonNullImpl(...).
  AAAlignCallSiteArgument(CallSite CS, unsigned ArgNo,
                          InformationCache &InfoCache)
      : AAAlignImpl(CS.getArgOperand(ArgNo), *CS.getInstruction(), InfoCache),
        ArgNo(ArgNo) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    // NOTE(review): CS is unused here -- presumably a leftover.
    CallSite CS(&getAnchoredValue());
    takeKnownMaximum(getAssociatedValue()->getPointerAlignment(
        getAnchorScope().getParent()->getDataLayout()));
  }

  /// See AbstractAttribute::updateImpl(Attributor &A).
  ChangeStatus updateImpl(Attributor &A) override;

  /// See AbstractAttribute::getManifestPosition().
  ManifestPosition getManifestPosition() const override {
    return MP_CALL_SITE_ARGUMENT;
  };

  // Return argument index of associated value.
  int getArgNo() const { return ArgNo; }

private:
  unsigned ArgNo;
};

ChangeStatus AAAlignCallSiteArgument::updateImpl(Attributor &A) {
  // NOTE: Never look at the argument of the callee in this method.
  //       If we do this, "align" is always deduced because of the assumption.

  auto BeforeState = getAssumed();

  Value &V = *getAssociatedValue();

  auto *AlignAA = A.getAAFor<AAAlign>(*this, V);

  if (AlignAA)
    takeAssumedMinimum(AlignAA->getAssumedAlign());
  else
    indicatePessimisticFixpoint();

  return BeforeState == getAssumed() ? ChangeStatus::UNCHANGED
                                     : ChangeStatus::CHANGED;
}

/// ----------------------------------------------------------------------------
/// Attributor
/// ----------------------------------------------------------------------------

bool Attributor::checkForAllCallSites(Function &F,
                                      std::function<bool(CallSite)> &Pred,
                                      bool RequireAllCallSites,
                                      AbstractAttribute &AA) {
  // We can try to determine information from
  // the call sites. However, this is only possible if all call sites are
  // known, hence the function has internal linkage.
  if (RequireAllCallSites && !F.hasInternalLinkage()) {
    LLVM_DEBUG(
        dbgs()
        << "Attributor: Function " << F.getName()
        << " has no internal linkage, hence not all call sites are known\n");
    return false;
  }

  for (const Use &U : F.uses()) {
    Instruction *I = cast<Instruction>(U.getUser());
    Function *AnchorValue = I->getParent()->getParent();

    auto *LivenessAA = getAAFor<AAIsDead>(AA, *AnchorValue);

    // Skip dead calls.
    if (LivenessAA && LivenessAA->isAssumedDead(I))
      continue;

    CallSite CS(U.getUser());
    // A use is only a valid call site if it is the callee operand of a call
    // in a caller with an exact definition.
    if (!CS || !CS.isCallee(&U) || !CS.getCaller()->hasExactDefinition()) {
      if (!RequireAllCallSites)
        continue;

      LLVM_DEBUG(dbgs() << "Attributor: User " << *U.getUser()
                        << " is an invalid use of " << F.getName() << "\n");
      return false;
    }

    if (Pred(CS))
      continue;

    LLVM_DEBUG(dbgs() << "Attributor: Call site callback failed for "
                      << *CS.getInstruction() << "\n");
    return false;
  }

  return true;
}

ChangeStatus Attributor::run() {
  // Initialize all abstract attributes.
  for (AbstractAttribute *AA : AllAbstractAttributes)
    AA->initialize(*this);

  LLVM_DEBUG(dbgs() << "[Attributor] Identified and initialized "
                    << AllAbstractAttributes.size()
                    << " abstract attributes.\n");

  // Now that all abstract attributes are collected and initialized we start
  // the abstract analysis.

  unsigned IterationCounter = 1;

  SmallVector<AbstractAttribute *, 64> ChangedAAs;
  SetVector<AbstractAttribute *> Worklist;
  Worklist.insert(AllAbstractAttributes.begin(), AllAbstractAttributes.end());

  do {
    LLVM_DEBUG(dbgs() << "\n\n[Attributor] #Iteration: " << IterationCounter
                      << ", Worklist size: " << Worklist.size() << "\n");

    // Add all abstract attributes that are potentially dependent on one that
    // changed to the work list.
    for (AbstractAttribute *ChangedAA : ChangedAAs) {
      auto &QuerriedAAs = QueryMap[ChangedAA];
      Worklist.insert(QuerriedAAs.begin(), QuerriedAAs.end());
    }

    // Reset the changed set.
    ChangedAAs.clear();

    // Update all abstract attribute in the work list and record the ones that
    // changed.
    for (AbstractAttribute *AA : Worklist)
      if (AA->update(*this) == ChangeStatus::CHANGED)
        ChangedAAs.push_back(AA);

    // Reset the work list and repopulate with the changed abstract attributes.
    // Note that dependent ones are added above.
    Worklist.clear();
    Worklist.insert(ChangedAAs.begin(), ChangedAAs.end());

  } while (!Worklist.empty() && ++IterationCounter < MaxFixpointIterations);

  LLVM_DEBUG(dbgs() << "\n[Attributor] Fixpoint iteration done after: "
                    << IterationCounter << "/" << MaxFixpointIterations
                    << " iterations\n");

  bool FinishedAtFixpoint = Worklist.empty();

  // Reset abstract arguments not settled in a sound fixpoint by now. This
  // happens when we stopped the fixpoint iteration early. Note that only the
  // ones marked as "changed" *and* the ones transitively depending on them
  // need to be reverted to a pessimistic state. Others might not be in a
  // fixpoint state but we can use the optimistic results for them anyway.
  SmallPtrSet<AbstractAttribute *, 32> Visited;
  // Note: ChangedAAs grows while we iterate (transitive dependers are
  // appended below), hence the index-based loop.
  for (unsigned u = 0; u < ChangedAAs.size(); u++) {
    AbstractAttribute *ChangedAA = ChangedAAs[u];
    if (!Visited.insert(ChangedAA).second)
      continue;

    AbstractState &State = ChangedAA->getState();
    if (!State.isAtFixpoint()) {
      State.indicatePessimisticFixpoint();

      NumAttributesTimedOut++;
    }

    auto &QuerriedAAs = QueryMap[ChangedAA];
    ChangedAAs.append(QuerriedAAs.begin(), QuerriedAAs.end());
  }

  LLVM_DEBUG({
    if (!Visited.empty())
      dbgs() << "\n[Attributor] Finalized " << Visited.size()
             << " abstract attributes.\n";
  });

  unsigned NumManifested = 0;
  unsigned NumAtFixpoint = 0;
  ChangeStatus ManifestChange = ChangeStatus::UNCHANGED;
  for (AbstractAttribute *AA : AllAbstractAttributes) {
    AbstractState &State = AA->getState();

    // If there is not already a fixpoint reached, we can now take the
    // optimistic state. This is correct because we enforced a pessimistic one
    // on abstract attributes that were transitively dependent on a changed one
    // already above.
    if (!State.isAtFixpoint())
      State.indicateOptimisticFixpoint();

    // If the state is invalid, we do not try to manifest it.
    if (!State.isValidState())
      continue;

    // Manifest the state and record if we changed the IR.
    ChangeStatus LocalChange = AA->manifest(*this);
    ManifestChange = ManifestChange | LocalChange;

    NumAtFixpoint++;
    NumManifested += (LocalChange == ChangeStatus::CHANGED);
  }

  (void)NumManifested;
  (void)NumAtFixpoint;
  LLVM_DEBUG(dbgs() << "\n[Attributor] Manifested " << NumManifested
                    << " arguments while " << NumAtFixpoint
                    << " were in a valid fixpoint state\n");

  // If verification is requested, we finished this run at a fixpoint, and the
  // IR was changed, we re-run the whole fixpoint analysis, starting at
  // re-initialization of the arguments. This re-run should not result in an IR
  // change. Though, the (virtual) state of attributes at the end of the re-run
  // might be more optimistic than the known state or the IR state if the better
  // state cannot be manifested.
  if (VerifyAttributor && FinishedAtFixpoint &&
      ManifestChange == ChangeStatus::CHANGED) {
    VerifyAttributor = false;
    ChangeStatus VerifyStatus = run();
    if (VerifyStatus != ChangeStatus::UNCHANGED)
      llvm_unreachable(
          "Attributor verification failed, re-run did result in an IR change "
          "even after a fixpoint was reached in the original run. (False "
          "positives possible!)");
    VerifyAttributor = true;
  }

  NumAttributesManifested += NumManifested;
  NumAttributesValidFixpoint += NumAtFixpoint;

  return ManifestChange;
}

void Attributor::identifyDefaultAbstractAttributes(
    Function &F, InformationCache &InfoCache,
    DenseSet</* Attribute::AttrKind */ unsigned> *Whitelist) {

  // Every function can be nounwind.
  registerAA(*new AANoUnwindFunction(F, InfoCache));

  // Every function might be marked "nosync"
  registerAA(*new AANoSyncFunction(F, InfoCache));

  // Every function might be "no-free".
  registerAA(*new AANoFreeFunction(F, InfoCache));

  // Return attributes are only appropriate if the return type is non void.
  Type *ReturnType = F.getReturnType();
  if (!ReturnType->isVoidTy()) {
    // Argument attribute "returned" --- Create only one per function even
    // though it is an argument attribute.
    if (!Whitelist || Whitelist->count(AAReturnedValues::ID))
      registerAA(*new AAReturnedValuesImpl(F, InfoCache));

    if (ReturnType->isPointerTy()) {
      // Every function with pointer return type might be marked align.
      if (!Whitelist || Whitelist->count(AAAlignReturned::ID))
        registerAA(*new AAAlignReturned(F, InfoCache));

      // Every function with pointer return type might be marked nonnull.
      if (!Whitelist || Whitelist->count(AANonNullReturned::ID))
        registerAA(*new AANonNullReturned(F, InfoCache));

      // Every function with pointer return type might be marked noalias.
      if (!Whitelist || Whitelist->count(AANoAliasReturned::ID))
        registerAA(*new AANoAliasReturned(F, InfoCache));

      // Every function with pointer return type might be marked
      // dereferenceable.
      // NOTE(review): the isPointerTy() check below is redundant inside this
      // branch.
      if (ReturnType->isPointerTy() &&
          (!Whitelist || Whitelist->count(AADereferenceableReturned::ID)))
        registerAA(*new AADereferenceableReturned(F, InfoCache));
    }
  }

  for (Argument &Arg : F.args()) {
    if (Arg.getType()->isPointerTy()) {
      // Every argument with pointer type might be marked nonnull.
      if (!Whitelist || Whitelist->count(AANonNullArgument::ID))
        registerAA(*new AANonNullArgument(Arg, InfoCache));

      // Every argument with pointer type might be marked dereferenceable.
      if (!Whitelist || Whitelist->count(AADereferenceableArgument::ID))
        registerAA(*new AADereferenceableArgument(Arg, InfoCache));

      // Every argument with pointer type might be marked align.
      if (!Whitelist || Whitelist->count(AAAlignArgument::ID))
        registerAA(*new AAAlignArgument(Arg, InfoCache));
    }
  }

  // Every function might be "will-return".
  registerAA(*new AAWillReturnFunction(F, InfoCache));

  // Check for dead BasicBlocks in every function.
  registerAA(*new AAIsDeadFunction(F, InfoCache));

  // Walk all instructions to find more attribute opportunities and also
  // interesting instructions that might be queried by abstract attributes
  // during their initialization or update.
  auto &ReadOrWriteInsts = InfoCache.FuncRWInstsMap[&F];
  auto &InstOpcodeMap = InfoCache.FuncInstOpcodeMap[&F];

  for (Instruction &I : instructions(&F)) {
    bool IsInterestingOpcode = false;

    // To allow easy access to all instructions in a function with a given
    // opcode we store them in the InfoCache. As not all opcodes are interesting
    // to concrete attributes we only cache the ones that are as identified in
    // the following switch.
    // Note: There are no concrete attributes now so this is initially empty.
2570 switch (I.getOpcode()) { 2571 default: 2572 assert((!ImmutableCallSite(&I)) && (!isa<CallBase>(&I)) && 2573 "New call site/base instruction type needs to be known int the " 2574 "attributor."); 2575 break; 2576 case Instruction::Call: 2577 case Instruction::CallBr: 2578 case Instruction::Invoke: 2579 case Instruction::CleanupRet: 2580 case Instruction::CatchSwitch: 2581 case Instruction::Resume: 2582 case Instruction::Ret: 2583 IsInterestingOpcode = true; 2584 } 2585 if (IsInterestingOpcode) 2586 InstOpcodeMap[I.getOpcode()].push_back(&I); 2587 if (I.mayReadOrWriteMemory()) 2588 ReadOrWriteInsts.push_back(&I); 2589 2590 CallSite CS(&I); 2591 if (CS && CS.getCalledFunction()) { 2592 for (int i = 0, e = CS.getCalledFunction()->arg_size(); i < e; i++) { 2593 if (!CS.getArgument(i)->getType()->isPointerTy()) 2594 continue; 2595 2596 // Call site argument attribute "non-null". 2597 if (!Whitelist || Whitelist->count(AANonNullCallSiteArgument::ID)) 2598 registerAA(*new AANonNullCallSiteArgument(CS, i, InfoCache), i); 2599 2600 // Call site argument attribute "dereferenceable". 2601 if (!Whitelist || 2602 Whitelist->count(AADereferenceableCallSiteArgument::ID)) 2603 registerAA(*new AADereferenceableCallSiteArgument(CS, i, InfoCache), 2604 i); 2605 2606 // Call site argument attribute "align". 2607 if (!Whitelist || Whitelist->count(AAAlignCallSiteArgument::ID)) 2608 registerAA(*new AAAlignCallSiteArgument(CS, i, InfoCache), i); 2609 } 2610 } 2611 } 2612 } 2613 2614 /// Helpers to ease debugging through output streams and print calls. 2615 /// 2616 ///{ 2617 raw_ostream &llvm::operator<<(raw_ostream &OS, ChangeStatus S) { 2618 return OS << (S == ChangeStatus::CHANGED ? 
"changed" : "unchanged"); 2619 } 2620 2621 raw_ostream &llvm::operator<<(raw_ostream &OS, 2622 AbstractAttribute::ManifestPosition AP) { 2623 switch (AP) { 2624 case AbstractAttribute::MP_ARGUMENT: 2625 return OS << "arg"; 2626 case AbstractAttribute::MP_CALL_SITE_ARGUMENT: 2627 return OS << "cs_arg"; 2628 case AbstractAttribute::MP_FUNCTION: 2629 return OS << "fn"; 2630 case AbstractAttribute::MP_RETURNED: 2631 return OS << "fn_ret"; 2632 } 2633 llvm_unreachable("Unknown attribute position!"); 2634 } 2635 2636 raw_ostream &llvm::operator<<(raw_ostream &OS, const AbstractState &S) { 2637 return OS << (!S.isValidState() ? "top" : (S.isAtFixpoint() ? "fix" : "")); 2638 } 2639 2640 raw_ostream &llvm::operator<<(raw_ostream &OS, const AbstractAttribute &AA) { 2641 AA.print(OS); 2642 return OS; 2643 } 2644 2645 void AbstractAttribute::print(raw_ostream &OS) const { 2646 OS << "[" << getManifestPosition() << "][" << getAsStr() << "][" 2647 << AnchoredVal.getName() << "]"; 2648 } 2649 ///} 2650 2651 /// ---------------------------------------------------------------------------- 2652 /// Pass (Manager) Boilerplate 2653 /// ---------------------------------------------------------------------------- 2654 2655 static bool runAttributorOnModule(Module &M) { 2656 if (DisableAttributor) 2657 return false; 2658 2659 LLVM_DEBUG(dbgs() << "[Attributor] Run on module with " << M.size() 2660 << " functions.\n"); 2661 2662 // Create an Attributor and initially empty information cache that is filled 2663 // while we identify default attribute opportunities. 2664 Attributor A; 2665 InformationCache InfoCache; 2666 2667 for (Function &F : M) { 2668 // TODO: Not all attributes require an exact definition. Find a way to 2669 // enable deduction for some but not all attributes in case the 2670 // definition might be changed at runtime, see also 2671 // http://lists.llvm.org/pipermail/llvm-dev/2018-February/121275.html. 
    // TODO: We could always determine abstract attributes and if sufficient
    //       information was found we could duplicate the functions that do not
    //       have an exact definition.
    if (!F.hasExactDefinition()) {
      NumFnWithoutExactDefinition++;
      continue;
    }

    // For now we ignore naked and optnone functions.
    if (F.hasFnAttribute(Attribute::Naked) ||
        F.hasFnAttribute(Attribute::OptimizeNone))
      continue;

    NumFnWithExactDefinition++;

    // Populate the Attributor with abstract attribute opportunities in the
    // function and the information cache with IR information.
    A.identifyDefaultAbstractAttributes(F, InfoCache);
  }

  return A.run() == ChangeStatus::CHANGED;
}

PreservedAnalyses AttributorPass::run(Module &M, ModuleAnalysisManager &AM) {
  if (runAttributorOnModule(M)) {
    // FIXME: Think about passes we will preserve and add them here.
    return PreservedAnalyses::none();
  }
  return PreservedAnalyses::all();
}

namespace {

/// Legacy pass manager wrapper around runAttributorOnModule().
struct AttributorLegacyPass : public ModulePass {
  static char ID;

  AttributorLegacyPass() : ModulePass(ID) {
    initializeAttributorLegacyPassPass(*PassRegistry::getPassRegistry());
  }

  bool runOnModule(Module &M) override {
    if (skipModule(M))
      return false;
    return runAttributorOnModule(M);
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    // FIXME: Think about passes we will preserve and add them here.
    AU.setPreservesCFG();
  }
};

} // end anonymous namespace

Pass *llvm::createAttributorLegacyPass() { return new AttributorLegacyPass(); }

char AttributorLegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(AttributorLegacyPass, "attributor",
                      "Deduce and propagate attributes", false, false)
INITIALIZE_PASS_END(AttributorLegacyPass, "attributor",
                    "Deduce and propagate attributes", false, false)