//===- LoopAccessAnalysis.cpp - Loop Access Analysis Implementation ------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// The implementation of the loop memory dependence analysis that was
// originally developed for the loop vectorizer.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/LoopAccessAnalysis.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/LoopPassManager.h"
#include "llvm/Analysis/OptimizationDiagnosticInfo.h"
#include "llvm/Analysis/ScalarEvolutionExpander.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/Analysis/VectorUtils.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/PassManager.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;

#define DEBUG_TYPE "loop-accesses"

static cl::opt<unsigned, true>
VectorizationFactor("force-vector-width", cl::Hidden,
                    cl::desc("Sets the SIMD width. Zero is autoselect."),
                    cl::location(VectorizerParams::VectorizationFactor));
unsigned VectorizerParams::VectorizationFactor;

static cl::opt<unsigned, true>
VectorizationInterleave("force-vector-interleave", cl::Hidden,
                        cl::desc("Sets the vectorization interleave count. "
                                 "Zero is autoselect."),
                        cl::location(
                            VectorizerParams::VectorizationInterleave));
unsigned VectorizerParams::VectorizationInterleave;

static cl::opt<unsigned, true> RuntimeMemoryCheckThreshold(
    "runtime-memory-check-threshold", cl::Hidden,
    cl::desc("When performing memory disambiguation checks at runtime do not "
             "generate more than this number of comparisons (default = 8)."),
    cl::location(VectorizerParams::RuntimeMemoryCheckThreshold), cl::init(8));
unsigned VectorizerParams::RuntimeMemoryCheckThreshold;

/// \brief The maximum iterations used to merge memory checks
static cl::opt<unsigned> MemoryCheckMergeThreshold(
    "memory-check-merge-threshold", cl::Hidden,
    cl::desc("Maximum number of comparisons done when trying to merge "
             "runtime memory checks. (default = 100)"),
    cl::init(100));

/// Maximum SIMD width.
const unsigned VectorizerParams::MaxVectorWidth = 64;

/// \brief We collect dependences up to this threshold.
static cl::opt<unsigned>
    MaxDependences("max-dependences", cl::Hidden,
                   cl::desc("Maximum number of dependences collected by "
                            "loop-access analysis (default = 100)"),
                   cl::init(100));

/// This enables versioning on the strides of symbolically striding memory
/// accesses in code like the following.
///   for (i = 0; i < N; ++i)
///     A[i * Stride1] += B[i * Stride2] ...
///
/// Will be roughly translated to
///    if (Stride1 == 1 && Stride2 == 1) {
///      for (i = 0; i < N; i+=4)
///       A[i:i+3] += ...
///    } else
///      ...
static cl::opt<bool> EnableMemAccessVersioning(
    "enable-mem-access-versioning", cl::init(true), cl::Hidden,
    cl::desc("Enable symbolic stride memory access versioning"));

/// \brief Enable store-to-load forwarding conflict detection. This option can
/// be disabled for correctness testing.
static cl::opt<bool> EnableForwardingConflictDetection(
    "store-to-load-forwarding-conflict-detection", cl::Hidden,
    cl::desc("Enable conflict detection in loop-access analysis"),
    cl::init(true));

bool VectorizerParams::isInterleaveForced() {
  return ::VectorizationInterleave.getNumOccurrences() > 0;
}

void LoopAccessReport::emitAnalysis(const LoopAccessReport &Message,
                                    const Loop *TheLoop, const char *PassName,
                                    OptimizationRemarkEmitter &ORE) {
  DebugLoc DL = TheLoop->getStartLoc();
  const Value *V = TheLoop->getHeader();
  if (const Instruction *I = Message.getInstr()) {
    // If there is no debug location attached to the instruction, fall back to
    // using the loop's start location.
    if (I->getDebugLoc())
      DL = I->getDebugLoc();
    V = I->getParent();
  }
  ORE.emitOptimizationRemarkAnalysis(PassName, DL, V, Message.str());
}

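// Helper for replaceSymbolicStrideSCEV below: look through a single cast of
// an integer value (e.g. a sext, zext or trunc of the stride) and return the
// underlying value; any other value is returned unchanged.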
Value *llvm::stripIntegerCast(Value *V) {
  if (auto *CI = dyn_cast<CastInst>(V))
    if (CI->getOperand(0)->getType()->isIntegerTy())
      return CI->getOperand(0);
  return V;
}

const SCEV *llvm::replaceSymbolicStrideSCEV(PredicatedScalarEvolution &PSE,
                                            const ValueToValueMap &PtrToStride,
                                            Value *Ptr, Value *OrigPtr) {
  const SCEV *OrigSCEV = PSE.getSCEV(Ptr);

  // If there is an entry in the map return the SCEV of the pointer with the
  // symbolic stride replaced by one.
  ValueToValueMap::const_iterator SI =
      PtrToStride.find(OrigPtr ? OrigPtr : Ptr);
  if (SI != PtrToStride.end()) {
    Value *StrideVal = SI->second;

    // Strip casts.
    StrideVal = stripIntegerCast(StrideVal);

    // Replace symbolic stride by one.
    Value *One = ConstantInt::get(StrideVal->getType(), 1);
    ValueToValueMap RewriteMap;
    RewriteMap[StrideVal] = One;

    ScalarEvolution *SE = PSE.getSE();
    const auto *U = cast<SCEVUnknown>(SE->getSCEV(StrideVal));
    const auto *CT =
        static_cast<const SCEVConstant *>(SE->getOne(StrideVal->getType()));

    PSE.addPredicate(*SE->getEqualPredicate(U, CT));
    auto *Expr = PSE.getSCEV(Ptr);

    DEBUG(dbgs() << "LAA: Replacing SCEV: " << *OrigSCEV << " by: " << *Expr
                 << "\n");
    return Expr;
  }

  // Otherwise, just return the SCEV of the original pointer.
  return OrigSCEV;
}

/// Calculate the Start and End points of a memory access.
/// Let's assume A is the first access and B is a memory access on the N-th
/// loop iteration. Then B is calculated as:
///   B = A + Step*N.
/// The Step value may be positive or negative.
/// N is the calculated back-edge taken count:
///     N = (TripCount > 0) ? RoundDown(TripCount - 1, VF) : 0
/// The Start and End points are calculated as follows:
/// Start = UMIN(A, B); End = UMAX(A, B) + SizeOfElt,
/// where SizeOfElt is the size of a single memory access in bytes.
///
/// There is no conflict when the intervals are disjoint:
/// NoConflict = (P2.Start >= P1.End) || (P1.Start >= P2.End)
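///
/// For illustration (hypothetical values): with A = %base, Step = 4 and
/// N = 255, the last access is B = %base + 1020, so for a 4-byte element the
/// resulting interval is [%base, %base + 1024).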
void RuntimePointerChecking::insert(Loop *Lp, Value *Ptr, bool WritePtr,
                                    unsigned DepSetId, unsigned ASId,
                                    const ValueToValueMap &Strides,
                                    PredicatedScalarEvolution &PSE) {
  // Get the stride-replaced SCEV.
  const SCEV *Sc = replaceSymbolicStrideSCEV(PSE, Strides, Ptr);
  ScalarEvolution *SE = PSE.getSE();

  const SCEV *ScStart;
  const SCEV *ScEnd;

  if (SE->isLoopInvariant(Sc, Lp))
    ScStart = ScEnd = Sc;
  else {
    const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(Sc);
    assert(AR && "Invalid addrec expression");
    const SCEV *Ex = PSE.getBackedgeTakenCount();

    ScStart = AR->getStart();
    ScEnd = AR->evaluateAtIteration(Ex, *SE);
    const SCEV *Step = AR->getStepRecurrence(*SE);

    // For expressions with negative step, the upper bound is ScStart and the
    // lower bound is ScEnd.
    if (const auto *CStep = dyn_cast<SCEVConstant>(Step)) {
      if (CStep->getValue()->isNegative())
        std::swap(ScStart, ScEnd);
    } else {
      // Fallback case: the step is not constant, but we can still
      // get the upper and lower bounds of the interval by using min/max
      // expressions.
      ScStart = SE->getUMinExpr(ScStart, ScEnd);
      ScEnd = SE->getUMaxExpr(AR->getStart(), ScEnd);
    }
    // Add the size of the pointed element to ScEnd.
    unsigned EltSize =
      Ptr->getType()->getPointerElementType()->getScalarSizeInBits() / 8;
    const SCEV *EltSizeSCEV = SE->getConstant(ScEnd->getType(), EltSize);
    ScEnd = SE->getAddExpr(ScEnd, EltSizeSCEV);
  }

  Pointers.emplace_back(Ptr, ScStart, ScEnd, WritePtr, DepSetId, ASId, Sc);
}

SmallVector<RuntimePointerChecking::PointerCheck, 4>
RuntimePointerChecking::generateChecks() const {
  SmallVector<PointerCheck, 4> Checks;

  for (unsigned I = 0; I < CheckingGroups.size(); ++I) {
    for (unsigned J = I + 1; J < CheckingGroups.size(); ++J) {
      const RuntimePointerChecking::CheckingPtrGroup &CGI = CheckingGroups[I];
      const RuntimePointerChecking::CheckingPtrGroup &CGJ = CheckingGroups[J];

      if (needsChecking(CGI, CGJ))
        Checks.push_back(std::make_pair(&CGI, &CGJ));
    }
  }
  return Checks;
}

void RuntimePointerChecking::generateChecks(
    MemoryDepChecker::DepCandidates &DepCands, bool UseDependencies) {
  assert(Checks.empty() && "Checks is not empty");
  groupChecks(DepCands, UseDependencies);
  Checks = generateChecks();
}

bool RuntimePointerChecking::needsChecking(const CheckingPtrGroup &M,
                                           const CheckingPtrGroup &N) const {
  for (unsigned I = 0, EI = M.Members.size(); EI != I; ++I)
    for (unsigned J = 0, EJ = N.Members.size(); EJ != J; ++J)
      if (needsChecking(M.Members[I], N.Members[J]))
        return true;
  return false;
}

/// Compare \p I and \p J and return the minimum.
/// Return nullptr in case we couldn't find an answer.
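/// For example (illustrative), if \p J - \p I folds to the constant -8 then
/// \p J is the smaller expression and is returned; if the difference is not
/// a compile-time constant, nullptr is returned.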
static const SCEV *getMinFromExprs(const SCEV *I, const SCEV *J,
                                   ScalarEvolution *SE) {
  const SCEV *Diff = SE->getMinusSCEV(J, I);
  const SCEVConstant *C = dyn_cast<const SCEVConstant>(Diff);

  if (!C)
    return nullptr;
  if (C->getValue()->isNegative())
    return J;
  return I;
}

bool RuntimePointerChecking::CheckingPtrGroup::addPointer(unsigned Index) {
  const SCEV *Start = RtCheck.Pointers[Index].Start;
  const SCEV *End = RtCheck.Pointers[Index].End;

  // Compare the starts and ends with the known minimum and maximum
  // of this set. We need to know how we compare against the min/max
  // of the set in order to be able to emit memchecks.
  const SCEV *Min0 = getMinFromExprs(Start, Low, RtCheck.SE);
  if (!Min0)
    return false;

  const SCEV *Min1 = getMinFromExprs(End, High, RtCheck.SE);
  if (!Min1)
    return false;

  // Update the low bound expression if we've found a new min value.
  if (Min0 == Start)
    Low = Start;

  // Update the high bound expression if we've found a new max value.
  if (Min1 != End)
    High = End;

  Members.push_back(Index);
  return true;
}

void RuntimePointerChecking::groupChecks(
    MemoryDepChecker::DepCandidates &DepCands, bool UseDependencies) {
  // We build the groups from the dependency candidates' equivalence classes
  // because:
  //    - We know that pointers in the same equivalence class share
  //      the same underlying object and therefore there is a chance
  //      that we can compare pointers
  //    - We wouldn't be able to merge two pointers for which we need
  //      to emit a memcheck. The classes in DepCands are already
  //      conveniently built such that no two pointers in the same
  //      class need checking against each other.

  // We use the following (greedy) algorithm to construct the groups.
  // For every pointer in the equivalence class:
  //   For each existing group:
  //   - if the difference between this pointer and the min/max bounds
  //     of the group is a constant, then make the pointer part of the
  //     group and update the min/max bounds of that group as required.

  CheckingGroups.clear();

  // If we need to check two pointers to the same underlying object
  // with a non-constant difference, we shouldn't perform any pointer
  // grouping with those pointers. This is because we can easily get
  // into cases where the resulting check would return false, even when
  // the accesses are safe.
  //
  // The following example shows this:
  // for (i = 0; i < 1000; ++i)
  //   a[5000 + i * m] = a[i] + a[i + 9000]
  //
  // Here grouping gives a check of (5000, 5000 + 1000 * m) against
  // (0, 10000) which is always false. However, if m is 1, there is no
  // dependence. Not grouping the checks for a[i] and a[i + 9000] allows
  // us to perform an accurate check in this case.
  //
  // The above case requires that we have an UnknownDependence between
  // accesses to the same underlying object. This cannot happen unless
  // ShouldRetryWithRuntimeCheck is set, and therefore UseDependencies
  // is also false. In this case we will use the fallback path and create
  // separate checking groups for all pointers.

  // If we don't have the dependency partitions, construct a new
  // checking pointer group for each pointer. This is also required
  // for correctness, because in this case we can have checking between
  // pointers to the same underlying object.
  if (!UseDependencies) {
    for (unsigned I = 0; I < Pointers.size(); ++I)
      CheckingGroups.push_back(CheckingPtrGroup(I, *this));
    return;
  }

  unsigned TotalComparisons = 0;

  DenseMap<Value *, unsigned> PositionMap;
  for (unsigned Index = 0; Index < Pointers.size(); ++Index)
    PositionMap[Pointers[Index].PointerValue] = Index;

  // We need to keep track of what pointers we've already seen so we
  // don't process them twice.
  SmallSet<unsigned, 2> Seen;

  // Go through all equivalence classes, get the "pointer check groups"
  // and add them to the overall solution. We use the order in which accesses
  // appear in 'Pointers' to enforce determinism.
  for (unsigned I = 0; I < Pointers.size(); ++I) {
    // We've seen this pointer before, and therefore already processed
    // its equivalence class.
    if (Seen.count(I))
      continue;

    MemoryDepChecker::MemAccessInfo Access(Pointers[I].PointerValue,
                                           Pointers[I].IsWritePtr);

    SmallVector<CheckingPtrGroup, 2> Groups;
    auto LeaderI = DepCands.findValue(DepCands.getLeaderValue(Access));

    // Because DepCands is constructed by visiting accesses in the order in
    // which they appear in alias sets (which is deterministic) and the
    // iteration order within an equivalence class member is only dependent on
    // the order in which unions and insertions are performed on the
    // equivalence class, the iteration order is deterministic.
    for (auto MI = DepCands.member_begin(LeaderI), ME = DepCands.member_end();
         MI != ME; ++MI) {
      unsigned Pointer = PositionMap[MI->getPointer()];
      bool Merged = false;
      // Mark this pointer as seen.
      Seen.insert(Pointer);

      // Go through all the existing sets and see if we can find one
      // which can include this pointer.
      for (CheckingPtrGroup &Group : Groups) {
        // Don't perform more than a certain number of comparisons.
        // This should limit the cost of grouping the pointers to something
        // reasonable.  If we do end up hitting this threshold, the algorithm
        // will create separate groups for all remaining pointers.
        if (TotalComparisons > MemoryCheckMergeThreshold)
          break;

        TotalComparisons++;

        if (Group.addPointer(Pointer)) {
          Merged = true;
          break;
        }
      }

      if (!Merged)
        // We couldn't add this pointer to any existing set or the threshold
        // for the number of comparisons has been reached. Create a new group
        // to hold the current pointer.
        Groups.push_back(CheckingPtrGroup(Pointer, *this));
    }

    // We've computed the grouped checks for this partition.
    // Save the results and continue with the next one.
    std::copy(Groups.begin(), Groups.end(), std::back_inserter(CheckingGroups));
  }
}

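// Note: a partition index of -1 denotes a pointer that is not confined to a
// single partition (for example because several partitions use it), so such
// a pointer is never reported as sharing a partition with another one.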
bool RuntimePointerChecking::arePointersInSamePartition(
    const SmallVectorImpl<int> &PtrToPartition, unsigned PtrIdx1,
    unsigned PtrIdx2) {
  return (PtrToPartition[PtrIdx1] != -1 &&
          PtrToPartition[PtrIdx1] == PtrToPartition[PtrIdx2]);
}

bool RuntimePointerChecking::needsChecking(unsigned I, unsigned J) const {
  const PointerInfo &PointerI = Pointers[I];
  const PointerInfo &PointerJ = Pointers[J];

  // No need to check if two readonly pointers intersect.
  if (!PointerI.IsWritePtr && !PointerJ.IsWritePtr)
    return false;

  // Only need to check pointers between two different dependency sets.
  if (PointerI.DependencySetId == PointerJ.DependencySetId)
    return false;

  // Only need to check pointers in the same alias set.
  if (PointerI.AliasSetId != PointerJ.AliasSetId)
    return false;

  return true;
}

void RuntimePointerChecking::printChecks(
    raw_ostream &OS, const SmallVectorImpl<PointerCheck> &Checks,
    unsigned Depth) const {
  unsigned N = 0;
  for (const auto &Check : Checks) {
    const auto &First = Check.first->Members, &Second = Check.second->Members;

    OS.indent(Depth) << "Check " << N++ << ":\n";

    OS.indent(Depth + 2) << "Comparing group (" << Check.first << "):\n";
    for (unsigned K = 0; K < First.size(); ++K)
      OS.indent(Depth + 2) << *Pointers[First[K]].PointerValue << "\n";

    OS.indent(Depth + 2) << "Against group (" << Check.second << "):\n";
    for (unsigned K = 0; K < Second.size(); ++K)
      OS.indent(Depth + 2) << *Pointers[Second[K]].PointerValue << "\n";
  }
}

void RuntimePointerChecking::print(raw_ostream &OS, unsigned Depth) const {

  OS.indent(Depth) << "Run-time memory checks:\n";
  printChecks(OS, Checks, Depth);

  OS.indent(Depth) << "Grouped accesses:\n";
  for (unsigned I = 0; I < CheckingGroups.size(); ++I) {
    const auto &CG = CheckingGroups[I];

    OS.indent(Depth + 2) << "Group " << &CG << ":\n";
    OS.indent(Depth + 4) << "(Low: " << *CG.Low << " High: " << *CG.High
                         << ")\n";
    for (unsigned J = 0; J < CG.Members.size(); ++J) {
      OS.indent(Depth + 6) << "Member: " << *Pointers[CG.Members[J]].Expr
                           << "\n";
    }
  }
}

namespace {
/// \brief Analyses memory accesses in a loop.
///
/// Checks whether run time pointer checks are needed and builds sets for data
/// dependence checking.
class AccessAnalysis {
public:
  /// \brief Read or write access location.
  typedef PointerIntPair<Value *, 1, bool> MemAccessInfo;
  typedef SmallPtrSet<MemAccessInfo, 8> MemAccessInfoSet;

  AccessAnalysis(const DataLayout &Dl, AliasAnalysis *AA, LoopInfo *LI,
                 MemoryDepChecker::DepCandidates &DA,
                 PredicatedScalarEvolution &PSE)
      : DL(Dl), AST(*AA), LI(LI), DepCands(DA), IsRTCheckAnalysisNeeded(false),
        PSE(PSE) {}

  /// \brief Register a load and whether it is only read from.
  void addLoad(MemoryLocation &Loc, bool IsReadOnly) {
    Value *Ptr = const_cast<Value*>(Loc.Ptr);
    AST.add(Ptr, MemoryLocation::UnknownSize, Loc.AATags);
    Accesses.insert(MemAccessInfo(Ptr, false));
    if (IsReadOnly)
      ReadOnlyPtr.insert(Ptr);
  }

  /// \brief Register a store.
  void addStore(MemoryLocation &Loc) {
    Value *Ptr = const_cast<Value*>(Loc.Ptr);
    AST.add(Ptr, MemoryLocation::UnknownSize, Loc.AATags);
    Accesses.insert(MemAccessInfo(Ptr, true));
  }

  /// \brief Check whether we can check the pointers at runtime for
  /// non-intersection.
  ///
  /// Returns true if we need no check or if we do and we can generate them
  /// (i.e. the pointers have computable bounds).
  bool canCheckPtrAtRT(RuntimePointerChecking &RtCheck, ScalarEvolution *SE,
                       Loop *TheLoop, const ValueToValueMap &Strides,
                       bool ShouldCheckWrap = false);

  /// \brief Goes over all memory accesses, checks whether a RT check is needed
  /// and builds sets of dependent accesses.
  void buildDependenceSets() {
    processMemAccesses();
  }

  /// \brief Initial processing of memory accesses determined that we need to
  /// perform dependency checking.
  ///
  /// Note that this can later be cleared if we retry memcheck analysis without
  /// dependency checking (i.e. ShouldRetryWithRuntimeCheck).
  bool isDependencyCheckNeeded() { return !CheckDeps.empty(); }

  /// We decided that no dependence analysis would be used.  Reset the state.
  void resetDepChecks(MemoryDepChecker &DepChecker) {
    CheckDeps.clear();
    DepChecker.clearDependences();
  }

  MemAccessInfoSet &getDependenciesToCheck() { return CheckDeps; }

private:
  typedef SetVector<MemAccessInfo> PtrAccessSet;

  /// \brief Go over all memory accesses and check whether runtime pointer
  /// checks are needed and build sets of dependency check candidates.
  void processMemAccesses();

  /// Set of all accesses.
  PtrAccessSet Accesses;

  const DataLayout &DL;

  /// Set of accesses that need a further dependence check.
  MemAccessInfoSet CheckDeps;

  /// Set of pointers that are read only.
  SmallPtrSet<Value*, 16> ReadOnlyPtr;

  /// An alias set tracker to partition the access set by underlying object and
  /// intrinsic property (such as TBAA metadata).
  AliasSetTracker AST;

  LoopInfo *LI;

  /// Sets of potentially dependent accesses - members of one set share an
  /// underlying pointer. The set "CheckDeps" identifies which sets really need
  /// a dependence check.
  MemoryDepChecker::DepCandidates &DepCands;

  /// \brief Initial processing of memory accesses determined that we may need
  /// to add memchecks.  Perform the analysis to determine the necessary checks.
  ///
  /// Note that this is different from isDependencyCheckNeeded.  When we retry
  /// memcheck analysis without dependency checking
  /// (i.e. ShouldRetryWithRuntimeCheck), isDependencyCheckNeeded is cleared
  /// while this remains set if we have potentially dependent accesses.
  bool IsRTCheckAnalysisNeeded;

  /// The SCEV predicate containing all the SCEV-related assumptions.
  PredicatedScalarEvolution &PSE;
};

} // end anonymous namespace

/// \brief Check whether a pointer can participate in a runtime bounds check.
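///
/// The pointer must either be loop-invariant or an affine AddRec over the
/// loop so that the range it spans across the loop iterations can be bounded.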
static bool hasComputableBounds(PredicatedScalarEvolution &PSE,
                                const ValueToValueMap &Strides, Value *Ptr,
                                Loop *L) {
  const SCEV *PtrScev = replaceSymbolicStrideSCEV(PSE, Strides, Ptr);

  // The bounds for a loop-invariant pointer are trivial.
  if (PSE.getSE()->isLoopInvariant(PtrScev, L))
    return true;

  const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(PtrScev);
  if (!AR)
    return false;

  return AR->isAffine();
}

/// \brief Check whether a pointer address cannot wrap.
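///
/// A loop-invariant pointer trivially cannot wrap; otherwise the access must
/// have a unit stride (as computed by getPtrStride) to be considered
/// non-wrapping here.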
static bool isNoWrap(PredicatedScalarEvolution &PSE,
                     const ValueToValueMap &Strides, Value *Ptr, Loop *L) {
  const SCEV *PtrScev = PSE.getSCEV(Ptr);
  if (PSE.getSE()->isLoopInvariant(PtrScev, L))
    return true;

  int64_t Stride = getPtrStride(PSE, Ptr, L, Strides);
  return Stride == 1;
}

bool AccessAnalysis::canCheckPtrAtRT(RuntimePointerChecking &RtCheck,
                                     ScalarEvolution *SE, Loop *TheLoop,
                                     const ValueToValueMap &StridesMap,
                                     bool ShouldCheckWrap) {
  // Find pointers with computable bounds. We are going to use this information
  // to place a runtime bound check.
  bool CanDoRT = true;

  bool NeedRTCheck = false;
  if (!IsRTCheckAnalysisNeeded) return true;

  bool IsDepCheckNeeded = isDependencyCheckNeeded();

  // We assign a consecutive id to accesses from different alias sets.
  // Accesses between different groups don't need to be checked.
  unsigned ASId = 1;
  for (auto &AS : AST) {
    int NumReadPtrChecks = 0;
    int NumWritePtrChecks = 0;

    // We assign a consecutive id to accesses from different dependence sets.
    // Accesses within the same set don't need a runtime check.
    unsigned RunningDepId = 1;
    DenseMap<Value *, unsigned> DepSetId;

    for (auto A : AS) {
      Value *Ptr = A.getValue();
      bool IsWrite = Accesses.count(MemAccessInfo(Ptr, true));
      MemAccessInfo Access(Ptr, IsWrite);

      if (IsWrite)
        ++NumWritePtrChecks;
      else
        ++NumReadPtrChecks;

      if (hasComputableBounds(PSE, StridesMap, Ptr, TheLoop) &&
          // When we run after a failing dependency check we have to make sure
          // we don't have wrapping pointers.
          (!ShouldCheckWrap || isNoWrap(PSE, StridesMap, Ptr, TheLoop))) {
        // The id of the dependence set.
        unsigned DepId;

        if (IsDepCheckNeeded) {
          Value *Leader = DepCands.getLeaderValue(Access).getPointer();
          unsigned &LeaderId = DepSetId[Leader];
          if (!LeaderId)
            LeaderId = RunningDepId++;
          DepId = LeaderId;
        } else
          // Each access has its own dependence set.
          DepId = RunningDepId++;

        RtCheck.insert(TheLoop, Ptr, IsWrite, DepId, ASId, StridesMap, PSE);

        DEBUG(dbgs() << "LAA: Found a runtime check ptr:" << *Ptr << '\n');
      } else {
        DEBUG(dbgs() << "LAA: Can't find bounds for ptr:" << *Ptr << '\n');
        CanDoRT = false;
      }
    }

    // If we have at least two writes or one write and a read then we need to
    // check them.  But there is no need for checks if there is only one
    // dependence set for this alias set.
    //
    // Note that this function computes CanDoRT and NeedRTCheck independently.
    // For example CanDoRT=false, NeedRTCheck=false means that we have a pointer
    // for which we couldn't find the bounds but we don't actually need to emit
    // any checks so it does not matter.
    if (!(IsDepCheckNeeded && CanDoRT && RunningDepId == 2))
      NeedRTCheck |= (NumWritePtrChecks >= 2 || (NumReadPtrChecks >= 1 &&
                                                 NumWritePtrChecks >= 1));

    ++ASId;
  }

  // If the pointers that we would use for the bounds comparison have different
  // address spaces, assume the values aren't directly comparable, so we can't
  // use them for the runtime check. We also have to assume they could
  // overlap. In the future there should be metadata for whether address spaces
  // are disjoint.
  unsigned NumPointers = RtCheck.Pointers.size();
  for (unsigned i = 0; i < NumPointers; ++i) {
    for (unsigned j = i + 1; j < NumPointers; ++j) {
      // Only need to check pointers between two different dependency sets.
      if (RtCheck.Pointers[i].DependencySetId ==
          RtCheck.Pointers[j].DependencySetId)
        continue;
      // Only need to check pointers in the same alias set.
      if (RtCheck.Pointers[i].AliasSetId != RtCheck.Pointers[j].AliasSetId)
        continue;

      Value *PtrI = RtCheck.Pointers[i].PointerValue;
      Value *PtrJ = RtCheck.Pointers[j].PointerValue;

      unsigned ASi = PtrI->getType()->getPointerAddressSpace();
      unsigned ASj = PtrJ->getType()->getPointerAddressSpace();
      if (ASi != ASj) {
        DEBUG(dbgs() << "LAA: Runtime check would require comparison between"
                       " different address spaces\n");
        return false;
      }
    }
  }

  if (NeedRTCheck && CanDoRT)
    RtCheck.generateChecks(DepCands, IsDepCheckNeeded);

  DEBUG(dbgs() << "LAA: We need to do " << RtCheck.getNumberOfChecks()
               << " pointer comparisons.\n");

  RtCheck.Need = NeedRTCheck;

  bool CanDoRTIfNeeded = !NeedRTCheck || CanDoRT;
  if (!CanDoRTIfNeeded)
    RtCheck.reset();
  return CanDoRTIfNeeded;
}

void AccessAnalysis::processMemAccesses() {
  // We process the set twice: first we process read-write pointers, last we
  // process read-only pointers. This allows us to skip dependence tests for
  // read-only pointers.

  DEBUG(dbgs() << "LAA: Processing memory accesses...\n");
  DEBUG(dbgs() << "  AST: "; AST.dump());
  DEBUG(dbgs() << "LAA:   Accesses(" << Accesses.size() << "):\n");
  DEBUG({
    for (auto A : Accesses)
      dbgs() << "\t" << *A.getPointer() << " (" <<
                (A.getInt() ? "write" : (ReadOnlyPtr.count(A.getPointer()) ?
                                         "read-only" : "read")) << ")\n";
  });

  // The AliasSetTracker has nicely partitioned our pointers by metadata
  // compatibility and potential for underlying-object overlap. As a result, we
  // only need to check for potential pointer dependencies within each alias
  // set.
  for (auto &AS : AST) {
    // Note that both the alias-set tracker and the alias sets themselves use
    // linked lists internally and so the iteration order here is deterministic
    // (matching the original instruction order within each set).

    bool SetHasWrite = false;

    // Map of pointers to last access encountered.
    typedef DenseMap<Value*, MemAccessInfo> UnderlyingObjToAccessMap;
    UnderlyingObjToAccessMap ObjToLastAccess;

    // Set of accesses to check after all writes have been processed.
    PtrAccessSet DeferredAccesses;

    // Iterate over each alias set twice, once to process read/write pointers,
    // and then to process read-only pointers.
    for (int SetIteration = 0; SetIteration < 2; ++SetIteration) {
      bool UseDeferred = SetIteration > 0;
      PtrAccessSet &S = UseDeferred ? DeferredAccesses : Accesses;

      for (auto AV : AS) {
        Value *Ptr = AV.getValue();

        // For a single memory access in AliasSetTracker, Accesses may contain
        // both read and write, and they both need to be handled for CheckDeps.
        for (auto AC : S) {
          if (AC.getPointer() != Ptr)
            continue;

          bool IsWrite = AC.getInt();

          // If we're using the deferred access set, then it contains only
          // reads.
          bool IsReadOnlyPtr = ReadOnlyPtr.count(Ptr) && !IsWrite;
          if (UseDeferred && !IsReadOnlyPtr)
            continue;
          // Otherwise, the pointer must be in the PtrAccessSet, either as a
          // read or a write.
          assert(((IsReadOnlyPtr && UseDeferred) || IsWrite ||
                  S.count(MemAccessInfo(Ptr, false))) &&
                 "Alias-set pointer not in the access set?");

          MemAccessInfo Access(Ptr, IsWrite);
          DepCands.insert(Access);

          // Memorize read-only pointers for later processing and skip them in
          // the first round (they need to be checked after we have seen all
          // write pointers). Note: we also mark pointers that are not
          // consecutive as "read-only" pointers (so that we check
          // "a[b[i]] +="). Hence, we need the second check for "!IsWrite".
          if (!UseDeferred && IsReadOnlyPtr) {
            DeferredAccesses.insert(Access);
            continue;
          }

          // If this is a write, check other reads and writes for conflicts. If
          // this is a read-only access, check other writes for conflicts (but
          // only if there is no other write to the ptr - this is an
          // optimization to catch "a[i] = a[i] + " without having to do a
          // dependence check).
          if ((IsWrite || IsReadOnlyPtr) && SetHasWrite) {
            CheckDeps.insert(Access);
            IsRTCheckAnalysisNeeded = true;
          }

          if (IsWrite)
            SetHasWrite = true;

          // Create sets of pointers connected by a shared alias set and
          // underlying object.
          typedef SmallVector<Value *, 16> ValueVector;
          ValueVector TempObjects;

          GetUnderlyingObjects(Ptr, TempObjects, DL, LI);
          DEBUG(dbgs() << "Underlying objects for pointer " << *Ptr << "\n");
          for (Value *UnderlyingObj : TempObjects) {
            // nullptr never aliases; don't join sets for pointers that have
            // "null" in their UnderlyingObjects list.
            if (isa<ConstantPointerNull>(UnderlyingObj))
              continue;

            UnderlyingObjToAccessMap::iterator Prev =
                ObjToLastAccess.find(UnderlyingObj);
            if (Prev != ObjToLastAccess.end())
              DepCands.unionSets(Access, Prev->second);

            ObjToLastAccess[UnderlyingObj] = Access;
            DEBUG(dbgs() << "  " << *UnderlyingObj << "\n");
          }
        }
      }
    }
  }
}

static bool isInBoundsGep(Value *Ptr) {
  if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Ptr))
    return GEP->isInBounds();
  return false;
}

/// \brief Return true if an AddRec pointer \p Ptr is unsigned non-wrapping,
/// i.e. monotonically increasing/decreasing.
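///
/// For instance (illustrative), a pointer computed as
///   %p = getelementptr inbounds float, float* %a, i64 %idx
/// where %idx = add nsw i64 %iv, 1 and %iv is an NSW add recurrence of \p L
/// is treated as non-wrapping even when SCEV has not propagated the flag to
/// the pointer itself.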
static bool isNoWrapAddRec(Value *Ptr, const SCEVAddRecExpr *AR,
                           PredicatedScalarEvolution &PSE, const Loop *L) {
  // FIXME: This should probably only return true for NUW.
  if (AR->getNoWrapFlags(SCEV::NoWrapMask))
    return true;

  // Scalar evolution does not propagate the non-wrapping flags to values that
  // are derived from a non-wrapping induction variable because non-wrapping
  // could be flow-sensitive.
  //
  // Look through the potentially overflowing instruction to try to prove
  // non-wrapping for the *specific* value of Ptr.

  // The arithmetic implied by an inbounds GEP can't overflow.
  auto *GEP = dyn_cast<GetElementPtrInst>(Ptr);
  if (!GEP || !GEP->isInBounds())
    return false;

  // Make sure there is only one non-const index and analyze that.
  Value *NonConstIndex = nullptr;
  for (Value *Index : make_range(GEP->idx_begin(), GEP->idx_end()))
    if (!isa<ConstantInt>(Index)) {
      if (NonConstIndex)
        return false;
      NonConstIndex = Index;
    }
  if (!NonConstIndex)
    // The recurrence is on the pointer, ignore for now.
    return false;

  // The index in GEP is signed.  It is non-wrapping if it's derived from an
  // NSW AddRec using an NSW operation.
  if (auto *OBO = dyn_cast<OverflowingBinaryOperator>(NonConstIndex))
    if (OBO->hasNoSignedWrap() &&
        // Assume constant for the other operand so that the AddRec can be
        // easily found.
        isa<ConstantInt>(OBO->getOperand(1))) {
      auto *OpScev = PSE.getSCEV(OBO->getOperand(0));

      if (auto *OpAR = dyn_cast<SCEVAddRecExpr>(OpScev))
        return OpAR->getLoop() == L && OpAR->getNoWrapFlags(SCEV::FlagNSW);
    }

  return false;
}

/// \brief Check whether the access through \p Ptr has a constant stride.
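///
/// Returns the stride expressed in units of the pointed-to element type
/// (e.g. 1 for a consecutive forward access), or 0 if no constant stride
/// could be established.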
8917afb46d3SDavid Majnemer int64_t llvm::getPtrStride(PredicatedScalarEvolution &PSE, Value *Ptr,
892ea63a7f5SSilviu Baranga                            const Loop *Lp, const ValueToValueMap &StridesMap,
8935f8cc0c3SElena Demikhovsky                            bool Assume, bool ShouldCheckWrap) {
894e3dcce97SCraig Topper   Type *Ty = Ptr->getType();
8950456327cSAdam Nemet   assert(Ty->isPointerTy() && "Unexpected non-ptr");
8960456327cSAdam Nemet 
8970456327cSAdam Nemet   // Make sure that the pointer does not point to aggregate types.
898e3dcce97SCraig Topper   auto *PtrTy = cast<PointerType>(Ty);
8990456327cSAdam Nemet   if (PtrTy->getElementType()->isAggregateType()) {
900ea63a7f5SSilviu Baranga     DEBUG(dbgs() << "LAA: Bad stride - Not a pointer to a scalar type" << *Ptr
901ea63a7f5SSilviu Baranga                  << "\n");
9020456327cSAdam Nemet     return 0;
9030456327cSAdam Nemet   }
9040456327cSAdam Nemet 
9059cd9a7e3SSilviu Baranga   const SCEV *PtrScev = replaceSymbolicStrideSCEV(PSE, StridesMap, Ptr);
9060456327cSAdam Nemet 
9070456327cSAdam Nemet   const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(PtrScev);
908ea63a7f5SSilviu Baranga   if (Assume && !AR)
909d68ed854SSilviu Baranga     AR = PSE.getAsAddRec(Ptr);
910ea63a7f5SSilviu Baranga 
9110456327cSAdam Nemet   if (!AR) {
912ea63a7f5SSilviu Baranga     DEBUG(dbgs() << "LAA: Bad stride - Not an AddRecExpr pointer " << *Ptr
913ea63a7f5SSilviu Baranga                  << " SCEV: " << *PtrScev << "\n");
9140456327cSAdam Nemet     return 0;
9150456327cSAdam Nemet   }
9160456327cSAdam Nemet 
9170456327cSAdam Nemet   // The access function must stride over the innermost loop.
9180456327cSAdam Nemet   if (Lp != AR->getLoop()) {
919339f42b3SAdam Nemet     DEBUG(dbgs() << "LAA: Bad stride - Not striding over innermost loop " <<
920ea63a7f5SSilviu Baranga           *Ptr << " SCEV: " << *AR << "\n");
921a02ce98bSKyle Butt     return 0;
9220456327cSAdam Nemet   }
9230456327cSAdam Nemet 
9240456327cSAdam Nemet   // The address calculation must not wrap. Otherwise, a dependence could be
9250456327cSAdam Nemet   // inverted.
9260456327cSAdam Nemet   // An inbounds getelementptr that is an AddRec with a unit stride
9270456327cSAdam Nemet   // cannot wrap by definition. The unit stride requirement is checked later.
9280456327cSAdam Nemet   // A getelementptr without an inbounds attribute but with unit stride would
9290456327cSAdam Nemet   // have to access the pointer value "0", which is undefined behavior in
9300456327cSAdam Nemet   // address space 0, so we can also vectorize this case.
9310456327cSAdam Nemet   bool IsInBoundsGEP = isInBoundsGep(Ptr);
9325f8cc0c3SElena Demikhovsky   bool IsNoWrapAddRec = !ShouldCheckWrap ||
933ea63a7f5SSilviu Baranga     PSE.hasNoOverflow(Ptr, SCEVWrapPredicate::IncrementNUSW) ||
934ea63a7f5SSilviu Baranga     isNoWrapAddRec(Ptr, AR, PSE, Lp);
9350456327cSAdam Nemet   bool IsInAddressSpaceZero = PtrTy->getAddressSpace() == 0;
9360456327cSAdam Nemet   if (!IsNoWrapAddRec && !IsInBoundsGEP && !IsInAddressSpaceZero) {
937ea63a7f5SSilviu Baranga     if (Assume) {
938ea63a7f5SSilviu Baranga       PSE.setNoOverflow(Ptr, SCEVWrapPredicate::IncrementNUSW);
939ea63a7f5SSilviu Baranga       IsNoWrapAddRec = true;
940ea63a7f5SSilviu Baranga       DEBUG(dbgs() << "LAA: Pointer may wrap in the address space:\n"
941ea63a7f5SSilviu Baranga                    << "LAA:   Pointer: " << *Ptr << "\n"
942ea63a7f5SSilviu Baranga                    << "LAA:   SCEV: " << *AR << "\n"
943ea63a7f5SSilviu Baranga                    << "LAA:   Added an overflow assumption\n");
944ea63a7f5SSilviu Baranga     } else {
945339f42b3SAdam Nemet       DEBUG(dbgs() << "LAA: Bad stride - Pointer may wrap in the address space "
946ea63a7f5SSilviu Baranga                    << *Ptr << " SCEV: " << *AR << "\n");
9470456327cSAdam Nemet       return 0;
9480456327cSAdam Nemet     }
949ea63a7f5SSilviu Baranga   }
9500456327cSAdam Nemet 
9510456327cSAdam Nemet   // Check the step is constant.
9529cd9a7e3SSilviu Baranga   const SCEV *Step = AR->getStepRecurrence(*PSE.getSE());
9530456327cSAdam Nemet 
954943befedSAdam Nemet   // Calculate the pointer stride and check if it is constant.
9550456327cSAdam Nemet   const SCEVConstant *C = dyn_cast<SCEVConstant>(Step);
9560456327cSAdam Nemet   if (!C) {
957339f42b3SAdam Nemet     DEBUG(dbgs() << "LAA: Bad stride - Not a constant strided " << *Ptr <<
958ea63a7f5SSilviu Baranga           " SCEV: " << *AR << "\n");
9590456327cSAdam Nemet     return 0;
9600456327cSAdam Nemet   }
9610456327cSAdam Nemet 
962a28d91d8SMehdi Amini   auto &DL = Lp->getHeader()->getModule()->getDataLayout();
963a28d91d8SMehdi Amini   int64_t Size = DL.getTypeAllocSize(PtrTy->getElementType());
9640de2feceSSanjoy Das   const APInt &APStepVal = C->getAPInt();
9650456327cSAdam Nemet 
9660456327cSAdam Nemet   // Huge step value - give up.
9670456327cSAdam Nemet   if (APStepVal.getBitWidth() > 64)
9680456327cSAdam Nemet     return 0;
9690456327cSAdam Nemet 
9700456327cSAdam Nemet   int64_t StepVal = APStepVal.getSExtValue();
9710456327cSAdam Nemet 
9720456327cSAdam Nemet   // Strided access.
9730456327cSAdam Nemet   int64_t Stride = StepVal / Size;
9740456327cSAdam Nemet   int64_t Rem = StepVal % Size;
9750456327cSAdam Nemet   if (Rem)
9760456327cSAdam Nemet     return 0;
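  // For example (illustrative): an i32 access A[2*i] has an AddRec step of 8
  // bytes and an element size of 4, giving Stride = 2; a step of 6 bytes over
  // i32 elements leaves a remainder and is rejected above.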
9770456327cSAdam Nemet 
9780456327cSAdam Nemet   // If the SCEV could wrap but we have an inbounds gep with a unit stride we
9790456327cSAdam Nemet   // know we can't "wrap around the address space". In case of address space
9800456327cSAdam Nemet   // zero we know that this won't happen without triggering undefined behavior.
9810456327cSAdam Nemet   if (!IsNoWrapAddRec && (IsInBoundsGEP || IsInAddressSpaceZero) &&
982ea63a7f5SSilviu Baranga       Stride != 1 && Stride != -1) {
983ea63a7f5SSilviu Baranga     if (Assume) {
984ea63a7f5SSilviu Baranga       // We can avoid this case by adding a run-time check.
985ea63a7f5SSilviu Baranga       DEBUG(dbgs() << "LAA: Non unit strided pointer which is neither "
986ea63a7f5SSilviu Baranga                    << "inbounds nor in address space 0 may wrap:\n"
987ea63a7f5SSilviu Baranga                    << "LAA:   Pointer: " << *Ptr << "\n"
988ea63a7f5SSilviu Baranga                    << "LAA:   SCEV: " << *AR << "\n"
989ea63a7f5SSilviu Baranga                    << "LAA:   Added an overflow assumption\n");
990ea63a7f5SSilviu Baranga       PSE.setNoOverflow(Ptr, SCEVWrapPredicate::IncrementNUSW);
991ea63a7f5SSilviu Baranga     } else
9920456327cSAdam Nemet       return 0;
993ea63a7f5SSilviu Baranga   }
9940456327cSAdam Nemet 
9950456327cSAdam Nemet   return Stride;
9960456327cSAdam Nemet }
9970456327cSAdam Nemet 
998f1c00a22SHaicheng Wu /// Take the pointer operand from the Load/Store instruction.
999f1c00a22SHaicheng Wu /// Returns NULL if this is not a valid Load/Store instruction.
1000f1c00a22SHaicheng Wu static Value *getPointerOperand(Value *I) {
10018b401013SDavid Majnemer   if (auto *LI = dyn_cast<LoadInst>(I))
1002f1c00a22SHaicheng Wu     return LI->getPointerOperand();
10038b401013SDavid Majnemer   if (auto *SI = dyn_cast<StoreInst>(I))
1004f1c00a22SHaicheng Wu     return SI->getPointerOperand();
1005f1c00a22SHaicheng Wu   return nullptr;
1006f1c00a22SHaicheng Wu }
1007f1c00a22SHaicheng Wu 
1008f1c00a22SHaicheng Wu /// Take the address space operand from the Load/Store instruction.
1009f1c00a22SHaicheng Wu /// Returns -1 if this is not a valid Load/Store instruction.
1010f1c00a22SHaicheng Wu static unsigned getAddressSpaceOperand(Value *I) {
1011f1c00a22SHaicheng Wu   if (LoadInst *L = dyn_cast<LoadInst>(I))
1012f1c00a22SHaicheng Wu     return L->getPointerAddressSpace();
1013f1c00a22SHaicheng Wu   if (StoreInst *S = dyn_cast<StoreInst>(I))
1014f1c00a22SHaicheng Wu     return S->getPointerAddressSpace();
1015f1c00a22SHaicheng Wu   return -1;
1016f1c00a22SHaicheng Wu }
1017f1c00a22SHaicheng Wu 
1018f1c00a22SHaicheng Wu /// Returns true if the memory operations \p A and \p B are consecutive.
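/// For example (illustrative): two i32 loads from A[0] and A[1] are
/// consecutive because, after stripping constant offsets, both pointers are
/// based on A and the byte offset delta (4) equals the store size of i32.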
1019f1c00a22SHaicheng Wu bool llvm::isConsecutiveAccess(Value *A, Value *B, const DataLayout &DL,
1020f1c00a22SHaicheng Wu                                ScalarEvolution &SE, bool CheckType) {
1021f1c00a22SHaicheng Wu   Value *PtrA = getPointerOperand(A);
1022f1c00a22SHaicheng Wu   Value *PtrB = getPointerOperand(B);
1023f1c00a22SHaicheng Wu   unsigned ASA = getAddressSpaceOperand(A);
1024f1c00a22SHaicheng Wu   unsigned ASB = getAddressSpaceOperand(B);
1025f1c00a22SHaicheng Wu 
1026f1c00a22SHaicheng Wu   // Check that the address spaces match and that the pointers are valid.
1027f1c00a22SHaicheng Wu   if (!PtrA || !PtrB || (ASA != ASB))
1028f1c00a22SHaicheng Wu     return false;
1029f1c00a22SHaicheng Wu 
1030f1c00a22SHaicheng Wu   // Make sure that A and B are different pointers.
1031f1c00a22SHaicheng Wu   if (PtrA == PtrB)
1032f1c00a22SHaicheng Wu     return false;
1033f1c00a22SHaicheng Wu 
1034f1c00a22SHaicheng Wu   // Make sure that A and B have the same type if required.
1035f1c00a22SHaicheng Wu   if (CheckType && PtrA->getType() != PtrB->getType())
1036f1c00a22SHaicheng Wu     return false;
1037f1c00a22SHaicheng Wu 
1038f1c00a22SHaicheng Wu   unsigned PtrBitWidth = DL.getPointerSizeInBits(ASA);
1039f1c00a22SHaicheng Wu   Type *Ty = cast<PointerType>(PtrA->getType())->getElementType();
1040f1c00a22SHaicheng Wu   APInt Size(PtrBitWidth, DL.getTypeStoreSize(Ty));
1041f1c00a22SHaicheng Wu 
1042f1c00a22SHaicheng Wu   APInt OffsetA(PtrBitWidth, 0), OffsetB(PtrBitWidth, 0);
1043f1c00a22SHaicheng Wu   PtrA = PtrA->stripAndAccumulateInBoundsConstantOffsets(DL, OffsetA);
1044f1c00a22SHaicheng Wu   PtrB = PtrB->stripAndAccumulateInBoundsConstantOffsets(DL, OffsetB);
1045f1c00a22SHaicheng Wu 
1046f1c00a22SHaicheng Wu   //  OffsetDelta = OffsetB - OffsetA;
1047f1c00a22SHaicheng Wu   const SCEV *OffsetSCEVA = SE.getConstant(OffsetA);
1048f1c00a22SHaicheng Wu   const SCEV *OffsetSCEVB = SE.getConstant(OffsetB);
1049f1c00a22SHaicheng Wu   const SCEV *OffsetDeltaSCEV = SE.getMinusSCEV(OffsetSCEVB, OffsetSCEVA);
1050f1c00a22SHaicheng Wu   const SCEVConstant *OffsetDeltaC = dyn_cast<SCEVConstant>(OffsetDeltaSCEV);
1051f1c00a22SHaicheng Wu   const APInt &OffsetDelta = OffsetDeltaC->getAPInt();
1052f1c00a22SHaicheng Wu   // Check if they are based on the same pointer. That makes the offsets
1053f1c00a22SHaicheng Wu   // sufficient.
1054f1c00a22SHaicheng Wu   if (PtrA == PtrB)
1055f1c00a22SHaicheng Wu     return OffsetDelta == Size;
1056f1c00a22SHaicheng Wu 
1057f1c00a22SHaicheng Wu   // Compute the base pointer delta needed to make the final delta equal to
1058f1c00a22SHaicheng Wu   // the size.
1059f1c00a22SHaicheng Wu   // BaseDelta = Size - OffsetDelta;
1060f1c00a22SHaicheng Wu   const SCEV *SizeSCEV = SE.getConstant(Size);
1061f1c00a22SHaicheng Wu   const SCEV *BaseDelta = SE.getMinusSCEV(SizeSCEV, OffsetDeltaSCEV);
1062f1c00a22SHaicheng Wu 
1063f1c00a22SHaicheng Wu   // Otherwise, compute the distance between the base pointers using SCEV.
1064f1c00a22SHaicheng Wu   const SCEV *PtrSCEVA = SE.getSCEV(PtrA);
1065f1c00a22SHaicheng Wu   const SCEV *PtrSCEVB = SE.getSCEV(PtrB);
1066f1c00a22SHaicheng Wu   const SCEV *X = SE.getAddExpr(PtrSCEVA, BaseDelta);
1067f1c00a22SHaicheng Wu   return X == PtrSCEVB;
1068f1c00a22SHaicheng Wu }
1069f1c00a22SHaicheng Wu 
10709c926579SAdam Nemet bool MemoryDepChecker::Dependence::isSafeForVectorization(DepType Type) {
10719c926579SAdam Nemet   switch (Type) {
10729c926579SAdam Nemet   case NoDep:
10739c926579SAdam Nemet   case Forward:
10749c926579SAdam Nemet   case BackwardVectorizable:
10759c926579SAdam Nemet     return true;
10769c926579SAdam Nemet 
10779c926579SAdam Nemet   case Unknown:
10789c926579SAdam Nemet   case ForwardButPreventsForwarding:
10799c926579SAdam Nemet   case Backward:
10809c926579SAdam Nemet   case BackwardVectorizableButPreventsForwarding:
10819c926579SAdam Nemet     return false;
10829c926579SAdam Nemet   }
1083d388e930SDavid Majnemer   llvm_unreachable("unexpected DepType!");
10849c926579SAdam Nemet }
10859c926579SAdam Nemet 
1086397f5829SAdam Nemet bool MemoryDepChecker::Dependence::isBackward() const {
10879c926579SAdam Nemet   switch (Type) {
10889c926579SAdam Nemet   case NoDep:
10899c926579SAdam Nemet   case Forward:
10909c926579SAdam Nemet   case ForwardButPreventsForwarding:
1091397f5829SAdam Nemet   case Unknown:
10929c926579SAdam Nemet     return false;
10939c926579SAdam Nemet 
10949c926579SAdam Nemet   case BackwardVectorizable:
10959c926579SAdam Nemet   case Backward:
10969c926579SAdam Nemet   case BackwardVectorizableButPreventsForwarding:
10979c926579SAdam Nemet     return true;
10989c926579SAdam Nemet   }
1099d388e930SDavid Majnemer   llvm_unreachable("unexpected DepType!");
11009c926579SAdam Nemet }
11019c926579SAdam Nemet 
1102397f5829SAdam Nemet bool MemoryDepChecker::Dependence::isPossiblyBackward() const {
1103397f5829SAdam Nemet   return isBackward() || Type == Unknown;
1104397f5829SAdam Nemet }
1105397f5829SAdam Nemet 
1106397f5829SAdam Nemet bool MemoryDepChecker::Dependence::isForward() const {
1107397f5829SAdam Nemet   switch (Type) {
1108397f5829SAdam Nemet   case Forward:
1109397f5829SAdam Nemet   case ForwardButPreventsForwarding:
1110397f5829SAdam Nemet     return true;
1111397f5829SAdam Nemet 
1112397f5829SAdam Nemet   case NoDep:
1113397f5829SAdam Nemet   case Unknown:
1114397f5829SAdam Nemet   case BackwardVectorizable:
1115397f5829SAdam Nemet   case Backward:
1116397f5829SAdam Nemet   case BackwardVectorizableButPreventsForwarding:
1117397f5829SAdam Nemet     return false;
1118397f5829SAdam Nemet   }
1119397f5829SAdam Nemet   llvm_unreachable("unexpected DepType!");
1120397f5829SAdam Nemet }
1121397f5829SAdam Nemet 
11227afb46d3SDavid Majnemer bool MemoryDepChecker::couldPreventStoreLoadForward(uint64_t Distance,
11237afb46d3SDavid Majnemer                                                     uint64_t TypeByteSize) {
11240456327cSAdam Nemet   // If loads occur at a distance that is not a multiple of a feasible vector
11250456327cSAdam Nemet   // factor, store-load forwarding does not take place.
11260456327cSAdam Nemet   // Positive dependences might cause trouble because vectorizing them might
11270456327cSAdam Nemet   // prevent store-load forwarding, making vectorized code run a lot slower.
11280456327cSAdam Nemet   //   a[i] = a[i-3] ^ a[i-8];
11290456327cSAdam Nemet   //   The stores to a[i:i+1] don't align with the loads from a[i-3:i-2] and
11300456327cSAdam Nemet   //   hence on a typical architecture store-load forwarding does not take
11310456327cSAdam Nemet   //   place. Vectorizing in such cases does not make sense.
11320456327cSAdam Nemet   // Store-load forwarding distance.
1133884d313bSAdam Nemet 
1134884d313bSAdam Nemet   // After this many iterations store-to-load forwarding conflicts should not
1135884d313bSAdam Nemet   // cause any slowdowns.
11367afb46d3SDavid Majnemer   const uint64_t NumItersForStoreLoadThroughMemory = 8 * TypeByteSize;
11370456327cSAdam Nemet   // Maximum vector factor.
11387afb46d3SDavid Majnemer   uint64_t MaxVFWithoutSLForwardIssues = std::min(
11392c34ab51SAdam Nemet       VectorizerParams::MaxVectorWidth * TypeByteSize, MaxSafeDepDistBytes);
11400456327cSAdam Nemet 
1141884d313bSAdam Nemet   // Compute the smallest VF at which the store and load would be misaligned.
11427afb46d3SDavid Majnemer   for (uint64_t VF = 2 * TypeByteSize; VF <= MaxVFWithoutSLForwardIssues;
11439b5852aeSAdam Nemet        VF *= 2) {
1144884d313bSAdam Nemet     // If the number of vector iterations between the store and the load is
1145884d313bSAdam Nemet     // small we could incur conflicts.
1146884d313bSAdam Nemet     if (Distance % VF && Distance / VF < NumItersForStoreLoadThroughMemory) {
11479b5852aeSAdam Nemet       MaxVFWithoutSLForwardIssues = (VF >>= 1);
11480456327cSAdam Nemet       break;
11490456327cSAdam Nemet     }
11500456327cSAdam Nemet   }
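  // For instance (an illustrative walk-through of the loop above): with
  // TypeByteSize = 4 and Distance = 12, the first candidate VF is 8 bytes;
  // 12 % 8 != 0 and 12 / 8 = 1 < 32 iterations, so MaxVFWithoutSLForwardIssues
  // is halved to 4, which is below 2 * TypeByteSize, and a conflict is
  // reported below.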
11510456327cSAdam Nemet 
11520456327cSAdam Nemet   if (MaxVFWithoutSLForwardIssues < 2 * TypeByteSize) {
11539b5852aeSAdam Nemet     DEBUG(dbgs() << "LAA: Distance " << Distance
11549b5852aeSAdam Nemet                  << " that could cause a store-load forwarding conflict\n");
11550456327cSAdam Nemet     return true;
11560456327cSAdam Nemet   }
11570456327cSAdam Nemet 
11580456327cSAdam Nemet   if (MaxVFWithoutSLForwardIssues < MaxSafeDepDistBytes &&
1159f219c647SAdam Nemet       MaxVFWithoutSLForwardIssues !=
1160f219c647SAdam Nemet           VectorizerParams::MaxVectorWidth * TypeByteSize)
11610456327cSAdam Nemet     MaxSafeDepDistBytes = MaxVFWithoutSLForwardIssues;
11620456327cSAdam Nemet   return false;
11630456327cSAdam Nemet }
11640456327cSAdam Nemet 
1165751004a6SHao Liu /// \brief Check the dependence for two accesses with the same stride \p Stride.
1166751004a6SHao Liu /// \p Distance is the positive distance and \p TypeByteSize is the type size in
1167751004a6SHao Liu /// bytes.
1168751004a6SHao Liu ///
1169751004a6SHao Liu /// \returns true if they are independent.
11707afb46d3SDavid Majnemer static bool areStridedAccessesIndependent(uint64_t Distance, uint64_t Stride,
11717afb46d3SDavid Majnemer                                           uint64_t TypeByteSize) {
1172751004a6SHao Liu   assert(Stride > 1 && "The stride must be greater than 1");
1173751004a6SHao Liu   assert(TypeByteSize > 0 && "The type size in byte must be non-zero");
1174751004a6SHao Liu   assert(Distance > 0 && "The distance must be non-zero");
1175751004a6SHao Liu 
1176751004a6SHao Liu   // Skip if the distance is not a multiple of the type byte size.
1177751004a6SHao Liu   if (Distance % TypeByteSize)
1178751004a6SHao Liu     return false;
1179751004a6SHao Liu 
11807afb46d3SDavid Majnemer   uint64_t ScaledDist = Distance / TypeByteSize;
1181751004a6SHao Liu 
1182751004a6SHao Liu   // No dependence if the scaled distance is not a multiple of the stride.
1183751004a6SHao Liu   // E.g.
1184751004a6SHao Liu   //      for (i = 0; i < 1024 ; i += 4)
1185751004a6SHao Liu   //        A[i+2] = A[i] + 1;
1186751004a6SHao Liu   //
1187751004a6SHao Liu   // Two accesses in memory (scaled distance is 2, stride is 4):
1188751004a6SHao Liu   //     | A[0] |      |      |      | A[4] |      |      |      |
1189751004a6SHao Liu   //     |      |      | A[2] |      |      |      | A[6] |      |
1190751004a6SHao Liu   //
1191751004a6SHao Liu   // E.g.
1192751004a6SHao Liu   //      for (i = 0; i < 1024 ; i += 3)
1193751004a6SHao Liu   //        A[i+4] = A[i] + 1;
1194751004a6SHao Liu   //
1195751004a6SHao Liu   // Two accesses in memory (scaled distance is 4, stride is 3):
1196751004a6SHao Liu   //     | A[0] |      |      | A[3] |      |      | A[6] |      |      |
1197751004a6SHao Liu   //     |      |      |      |      | A[4] |      |      | A[7] |      |
1198751004a6SHao Liu   return ScaledDist % Stride;
1199751004a6SHao Liu }
1200751004a6SHao Liu 
12019c926579SAdam Nemet MemoryDepChecker::Dependence::DepType
12029c926579SAdam Nemet MemoryDepChecker::isDependent(const MemAccessInfo &A, unsigned AIdx,
12030456327cSAdam Nemet                               const MemAccessInfo &B, unsigned BIdx,
12048bc61df9SAdam Nemet                               const ValueToValueMap &Strides) {
12050456327cSAdam Nemet   assert (AIdx < BIdx && "Must pass arguments in program order");
12060456327cSAdam Nemet 
12070456327cSAdam Nemet   Value *APtr = A.getPointer();
12080456327cSAdam Nemet   Value *BPtr = B.getPointer();
12090456327cSAdam Nemet   bool AIsWrite = A.getInt();
12100456327cSAdam Nemet   bool BIsWrite = B.getInt();
12110456327cSAdam Nemet 
12120456327cSAdam Nemet   // Two reads are independent.
12130456327cSAdam Nemet   if (!AIsWrite && !BIsWrite)
12149c926579SAdam Nemet     return Dependence::NoDep;
12150456327cSAdam Nemet 
12160456327cSAdam Nemet   // We cannot check pointers in different address spaces.
12170456327cSAdam Nemet   if (APtr->getType()->getPointerAddressSpace() !=
12180456327cSAdam Nemet       BPtr->getType()->getPointerAddressSpace())
12199c926579SAdam Nemet     return Dependence::Unknown;
12200456327cSAdam Nemet 
12217afb46d3SDavid Majnemer   int64_t StrideAPtr = getPtrStride(PSE, APtr, InnermostLoop, Strides, true);
12227afb46d3SDavid Majnemer   int64_t StrideBPtr = getPtrStride(PSE, BPtr, InnermostLoop, Strides, true);
12230456327cSAdam Nemet 
1224adf4b739SSilviu Baranga   const SCEV *Src = PSE.getSCEV(APtr);
1225adf4b739SSilviu Baranga   const SCEV *Sink = PSE.getSCEV(BPtr);
12260456327cSAdam Nemet 
12270456327cSAdam Nemet   // If the induction step is negative we have to invert source and sink of the
12280456327cSAdam Nemet   // dependence.
12290456327cSAdam Nemet   if (StrideAPtr < 0) {
12300456327cSAdam Nemet     std::swap(APtr, BPtr);
12310456327cSAdam Nemet     std::swap(Src, Sink);
12320456327cSAdam Nemet     std::swap(AIsWrite, BIsWrite);
12330456327cSAdam Nemet     std::swap(AIdx, BIdx);
12340456327cSAdam Nemet     std::swap(StrideAPtr, StrideBPtr);
12350456327cSAdam Nemet   }
12360456327cSAdam Nemet 
12379cd9a7e3SSilviu Baranga   const SCEV *Dist = PSE.getSE()->getMinusSCEV(Sink, Src);
12380456327cSAdam Nemet 
1239339f42b3SAdam Nemet   DEBUG(dbgs() << "LAA: Src Scev: " << *Src << "Sink Scev: " << *Sink
12400456327cSAdam Nemet                << "(Induction step: " << StrideAPtr << ")\n");
1241339f42b3SAdam Nemet   DEBUG(dbgs() << "LAA: Distance for " << *InstMap[AIdx] << " to "
12420456327cSAdam Nemet                << *InstMap[BIdx] << ": " << *Dist << "\n");
12430456327cSAdam Nemet 
1244943befedSAdam Nemet   // Need accesses with constant stride. We don't want to vectorize
12450456327cSAdam Nemet   // "A[B[i]] += ..." and similar code or pointer arithmetic that could wrap in
12460456327cSAdam Nemet   // the address space.
12470456327cSAdam Nemet   if (!StrideAPtr || !StrideBPtr || StrideAPtr != StrideBPtr){
1248943befedSAdam Nemet     DEBUG(dbgs() << "Pointer access with non-constant stride\n");
12499c926579SAdam Nemet     return Dependence::Unknown;
12500456327cSAdam Nemet   }
12510456327cSAdam Nemet 
12520456327cSAdam Nemet   const SCEVConstant *C = dyn_cast<SCEVConstant>(Dist);
12530456327cSAdam Nemet   if (!C) {
1254339f42b3SAdam Nemet     DEBUG(dbgs() << "LAA: Dependence because of non-constant distance\n");
12550456327cSAdam Nemet     ShouldRetryWithRuntimeCheck = true;
12569c926579SAdam Nemet     return Dependence::Unknown;
12570456327cSAdam Nemet   }
12580456327cSAdam Nemet 
12590456327cSAdam Nemet   Type *ATy = APtr->getType()->getPointerElementType();
12600456327cSAdam Nemet   Type *BTy = BPtr->getType()->getPointerElementType();
1261a28d91d8SMehdi Amini   auto &DL = InnermostLoop->getHeader()->getModule()->getDataLayout();
12627afb46d3SDavid Majnemer   uint64_t TypeByteSize = DL.getTypeAllocSize(ATy);
12630456327cSAdam Nemet 
12640de2feceSSanjoy Das   const APInt &Val = C->getAPInt();
12656feebe98SMatthew Simpson   int64_t Distance = Val.getSExtValue();
12667afb46d3SDavid Majnemer   uint64_t Stride = std::abs(StrideAPtr);
12676feebe98SMatthew Simpson 
12686feebe98SMatthew Simpson   // Attempt to prove strided accesses independent.
12696feebe98SMatthew Simpson   if (std::abs(Distance) > 0 && Stride > 1 && ATy == BTy &&
12706feebe98SMatthew Simpson       areStridedAccessesIndependent(std::abs(Distance), Stride, TypeByteSize)) {
12716feebe98SMatthew Simpson     DEBUG(dbgs() << "LAA: Strided accesses are independent\n");
12726feebe98SMatthew Simpson     return Dependence::NoDep;
12736feebe98SMatthew Simpson   }
12746feebe98SMatthew Simpson 
12756feebe98SMatthew Simpson   // Negative distances are not plausible dependencies.
12760456327cSAdam Nemet   if (Val.isNegative()) {
12770456327cSAdam Nemet     bool IsTrueDataDependence = (AIsWrite && !BIsWrite);
127837ec5f91SMatthew Simpson     if (IsTrueDataDependence && EnableForwardingConflictDetection &&
12790456327cSAdam Nemet         (couldPreventStoreLoadForward(Val.abs().getZExtValue(), TypeByteSize) ||
1280b8486e5aSAdam Nemet          ATy != BTy)) {
1281b8486e5aSAdam Nemet       DEBUG(dbgs() << "LAA: Forward but may prevent st->ld forwarding\n");
12829c926579SAdam Nemet       return Dependence::ForwardButPreventsForwarding;
1283b8486e5aSAdam Nemet     }
12840456327cSAdam Nemet 
1285724ab223SAdam Nemet     DEBUG(dbgs() << "LAA: Dependence is negative\n");
12869c926579SAdam Nemet     return Dependence::Forward;
12870456327cSAdam Nemet   }
12880456327cSAdam Nemet 
12890456327cSAdam Nemet   // Write to the same location with the same size.
12900456327cSAdam Nemet   // Could be improved to assert type sizes are the same (i32 == float, etc).
12910456327cSAdam Nemet   if (Val == 0) {
12920456327cSAdam Nemet     if (ATy == BTy)
1293d7037c56SAdam Nemet       return Dependence::Forward;
1294339f42b3SAdam Nemet     DEBUG(dbgs() << "LAA: Zero dependence difference but different types\n");
12959c926579SAdam Nemet     return Dependence::Unknown;
12960456327cSAdam Nemet   }
12970456327cSAdam Nemet 
12980456327cSAdam Nemet   assert(Val.isStrictlyPositive() && "Expect a positive value");
12990456327cSAdam Nemet 
13000456327cSAdam Nemet   if (ATy != BTy) {
130104d4163eSAdam Nemet     DEBUG(dbgs() <<
1302339f42b3SAdam Nemet           "LAA: ReadWrite-Write positive dependency with different types\n");
13039c926579SAdam Nemet     return Dependence::Unknown;
13040456327cSAdam Nemet   }
13050456327cSAdam Nemet 
13060456327cSAdam Nemet   // Bail out early if passed-in parameters make vectorization not feasible.
1307f219c647SAdam Nemet   unsigned ForcedFactor = (VectorizerParams::VectorizationFactor ?
1308f219c647SAdam Nemet                            VectorizerParams::VectorizationFactor : 1);
1309f219c647SAdam Nemet   unsigned ForcedUnroll = (VectorizerParams::VectorizationInterleave ?
1310f219c647SAdam Nemet                            VectorizerParams::VectorizationInterleave : 1);
1311751004a6SHao Liu   // The minimum number of iterations for a vectorized/unrolled version.
1312751004a6SHao Liu   unsigned MinNumIter = std::max(ForcedFactor * ForcedUnroll, 2U);
13130456327cSAdam Nemet 
1314751004a6SHao Liu   // It's not vectorizable if the distance is smaller than the minimum distance
1315751004a6SHao Liu   // needed for a vectorized/unrolled version. Vectorizing one iteration in
1316751004a6SHao Liu   // front needs TypeByteSize * Stride. Vectorizing the last iteration needs
1317751004a6SHao Liu   // TypeByteSize (no need to add the last gap distance).
1318751004a6SHao Liu   //
1319751004a6SHao Liu   // E.g. Assume one char is 1 byte in memory and one int is 4 bytes.
1320751004a6SHao Liu   //      foo(int *A) {
1321751004a6SHao Liu   //        int *B = (int *)((char *)A + 14);
1322751004a6SHao Liu   //        for (i = 0 ; i < 1024 ; i += 2)
1323751004a6SHao Liu   //          B[i] = A[i] + 1;
1324751004a6SHao Liu   //      }
1325751004a6SHao Liu   //
1326751004a6SHao Liu   // Two accesses in memory (stride is 2):
1327751004a6SHao Liu   //     | A[0] |      | A[2] |      | A[4] |      | A[6] |      |
1328751004a6SHao Liu   //                              | B[0] |      | B[2] |      | B[4] |
1329751004a6SHao Liu   //
1330751004a6SHao Liu   // Distance needed for vectorizing iterations except the last iteration:
1331751004a6SHao Liu   // 4 * 2 * (MinNumIter - 1). Distance needed for the last iteration: 4.
1332751004a6SHao Liu   // So the minimum distance needed is: 4 * 2 * (MinNumIter - 1) + 4.
1333751004a6SHao Liu   //
1334751004a6SHao Liu   // If MinNumIter is 2, it is vectorizable as the minimum distance needed is
1335751004a6SHao Liu   // 12, which is less than distance.
1336751004a6SHao Liu   //
1337751004a6SHao Liu   // If MinNumIter is 4 (Say if a user forces the vectorization factor to be 4),
1338751004a6SHao Liu   // the minimum distance needed is 28, which is greater than distance. It is
1339751004a6SHao Liu   // not safe to do vectorization.
13407afb46d3SDavid Majnemer   uint64_t MinDistanceNeeded =
1341751004a6SHao Liu       TypeByteSize * Stride * (MinNumIter - 1) + TypeByteSize;
13427afb46d3SDavid Majnemer   if (MinDistanceNeeded > static_cast<uint64_t>(Distance)) {
1343751004a6SHao Liu     DEBUG(dbgs() << "LAA: Failure because of positive distance " << Distance
1344751004a6SHao Liu                  << '\n');
1345751004a6SHao Liu     return Dependence::Backward;
1346751004a6SHao Liu   }
1347751004a6SHao Liu 
1348751004a6SHao Liu   // Unsafe if the minimum distance needed is greater than max safe distance.
1349751004a6SHao Liu   if (MinDistanceNeeded > MaxSafeDepDistBytes) {
1350751004a6SHao Liu     DEBUG(dbgs() << "LAA: Failure because it needs at least "
1351751004a6SHao Liu                  << MinDistanceNeeded << " size in bytes");
13529c926579SAdam Nemet     return Dependence::Backward;
13530456327cSAdam Nemet   }
13540456327cSAdam Nemet 
13559cc0c399SAdam Nemet   // Positive distance bigger than max vectorization factor.
1356751004a6SHao Liu   // FIXME: Should use max factor instead of max distance in bytes, which
1357751004a6SHao Liu   // cannot handle different types.
1358751004a6SHao Liu   // E.g. Assume one char is 1 byte in memory and one int is 4 bytes.
1359751004a6SHao Liu   //      void foo (int *A, char *B) {
1360751004a6SHao Liu   //        for (unsigned i = 0; i < 1024; i++) {
1361751004a6SHao Liu   //          A[i+2] = A[i] + 1;
1362751004a6SHao Liu   //          B[i+2] = B[i] + 1;
1363751004a6SHao Liu   //        }
1364751004a6SHao Liu   //      }
1365751004a6SHao Liu   //
1366751004a6SHao Liu   // This case is currently unsafe according to the max safe distance. If we
1367751004a6SHao Liu   // analyze the two accesses on array B, the max safe dependence distance
1368751004a6SHao Liu   // is 2. Then we analyze the accesses on array A: the minimum distance needed
1369751004a6SHao Liu   // is 8, which is greater than 2, so vectorization is forbidden. But actually
1370751004a6SHao Liu   // both A and B could be vectorized with a factor of 2.
1371751004a6SHao Liu   MaxSafeDepDistBytes =
13727afb46d3SDavid Majnemer       std::min(static_cast<uint64_t>(Distance), MaxSafeDepDistBytes);
13730456327cSAdam Nemet 
13740456327cSAdam Nemet   bool IsTrueDataDependence = (!AIsWrite && BIsWrite);
137537ec5f91SMatthew Simpson   if (IsTrueDataDependence && EnableForwardingConflictDetection &&
13760456327cSAdam Nemet       couldPreventStoreLoadForward(Distance, TypeByteSize))
13779c926579SAdam Nemet     return Dependence::BackwardVectorizableButPreventsForwarding;
13780456327cSAdam Nemet 
1379751004a6SHao Liu   DEBUG(dbgs() << "LAA: Positive distance " << Val.getSExtValue()
1380751004a6SHao Liu                << " with max VF = "
1381751004a6SHao Liu                << MaxSafeDepDistBytes / (TypeByteSize * Stride) << '\n');
13820456327cSAdam Nemet 
13839c926579SAdam Nemet   return Dependence::BackwardVectorizable;
13840456327cSAdam Nemet }
13850456327cSAdam Nemet 
1386dee666bcSAdam Nemet bool MemoryDepChecker::areDepsSafe(DepCandidates &AccessSets,
13870456327cSAdam Nemet                                    MemAccessInfoSet &CheckDeps,
13888bc61df9SAdam Nemet                                    const ValueToValueMap &Strides) {
13890456327cSAdam Nemet 
13907afb46d3SDavid Majnemer   MaxSafeDepDistBytes = -1;
13910456327cSAdam Nemet   while (!CheckDeps.empty()) {
13920456327cSAdam Nemet     MemAccessInfo CurAccess = *CheckDeps.begin();
13930456327cSAdam Nemet 
13940456327cSAdam Nemet     // Get the relevant memory access set.
13950456327cSAdam Nemet     EquivalenceClasses<MemAccessInfo>::iterator I =
13960456327cSAdam Nemet       AccessSets.findValue(AccessSets.getLeaderValue(CurAccess));
13970456327cSAdam Nemet 
13980456327cSAdam Nemet     // Check accesses within this set.
13997a083814SRichard Trieu     EquivalenceClasses<MemAccessInfo>::member_iterator AI =
14007a083814SRichard Trieu         AccessSets.member_begin(I);
14017a083814SRichard Trieu     EquivalenceClasses<MemAccessInfo>::member_iterator AE =
14027a083814SRichard Trieu         AccessSets.member_end();
14030456327cSAdam Nemet 
14040456327cSAdam Nemet     // Check every access pair.
14050456327cSAdam Nemet     while (AI != AE) {
14060456327cSAdam Nemet       CheckDeps.erase(*AI);
14070456327cSAdam Nemet       EquivalenceClasses<MemAccessInfo>::member_iterator OI = std::next(AI);
14080456327cSAdam Nemet       while (OI != AE) {
14090456327cSAdam Nemet         // Check every accessing instruction pair in program order.
14100456327cSAdam Nemet         for (std::vector<unsigned>::iterator I1 = Accesses[*AI].begin(),
14110456327cSAdam Nemet              I1E = Accesses[*AI].end(); I1 != I1E; ++I1)
14120456327cSAdam Nemet           for (std::vector<unsigned>::iterator I2 = Accesses[*OI].begin(),
14130456327cSAdam Nemet                I2E = Accesses[*OI].end(); I2 != I2E; ++I2) {
14149c926579SAdam Nemet             auto A = std::make_pair(&*AI, *I1);
14159c926579SAdam Nemet             auto B = std::make_pair(&*OI, *I2);
14169c926579SAdam Nemet 
14179c926579SAdam Nemet             assert(*I1 != *I2);
14189c926579SAdam Nemet             if (*I1 > *I2)
14199c926579SAdam Nemet               std::swap(A, B);
14209c926579SAdam Nemet 
14219c926579SAdam Nemet             Dependence::DepType Type =
14229c926579SAdam Nemet                 isDependent(*A.first, A.second, *B.first, B.second, Strides);
14239c926579SAdam Nemet             SafeForVectorization &= Dependence::isSafeForVectorization(Type);
14249c926579SAdam Nemet 
1425a2df750fSAdam Nemet             // Gather dependences unless we accumulated MaxDependences
14269c926579SAdam Nemet             // dependences.  In that case return as soon as we find the first
14279c926579SAdam Nemet             // unsafe dependence.  This puts a limit on this quadratic
14289c926579SAdam Nemet             // algorithm.
1429a2df750fSAdam Nemet             if (RecordDependences) {
1430a2df750fSAdam Nemet               if (Type != Dependence::NoDep)
1431a2df750fSAdam Nemet                 Dependences.push_back(Dependence(A.second, B.second, Type));
14329c926579SAdam Nemet 
1433a2df750fSAdam Nemet               if (Dependences.size() >= MaxDependences) {
1434a2df750fSAdam Nemet                 RecordDependences = false;
1435a2df750fSAdam Nemet                 Dependences.clear();
14369c926579SAdam Nemet                 DEBUG(dbgs() << "Too many dependences, stopped recording\n");
14379c926579SAdam Nemet               }
14389c926579SAdam Nemet             }
1439a2df750fSAdam Nemet             if (!RecordDependences && !SafeForVectorization)
14400456327cSAdam Nemet               return false;
14410456327cSAdam Nemet           }
14420456327cSAdam Nemet         ++OI;
14430456327cSAdam Nemet       }
14440456327cSAdam Nemet       AI++;
14450456327cSAdam Nemet     }
14460456327cSAdam Nemet   }
14479c926579SAdam Nemet 
1448a2df750fSAdam Nemet   DEBUG(dbgs() << "Total Dependences: " << Dependences.size() << "\n");
14499c926579SAdam Nemet   return SafeForVectorization;
14500456327cSAdam Nemet }
14510456327cSAdam Nemet 
1452ec1e2bb6SAdam Nemet SmallVector<Instruction *, 4>
1453ec1e2bb6SAdam Nemet MemoryDepChecker::getInstructionsForAccess(Value *Ptr, bool isWrite) const {
1454ec1e2bb6SAdam Nemet   MemAccessInfo Access(Ptr, isWrite);
1455ec1e2bb6SAdam Nemet   auto &IndexVector = Accesses.find(Access)->second;
1456ec1e2bb6SAdam Nemet 
1457ec1e2bb6SAdam Nemet   SmallVector<Instruction *, 4> Insts;
14582d006e76SDavid Majnemer   transform(IndexVector,
1459ec1e2bb6SAdam Nemet             std::back_inserter(Insts),
1460ec1e2bb6SAdam Nemet             [&](unsigned Idx) { return this->InstMap[Idx]; });
1461ec1e2bb6SAdam Nemet   return Insts;
1462ec1e2bb6SAdam Nemet }
1463ec1e2bb6SAdam Nemet 
146458913d65SAdam Nemet const char *MemoryDepChecker::Dependence::DepName[] = {
146558913d65SAdam Nemet     "NoDep", "Unknown", "Forward", "ForwardButPreventsForwarding", "Backward",
146658913d65SAdam Nemet     "BackwardVectorizable", "BackwardVectorizableButPreventsForwarding"};
146758913d65SAdam Nemet 
146858913d65SAdam Nemet void MemoryDepChecker::Dependence::print(
146958913d65SAdam Nemet     raw_ostream &OS, unsigned Depth,
147058913d65SAdam Nemet     const SmallVectorImpl<Instruction *> &Instrs) const {
147158913d65SAdam Nemet   OS.indent(Depth) << DepName[Type] << ":\n";
147258913d65SAdam Nemet   OS.indent(Depth + 2) << *Instrs[Source] << " -> \n";
147358913d65SAdam Nemet   OS.indent(Depth + 2) << *Instrs[Destination] << "\n";
147458913d65SAdam Nemet }
147558913d65SAdam Nemet 
1476929c38e8SAdam Nemet bool LoopAccessInfo::canAnalyzeLoop() {
14778dcb3b6aSAdam Nemet   // We need to have a loop header.
1478d8968f09SAdam Nemet   DEBUG(dbgs() << "LAA: Found a loop in "
1479d8968f09SAdam Nemet                << TheLoop->getHeader()->getParent()->getName() << ": "
1480d8968f09SAdam Nemet                << TheLoop->getHeader()->getName() << '\n');
14818dcb3b6aSAdam Nemet 
1482929c38e8SAdam Nemet   // We can only analyze innermost loops.
1483929c38e8SAdam Nemet   if (!TheLoop->empty()) {
14848dcb3b6aSAdam Nemet     DEBUG(dbgs() << "LAA: loop is not the innermost loop\n");
1485877ccee8SAdam Nemet     recordAnalysis("NotInnerMostLoop") << "loop is not the innermost loop";
1486929c38e8SAdam Nemet     return false;
1487929c38e8SAdam Nemet   }
1488929c38e8SAdam Nemet 
1489929c38e8SAdam Nemet   // We must have a single backedge.
1490929c38e8SAdam Nemet   if (TheLoop->getNumBackEdges() != 1) {
14918dcb3b6aSAdam Nemet     DEBUG(dbgs() << "LAA: loop control flow is not understood by analyzer\n");
1492877ccee8SAdam Nemet     recordAnalysis("CFGNotUnderstood")
1493877ccee8SAdam Nemet         << "loop control flow is not understood by analyzer";
1494929c38e8SAdam Nemet     return false;
1495929c38e8SAdam Nemet   }
1496929c38e8SAdam Nemet 
1497929c38e8SAdam Nemet   // We must have a single exiting block.
1498929c38e8SAdam Nemet   if (!TheLoop->getExitingBlock()) {
14998dcb3b6aSAdam Nemet     DEBUG(dbgs() << "LAA: loop control flow is not understood by analyzer\n");
1500877ccee8SAdam Nemet     recordAnalysis("CFGNotUnderstood")
1501877ccee8SAdam Nemet         << "loop control flow is not understood by analyzer";
1502929c38e8SAdam Nemet     return false;
1503929c38e8SAdam Nemet   }
1504929c38e8SAdam Nemet 
1505929c38e8SAdam Nemet   // We only handle bottom-tested loops, i.e. loops in which the condition is
1506929c38e8SAdam Nemet   // checked at the end of each iteration. With that we can assume that all
1507929c38e8SAdam Nemet   // instructions in the loop are executed the same number of times.
1508929c38e8SAdam Nemet   if (TheLoop->getExitingBlock() != TheLoop->getLoopLatch()) {
15098dcb3b6aSAdam Nemet     DEBUG(dbgs() << "LAA: loop control flow is not understood by analyzer\n");
1510877ccee8SAdam Nemet     recordAnalysis("CFGNotUnderstood")
1511877ccee8SAdam Nemet         << "loop control flow is not understood by analyzer";
1512929c38e8SAdam Nemet     return false;
1513929c38e8SAdam Nemet   }
1514929c38e8SAdam Nemet 
1515929c38e8SAdam Nemet   // ScalarEvolution needs to be able to find the exit count.
151694734eefSXinliang David Li   const SCEV *ExitCount = PSE->getBackedgeTakenCount();
151794734eefSXinliang David Li   if (ExitCount == PSE->getSE()->getCouldNotCompute()) {
1518877ccee8SAdam Nemet     recordAnalysis("CantComputeNumberOfIterations")
1519877ccee8SAdam Nemet         << "could not determine number of loop iterations";
1520929c38e8SAdam Nemet     DEBUG(dbgs() << "LAA: SCEV could not compute the loop exit count.\n");
1521929c38e8SAdam Nemet     return false;
1522929c38e8SAdam Nemet   }
1523929c38e8SAdam Nemet 
1524929c38e8SAdam Nemet   return true;
1525929c38e8SAdam Nemet }
1526929c38e8SAdam Nemet 
1527b49d9a56SAdam Nemet void LoopAccessInfo::analyzeLoop(AliasAnalysis *AA, LoopInfo *LI,
15287da74abfSAdam Nemet                                  const TargetLibraryInfo *TLI,
15297da74abfSAdam Nemet                                  DominatorTree *DT) {
15300456327cSAdam Nemet   typedef SmallPtrSet<Value*, 16> ValueSet;
15310456327cSAdam Nemet 
1532e3e3b994SMatthew Simpson   // Holds the Load and Store instructions.
1533e3e3b994SMatthew Simpson   SmallVector<LoadInst *, 16> Loads;
1534e3e3b994SMatthew Simpson   SmallVector<StoreInst *, 16> Stores;
15350456327cSAdam Nemet 
15360456327cSAdam Nemet   // Holds all the different accesses in the loop.
15370456327cSAdam Nemet   unsigned NumReads = 0;
15380456327cSAdam Nemet   unsigned NumReadWrites = 0;
15390456327cSAdam Nemet 
1540ce030acbSXinliang David Li   PtrRtChecking->Pointers.clear();
1541ce030acbSXinliang David Li   PtrRtChecking->Need = false;
15420456327cSAdam Nemet 
15430456327cSAdam Nemet   const bool IsAnnotatedParallel = TheLoop->isAnnotatedParallel();
15440456327cSAdam Nemet 
15450456327cSAdam Nemet   // For each block.
15468b401013SDavid Majnemer   for (BasicBlock *BB : TheLoop->blocks()) {
15470456327cSAdam Nemet     // Scan the BB and collect legal loads and stores.
15488b401013SDavid Majnemer     for (Instruction &I : *BB) {
15490456327cSAdam Nemet       // If this is a load, save it. If this instruction can read from memory
15500456327cSAdam Nemet       // but is not a load, then we quit. Notice that we don't handle function
15510456327cSAdam Nemet       // calls that read or write.
15528b401013SDavid Majnemer       if (I.mayReadFromMemory()) {
15530456327cSAdam Nemet         // Many math library functions read the rounding mode. We will only
15540456327cSAdam Nemet         // vectorize a loop if it contains known function calls that don't set
15550456327cSAdam Nemet         // the flag. Therefore, it is safe to ignore this read from memory.
15568b401013SDavid Majnemer         auto *Call = dyn_cast<CallInst>(&I);
1557b4b27230SDavid Majnemer         if (Call && getVectorIntrinsicIDForCall(Call, TLI))
15580456327cSAdam Nemet           continue;
15590456327cSAdam Nemet 
15609b3cf604SMichael Zolotukhin         // If the function has an explicit vectorized counterpart, we can safely
15619b3cf604SMichael Zolotukhin         // assume that it can be vectorized.
15629b3cf604SMichael Zolotukhin         if (Call && !Call->isNoBuiltin() && Call->getCalledFunction() &&
15639b3cf604SMichael Zolotukhin             TLI->isFunctionVectorizable(Call->getCalledFunction()->getName()))
15649b3cf604SMichael Zolotukhin           continue;
15659b3cf604SMichael Zolotukhin 
15668b401013SDavid Majnemer         auto *Ld = dyn_cast<LoadInst>(&I);
15670456327cSAdam Nemet         if (!Ld || (!Ld->isSimple() && !IsAnnotatedParallel)) {
1568877ccee8SAdam Nemet           recordAnalysis("NonSimpleLoad", Ld)
1569877ccee8SAdam Nemet               << "read with atomic ordering or volatile read";
1570339f42b3SAdam Nemet           DEBUG(dbgs() << "LAA: Found a non-simple load.\n");
1571436018c3SAdam Nemet           CanVecMem = false;
1572436018c3SAdam Nemet           return;
15730456327cSAdam Nemet         }
15740456327cSAdam Nemet         NumLoads++;
15750456327cSAdam Nemet         Loads.push_back(Ld);
1576ce030acbSXinliang David Li         DepChecker->addAccess(Ld);
1577a9f09c62SAdam Nemet         if (EnableMemAccessVersioning)
1578c953bb99SAdam Nemet           collectStridedAccess(Ld);
15790456327cSAdam Nemet         continue;
15800456327cSAdam Nemet       }
15810456327cSAdam Nemet 
15820456327cSAdam Nemet       // Save 'store' instructions. Abort if other instructions write to memory.
15838b401013SDavid Majnemer       if (I.mayWriteToMemory()) {
15848b401013SDavid Majnemer         auto *St = dyn_cast<StoreInst>(&I);
15850456327cSAdam Nemet         if (!St) {
1586877ccee8SAdam Nemet           recordAnalysis("CantVectorizeInstruction", St)
1587877ccee8SAdam Nemet               << "instruction cannot be vectorized";
1588436018c3SAdam Nemet           CanVecMem = false;
1589436018c3SAdam Nemet           return;
15900456327cSAdam Nemet         }
15910456327cSAdam Nemet         if (!St->isSimple() && !IsAnnotatedParallel) {
1592877ccee8SAdam Nemet           recordAnalysis("NonSimpleStore", St)
1593877ccee8SAdam Nemet               << "write with atomic ordering or volatile write";
1594339f42b3SAdam Nemet           DEBUG(dbgs() << "LAA: Found a non-simple store.\n");
1595436018c3SAdam Nemet           CanVecMem = false;
1596436018c3SAdam Nemet           return;
15970456327cSAdam Nemet         }
15980456327cSAdam Nemet         NumStores++;
15990456327cSAdam Nemet         Stores.push_back(St);
1600ce030acbSXinliang David Li         DepChecker->addAccess(St);
1601a9f09c62SAdam Nemet         if (EnableMemAccessVersioning)
1602c953bb99SAdam Nemet           collectStridedAccess(St);
16030456327cSAdam Nemet       }
16040456327cSAdam Nemet     } // Next instr.
16050456327cSAdam Nemet   } // Next block.
16060456327cSAdam Nemet 
16070456327cSAdam Nemet   // Now we have two lists that hold the loads and the stores.
16080456327cSAdam Nemet   // Next, we find the pointers that they use.
16090456327cSAdam Nemet 
16100456327cSAdam Nemet   // Check if we see any stores. If there are no stores, then we don't
16110456327cSAdam Nemet   // care if the pointers are *restrict*.
16120456327cSAdam Nemet   if (!Stores.size()) {
1613339f42b3SAdam Nemet     DEBUG(dbgs() << "LAA: Found a read-only loop!\n");
1614436018c3SAdam Nemet     CanVecMem = true;
1615436018c3SAdam Nemet     return;
16160456327cSAdam Nemet   }
16170456327cSAdam Nemet 
1618dee666bcSAdam Nemet   MemoryDepChecker::DepCandidates DependentAccesses;
1619a28d91d8SMehdi Amini   AccessAnalysis Accesses(TheLoop->getHeader()->getModule()->getDataLayout(),
162094734eefSXinliang David Li                           AA, LI, DependentAccesses, *PSE);
16210456327cSAdam Nemet 
16220456327cSAdam Nemet   // Holds the analyzed pointers. We don't want to call GetUnderlyingObjects
16230456327cSAdam Nemet   // multiple times on the same object. If the ptr is accessed twice, once
16240456327cSAdam Nemet   // for read and once for write, it will only appear once (on the write
16250456327cSAdam Nemet   // list). This is okay, since we are going to check for conflicts between
16260456327cSAdam Nemet   // writes and between reads and writes, but not between reads and reads.
16270456327cSAdam Nemet   ValueSet Seen;
16280456327cSAdam Nemet 
1629e3e3b994SMatthew Simpson   for (StoreInst *ST : Stores) {
16300456327cSAdam Nemet     Value *Ptr = ST->getPointerOperand();
1631ce48250fSAdam Nemet     // Check for store to loop invariant address.
1632ce48250fSAdam Nemet     StoreToLoopInvariantAddress |= isUniform(Ptr);
16330456327cSAdam Nemet     // If we did *not* see this pointer before, insert it into the read-write
16340456327cSAdam Nemet     // list. At this phase it is only a 'write' list.
16350456327cSAdam Nemet     if (Seen.insert(Ptr).second) {
16360456327cSAdam Nemet       ++NumReadWrites;
16370456327cSAdam Nemet 
1638ac80dc75SChandler Carruth       MemoryLocation Loc = MemoryLocation::get(ST);
16390456327cSAdam Nemet       // The TBAA metadata could have a control dependency on the predication
16400456327cSAdam Nemet       // condition, so we cannot rely on it when determining whether or not we
16410456327cSAdam Nemet       // need runtime pointer checks.
164201abb2c3SAdam Nemet       if (blockNeedsPredication(ST->getParent(), TheLoop, DT))
16430456327cSAdam Nemet         Loc.AATags.TBAA = nullptr;
16440456327cSAdam Nemet 
16450456327cSAdam Nemet       Accesses.addStore(Loc);
16460456327cSAdam Nemet     }
16470456327cSAdam Nemet   }
16480456327cSAdam Nemet 
16490456327cSAdam Nemet   if (IsAnnotatedParallel) {
165004d4163eSAdam Nemet     DEBUG(dbgs()
1651339f42b3SAdam Nemet           << "LAA: A loop annotated parallel, ignore memory dependency "
16520456327cSAdam Nemet           << "checks.\n");
1653436018c3SAdam Nemet     CanVecMem = true;
1654436018c3SAdam Nemet     return;
16550456327cSAdam Nemet   }
16560456327cSAdam Nemet 
1657e3e3b994SMatthew Simpson   for (LoadInst *LD : Loads) {
16580456327cSAdam Nemet     Value *Ptr = LD->getPointerOperand();
16590456327cSAdam Nemet     // If we did *not* see this pointer before, insert it into the
16600456327cSAdam Nemet     // read list. If we *did* see it before, then it is already in
16610456327cSAdam Nemet     // the read-write list. This allows us to vectorize expressions
16620456327cSAdam Nemet     // such as A[i] += x, because the address of A[i] is a read-write
16630456327cSAdam Nemet     // pointer. This only works if the index of A[i] is consecutive.
16640456327cSAdam Nemet     // If the index is unknown (for example A[B[i]]) then we may
16650456327cSAdam Nemet     // read a few words, modify, and write a few words, and some of the
16660456327cSAdam Nemet     // words may be written to the same address.
16670456327cSAdam Nemet     bool IsReadOnlyPtr = false;
1668139ffba3SAdam Nemet     if (Seen.insert(Ptr).second ||
166994734eefSXinliang David Li         !getPtrStride(*PSE, Ptr, TheLoop, SymbolicStrides)) {
16700456327cSAdam Nemet       ++NumReads;
16710456327cSAdam Nemet       IsReadOnlyPtr = true;
16720456327cSAdam Nemet     }
16730456327cSAdam Nemet 
1674ac80dc75SChandler Carruth     MemoryLocation Loc = MemoryLocation::get(LD);
16750456327cSAdam Nemet     // The TBAA metadata could have a control dependency on the predication
16760456327cSAdam Nemet     // condition, so we cannot rely on it when determining whether or not we
16770456327cSAdam Nemet     // need runtime pointer checks.
167801abb2c3SAdam Nemet     if (blockNeedsPredication(LD->getParent(), TheLoop, DT))
16790456327cSAdam Nemet       Loc.AATags.TBAA = nullptr;
16800456327cSAdam Nemet 
16810456327cSAdam Nemet     Accesses.addLoad(Loc, IsReadOnlyPtr);
16820456327cSAdam Nemet   }
16830456327cSAdam Nemet 
16840456327cSAdam Nemet   // If we write (or read-write) to a single destination and there are no
16850456327cSAdam Nemet   // other reads in this loop, then it is safe to vectorize.
16860456327cSAdam Nemet   if (NumReadWrites == 1 && NumReads == 0) {
1687339f42b3SAdam Nemet     DEBUG(dbgs() << "LAA: Found a write-only loop!\n");
1688436018c3SAdam Nemet     CanVecMem = true;
1689436018c3SAdam Nemet     return;
16900456327cSAdam Nemet   }
16910456327cSAdam Nemet 
16920456327cSAdam Nemet   // Build dependence sets and check whether we need a runtime pointer bounds
16930456327cSAdam Nemet   // check.
16940456327cSAdam Nemet   Accesses.buildDependenceSets();
16950456327cSAdam Nemet 
16960456327cSAdam Nemet   // Find pointers with computable bounds. We are going to use this information
16970456327cSAdam Nemet   // to place a runtime bound check.
169894734eefSXinliang David Li   bool CanDoRTIfNeeded = Accesses.canCheckPtrAtRT(*PtrRtChecking, PSE->getSE(),
1699139ffba3SAdam Nemet                                                   TheLoop, SymbolicStrides);
1700ee61474aSAdam Nemet   if (!CanDoRTIfNeeded) {
1701877ccee8SAdam Nemet     recordAnalysis("CantIdentifyArrayBounds") << "cannot identify array bounds";
1702ee61474aSAdam Nemet     DEBUG(dbgs() << "LAA: We can't vectorize because we can't find "
1703ee61474aSAdam Nemet                  << "the array bounds.\n");
1704436018c3SAdam Nemet     CanVecMem = false;
1705436018c3SAdam Nemet     return;
17060456327cSAdam Nemet   }
17070456327cSAdam Nemet 
1708ee61474aSAdam Nemet   DEBUG(dbgs() << "LAA: We can perform a memory runtime check if needed.\n");
17090456327cSAdam Nemet 
1710436018c3SAdam Nemet   CanVecMem = true;
17110456327cSAdam Nemet   if (Accesses.isDependencyCheckNeeded()) {
1712339f42b3SAdam Nemet     DEBUG(dbgs() << "LAA: Checking memory dependencies\n");
1713ce030acbSXinliang David Li     CanVecMem = DepChecker->areDepsSafe(
1714139ffba3SAdam Nemet         DependentAccesses, Accesses.getDependenciesToCheck(), SymbolicStrides);
1715ce030acbSXinliang David Li     MaxSafeDepDistBytes = DepChecker->getMaxSafeDepDistBytes();
17160456327cSAdam Nemet 
1717ce030acbSXinliang David Li     if (!CanVecMem && DepChecker->shouldRetryWithRuntimeCheck()) {
1718339f42b3SAdam Nemet       DEBUG(dbgs() << "LAA: Retrying with memory checks\n");
17190456327cSAdam Nemet 
17200456327cSAdam Nemet       // Clear the dependency checks. We assume they are not needed.
1721ce030acbSXinliang David Li       Accesses.resetDepChecks(*DepChecker);
17220456327cSAdam Nemet 
1723ce030acbSXinliang David Li       PtrRtChecking->reset();
1724ce030acbSXinliang David Li       PtrRtChecking->Need = true;
17250456327cSAdam Nemet 
172694734eefSXinliang David Li       auto *SE = PSE->getSE();
1727ce030acbSXinliang David Li       CanDoRTIfNeeded = Accesses.canCheckPtrAtRT(*PtrRtChecking, SE, TheLoop,
1728139ffba3SAdam Nemet                                                  SymbolicStrides, true);
172998a13719SSilviu Baranga 
1730949e91a6SAdam Nemet       // Check that we found the bounds for the pointer.
1731ee61474aSAdam Nemet       if (!CanDoRTIfNeeded) {
1732877ccee8SAdam Nemet         recordAnalysis("CantCheckMemDepsAtRunTime")
1733877ccee8SAdam Nemet             << "cannot check memory dependencies at runtime";
1734b6dc76ffSAdam Nemet         DEBUG(dbgs() << "LAA: Can't vectorize with memory checks\n");
1735b6dc76ffSAdam Nemet         CanVecMem = false;
1736b6dc76ffSAdam Nemet         return;
1737b6dc76ffSAdam Nemet       }
1738b6dc76ffSAdam Nemet 
17390456327cSAdam Nemet       CanVecMem = true;
17400456327cSAdam Nemet     }
17410456327cSAdam Nemet   }
17420456327cSAdam Nemet 
17434bb90a71SAdam Nemet   if (CanVecMem)
17444bb90a71SAdam Nemet     DEBUG(dbgs() << "LAA: No unsafe dependent memory operations in loop.  We"
1745ce030acbSXinliang David Li                  << (PtrRtChecking->Need ? "" : " don't")
17460f67c6c1SAdam Nemet                  << " need runtime memory checks.\n");
17474bb90a71SAdam Nemet   else {
1748877ccee8SAdam Nemet     recordAnalysis("UnsafeMemDep")
17490a77dfadSAdam Nemet         << "unsafe dependent memory operations in loop. Use "
17500a77dfadSAdam Nemet            "#pragma loop distribute(enable) to allow loop distribution "
17510a77dfadSAdam Nemet            "to attempt to isolate the offending operations into a separate "
1752877ccee8SAdam Nemet            "loop";
17534bb90a71SAdam Nemet     DEBUG(dbgs() << "LAA: unsafe dependent memory operations in loop\n");
17544bb90a71SAdam Nemet   }
17550456327cSAdam Nemet }
17560456327cSAdam Nemet 
175701abb2c3SAdam Nemet bool LoopAccessInfo::blockNeedsPredication(BasicBlock *BB, Loop *TheLoop,
175801abb2c3SAdam Nemet                                            DominatorTree *DT)  {
17590456327cSAdam Nemet   assert(TheLoop->contains(BB) && "Unknown block used");
17600456327cSAdam Nemet 
17610456327cSAdam Nemet   // Blocks that do not dominate the latch need predication.
17620456327cSAdam Nemet   BasicBlock* Latch = TheLoop->getLoopLatch();
17630456327cSAdam Nemet   return !DT->dominates(BB, Latch);
17640456327cSAdam Nemet }
17650456327cSAdam Nemet 
1766877ccee8SAdam Nemet OptimizationRemarkAnalysis &LoopAccessInfo::recordAnalysis(StringRef RemarkName,
1767877ccee8SAdam Nemet                                                            Instruction *I) {
1768c922853bSAdam Nemet   assert(!Report && "Multiple reports generated");
1769877ccee8SAdam Nemet 
1770877ccee8SAdam Nemet   Value *CodeRegion = TheLoop->getHeader();
1771877ccee8SAdam Nemet   DebugLoc DL = TheLoop->getStartLoc();
1772877ccee8SAdam Nemet 
1773877ccee8SAdam Nemet   if (I) {
1774877ccee8SAdam Nemet     CodeRegion = I->getParent();
1775877ccee8SAdam Nemet     // If there is no debug location attached to the instruction, fall back to
1776877ccee8SAdam Nemet     // using the loop's location.
1777877ccee8SAdam Nemet     if (I->getDebugLoc())
1778877ccee8SAdam Nemet       DL = I->getDebugLoc();
1779877ccee8SAdam Nemet   }
1780877ccee8SAdam Nemet 
1781877ccee8SAdam Nemet   Report = make_unique<OptimizationRemarkAnalysis>(DEBUG_TYPE, RemarkName, DL,
1782877ccee8SAdam Nemet                                                    CodeRegion);
1783877ccee8SAdam Nemet   return *Report;
17840456327cSAdam Nemet }
17850456327cSAdam Nemet 
178657ac766eSAdam Nemet bool LoopAccessInfo::isUniform(Value *V) const {
17873ceac2bbSMichael Kuperstein   auto *SE = PSE->getSE();
17883ceac2bbSMichael Kuperstein   // Since we rely on SCEV for uniformity, if the type is not SCEVable, it is
17893ceac2bbSMichael Kuperstein   // never considered uniform.
17903ceac2bbSMichael Kuperstein   // TODO: Is this really what we want? Even without FP SCEV, we may want some
17913ceac2bbSMichael Kuperstein   // trivially loop-invariant FP values to be considered uniform.
17923ceac2bbSMichael Kuperstein   if (!SE->isSCEVable(V->getType()))
17933ceac2bbSMichael Kuperstein     return false;
17943ceac2bbSMichael Kuperstein   return (SE->isLoopInvariant(SE->getSCEV(V), TheLoop));
17950456327cSAdam Nemet }
17967206d7a5SAdam Nemet 
17977206d7a5SAdam Nemet // FIXME: this function is currently a duplicate of the one in
17987206d7a5SAdam Nemet // LoopVectorize.cpp.
17997206d7a5SAdam Nemet static Instruction *getFirstInst(Instruction *FirstInst, Value *V,
18007206d7a5SAdam Nemet                                  Instruction *Loc) {
18017206d7a5SAdam Nemet   if (FirstInst)
18027206d7a5SAdam Nemet     return FirstInst;
18037206d7a5SAdam Nemet   if (Instruction *I = dyn_cast<Instruction>(V))
18047206d7a5SAdam Nemet     return I->getParent() == Loc->getParent() ? I : nullptr;
18057206d7a5SAdam Nemet   return nullptr;
18067206d7a5SAdam Nemet }
18077206d7a5SAdam Nemet 
1808039b1042SBenjamin Kramer namespace {
18094e533ef7SAdam Nemet /// \brief IR Values for the lower and upper bounds of a pointer evolution.  We
18104e533ef7SAdam Nemet /// need to use value-handles because SCEV expansion can invalidate previously
18114e533ef7SAdam Nemet /// expanded values.  Thus expansion of a pointer can invalidate the bounds for
18124e533ef7SAdam Nemet /// a previous one.
18131da7df37SAdam Nemet struct PointerBounds {
18144e533ef7SAdam Nemet   TrackingVH<Value> Start;
18154e533ef7SAdam Nemet   TrackingVH<Value> End;
18161da7df37SAdam Nemet };
1817039b1042SBenjamin Kramer } // end anonymous namespace
18187206d7a5SAdam Nemet 
18191da7df37SAdam Nemet /// \brief Expand code for the lower and upper bound of the pointer group \p CG
18201da7df37SAdam Nemet /// in \p TheLoop.  \return the values for the bounds.
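///
/// For example (illustrative): for a group whose members all access A[i] with
/// 0 <= i < N over i32 elements, CG->Low and CG->High correspond roughly to
/// &A[0] and one past the last accessed byte, expanded as IR at \p Loc.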
18211da7df37SAdam Nemet static PointerBounds
18221da7df37SAdam Nemet expandBounds(const RuntimePointerChecking::CheckingPtrGroup *CG, Loop *TheLoop,
18231da7df37SAdam Nemet              Instruction *Loc, SCEVExpander &Exp, ScalarEvolution *SE,
18241da7df37SAdam Nemet              const RuntimePointerChecking &PtrRtChecking) {
18251da7df37SAdam Nemet   Value *Ptr = PtrRtChecking.Pointers[CG->Members[0]].PointerValue;
18267206d7a5SAdam Nemet   const SCEV *Sc = SE->getSCEV(Ptr);
18277206d7a5SAdam Nemet 
18287206d7a5SAdam Nemet   if (SE->isLoopInvariant(Sc, TheLoop)) {
18291b6b50a9SSilviu Baranga     DEBUG(dbgs() << "LAA: Adding RT check for a loop invariant ptr:" << *Ptr
18301b6b50a9SSilviu Baranga                  << "\n");
18311da7df37SAdam Nemet     return {Ptr, Ptr};
18327206d7a5SAdam Nemet   } else {
18337206d7a5SAdam Nemet     unsigned AS = Ptr->getType()->getPointerAddressSpace();
18341da7df37SAdam Nemet     LLVMContext &Ctx = Loc->getContext();
18357206d7a5SAdam Nemet 
18367206d7a5SAdam Nemet     // Use this type for pointer arithmetic.
18377206d7a5SAdam Nemet     Type *PtrArithTy = Type::getInt8PtrTy(Ctx, AS);
18381b6b50a9SSilviu Baranga     Value *Start = nullptr, *End = nullptr;
18397206d7a5SAdam Nemet 
18401b6b50a9SSilviu Baranga     DEBUG(dbgs() << "LAA: Adding RT check for range:\n");
18411da7df37SAdam Nemet     Start = Exp.expandCodeFor(CG->Low, PtrArithTy, Loc);
18421da7df37SAdam Nemet     End = Exp.expandCodeFor(CG->High, PtrArithTy, Loc);
18431da7df37SAdam Nemet     DEBUG(dbgs() << "Start: " << *CG->Low << " End: " << *CG->High << "\n");
18441da7df37SAdam Nemet     return {Start, End};
18457206d7a5SAdam Nemet   }
18467206d7a5SAdam Nemet }
18477206d7a5SAdam Nemet 
18481da7df37SAdam Nemet /// \brief Turns a collection of checks into a collection of expanded upper and
18491da7df37SAdam Nemet /// lower bounds for both pointers in each check.
18501da7df37SAdam Nemet static SmallVector<std::pair<PointerBounds, PointerBounds>, 4> expandBounds(
18511da7df37SAdam Nemet     const SmallVectorImpl<RuntimePointerChecking::PointerCheck> &PointerChecks,
18521da7df37SAdam Nemet     Loop *L, Instruction *Loc, ScalarEvolution *SE, SCEVExpander &Exp,
18531da7df37SAdam Nemet     const RuntimePointerChecking &PtrRtChecking) {
18541da7df37SAdam Nemet   SmallVector<std::pair<PointerBounds, PointerBounds>, 4> ChecksWithBounds;
18551da7df37SAdam Nemet 
18561da7df37SAdam Nemet   // Here we're relying on the SCEV Expander's cache to emit code for the
18571da7df37SAdam Nemet   // same bounds only once.
18582d006e76SDavid Majnemer   transform(
18592d006e76SDavid Majnemer       PointerChecks, std::back_inserter(ChecksWithBounds),
18601da7df37SAdam Nemet       [&](const RuntimePointerChecking::PointerCheck &Check) {
186194abbbd6SNAKAMURA Takumi         PointerBounds
186294abbbd6SNAKAMURA Takumi           First = expandBounds(Check.first, L, Loc, Exp, SE, PtrRtChecking),
186394abbbd6SNAKAMURA Takumi           Second = expandBounds(Check.second, L, Loc, Exp, SE, PtrRtChecking);
186494abbbd6SNAKAMURA Takumi         return std::make_pair(First, Second);
18651da7df37SAdam Nemet       });
18661da7df37SAdam Nemet 
18671da7df37SAdam Nemet   return ChecksWithBounds;
18681da7df37SAdam Nemet }
18691da7df37SAdam Nemet 
18705b0a4795SAdam Nemet std::pair<Instruction *, Instruction *> LoopAccessInfo::addRuntimeChecks(
18711da7df37SAdam Nemet     Instruction *Loc,
18721da7df37SAdam Nemet     const SmallVectorImpl<RuntimePointerChecking::PointerCheck> &PointerChecks)
18731da7df37SAdam Nemet     const {
18741824e411SAdam Nemet   const DataLayout &DL = TheLoop->getHeader()->getModule()->getDataLayout();
187594734eefSXinliang David Li   auto *SE = PSE->getSE();
18761824e411SAdam Nemet   SCEVExpander Exp(*SE, DL, "induction");
18771da7df37SAdam Nemet   auto ExpandedChecks =
1878ce030acbSXinliang David Li       expandBounds(PointerChecks, TheLoop, Loc, SE, Exp, *PtrRtChecking);
18791da7df37SAdam Nemet 
18801da7df37SAdam Nemet   LLVMContext &Ctx = Loc->getContext();
18811da7df37SAdam Nemet   Instruction *FirstInst = nullptr;
18827206d7a5SAdam Nemet   IRBuilder<> ChkBuilder(Loc);
18837206d7a5SAdam Nemet   // Our instructions might fold to a constant.
18847206d7a5SAdam Nemet   Value *MemoryRuntimeCheck = nullptr;
18851b6b50a9SSilviu Baranga 
18861da7df37SAdam Nemet   for (const auto &Check : ExpandedChecks) {
18871da7df37SAdam Nemet     const PointerBounds &A = Check.first, &B = Check.second;
1888cdb791cdSAdam Nemet     // Check if two pointers (A and B) conflict, where a conflict is computed
1889cdb791cdSAdam Nemet     // as: start(A) < end(B) && start(B) < end(A)
18901da7df37SAdam Nemet     unsigned AS0 = A.Start->getType()->getPointerAddressSpace();
18911da7df37SAdam Nemet     unsigned AS1 = B.Start->getType()->getPointerAddressSpace();
18927206d7a5SAdam Nemet 
18931da7df37SAdam Nemet     assert((AS0 == B.End->getType()->getPointerAddressSpace()) &&
18941da7df37SAdam Nemet            (AS1 == A.End->getType()->getPointerAddressSpace()) &&
18957206d7a5SAdam Nemet            "Trying to bounds check pointers with different address spaces");
18967206d7a5SAdam Nemet 
18977206d7a5SAdam Nemet     Type *PtrArithTy0 = Type::getInt8PtrTy(Ctx, AS0);
18987206d7a5SAdam Nemet     Type *PtrArithTy1 = Type::getInt8PtrTy(Ctx, AS1);
18997206d7a5SAdam Nemet 
19001da7df37SAdam Nemet     Value *Start0 = ChkBuilder.CreateBitCast(A.Start, PtrArithTy0, "bc");
19011da7df37SAdam Nemet     Value *Start1 = ChkBuilder.CreateBitCast(B.Start, PtrArithTy1, "bc");
19021da7df37SAdam Nemet     Value *End0 =   ChkBuilder.CreateBitCast(A.End,   PtrArithTy1, "bc");
19031da7df37SAdam Nemet     Value *End1 =   ChkBuilder.CreateBitCast(B.End,   PtrArithTy0, "bc");
19047206d7a5SAdam Nemet 
19053622fbfcSElena Demikhovsky     // [A|B].Start points to the first accessed byte under base [A|B].
19063622fbfcSElena Demikhovsky     // [A|B].End points to the last accessed byte, plus one.
19073622fbfcSElena Demikhovsky     // There is no conflict when the intervals are disjoint:
19083622fbfcSElena Demikhovsky     // NoConflict = (B.Start >= A.End) || (A.Start >= B.End)
19093622fbfcSElena Demikhovsky     //
19103622fbfcSElena Demikhovsky     // bound0 = (B.Start < A.End)
19113622fbfcSElena Demikhovsky     // bound1 = (A.Start < B.End)
19123622fbfcSElena Demikhovsky     //  IsConflict = bound0 & bound1
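    // As an illustrative numeric example (addresses are hypothetical): with
    // A = [0x100, 0x140) and B = [0x120, 0x160), bound0 = (0x120 < 0x140) and
    // bound1 = (0x100 < 0x160) are both true, so the ranges conflict.  With
    // B = [0x140, 0x180) instead, bound0 is false and the adjacent, disjoint
    // ranges are correctly accepted without a conflict.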
19133622fbfcSElena Demikhovsky     Value *Cmp0 = ChkBuilder.CreateICmpULT(Start0, End1, "bound0");
19147206d7a5SAdam Nemet     FirstInst = getFirstInst(FirstInst, Cmp0, Loc);
19153622fbfcSElena Demikhovsky     Value *Cmp1 = ChkBuilder.CreateICmpULT(Start1, End0, "bound1");
19167206d7a5SAdam Nemet     FirstInst = getFirstInst(FirstInst, Cmp1, Loc);
19177206d7a5SAdam Nemet     Value *IsConflict = ChkBuilder.CreateAnd(Cmp0, Cmp1, "found.conflict");
19187206d7a5SAdam Nemet     FirstInst = getFirstInst(FirstInst, IsConflict, Loc);
19197206d7a5SAdam Nemet     if (MemoryRuntimeCheck) {
19201da7df37SAdam Nemet       IsConflict =
19211da7df37SAdam Nemet           ChkBuilder.CreateOr(MemoryRuntimeCheck, IsConflict, "conflict.rdx");
19227206d7a5SAdam Nemet       FirstInst = getFirstInst(FirstInst, IsConflict, Loc);
19237206d7a5SAdam Nemet     }
19247206d7a5SAdam Nemet     MemoryRuntimeCheck = IsConflict;
19257206d7a5SAdam Nemet   }
19267206d7a5SAdam Nemet 
192790fec840SAdam Nemet   if (!MemoryRuntimeCheck)
192890fec840SAdam Nemet     return std::make_pair(nullptr, nullptr);
192990fec840SAdam Nemet 
19307206d7a5SAdam Nemet   // We have to do this trickery because the IRBuilder might fold the check
19317206d7a5SAdam Nemet   // to a constant expression, in which case there is no Instruction anchored
19327206d7a5SAdam Nemet   // in the block.
19337206d7a5SAdam Nemet   Instruction *Check = BinaryOperator::CreateAnd(MemoryRuntimeCheck,
19347206d7a5SAdam Nemet                                                  ConstantInt::getTrue(Ctx));
19357206d7a5SAdam Nemet   ChkBuilder.Insert(Check, "memcheck.conflict");
19367206d7a5SAdam Nemet   FirstInst = getFirstInst(FirstInst, Check, Loc);
19377206d7a5SAdam Nemet   return std::make_pair(FirstInst, Check);
19387206d7a5SAdam Nemet }
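// A minimal usage sketch (not part of this file; CheckBB, OrigLoopPH and
// VersionedLoopPH are hypothetical names): a client such as LoopVersioning
// typically expands the checks in a dedicated block and branches on the
// returned conflict flag, taking the original loop when the pointers may
// overlap:
//
//   Instruction *FirstCheck, *MemCheck;
//   std::tie(FirstCheck, MemCheck) =
//       LAI.addRuntimeChecks(CheckBB->getTerminator());
//   if (MemCheck)
//     // true -> possible conflict -> run the original (unversioned) loop.
//     ReplaceInstWithInst(CheckBB->getTerminator(),
//                         BranchInst::Create(OrigLoopPH, VersionedLoopPH,
//                                            MemCheck));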
19393bfd93d7SAdam Nemet 
19405b0a4795SAdam Nemet std::pair<Instruction *, Instruction *>
19415b0a4795SAdam Nemet LoopAccessInfo::addRuntimeChecks(Instruction *Loc) const {
1942ce030acbSXinliang David Li   if (!PtrRtChecking->Need)
19431da7df37SAdam Nemet     return std::make_pair(nullptr, nullptr);
19441da7df37SAdam Nemet 
1945ce030acbSXinliang David Li   return addRuntimeChecks(Loc, PtrRtChecking->getChecks());
19461da7df37SAdam Nemet }
19471da7df37SAdam Nemet 
1948c953bb99SAdam Nemet void LoopAccessInfo::collectStridedAccess(Value *MemAccess) {
1949c953bb99SAdam Nemet   Value *Ptr = nullptr;
1950c953bb99SAdam Nemet   if (LoadInst *LI = dyn_cast<LoadInst>(MemAccess))
1951c953bb99SAdam Nemet     Ptr = LI->getPointerOperand();
1952c953bb99SAdam Nemet   else if (StoreInst *SI = dyn_cast<StoreInst>(MemAccess))
1953c953bb99SAdam Nemet     Ptr = SI->getPointerOperand();
1954c953bb99SAdam Nemet   else
1955c953bb99SAdam Nemet     return;
1956c953bb99SAdam Nemet 
195794734eefSXinliang David Li   Value *Stride = getStrideFromPointer(Ptr, PSE->getSE(), TheLoop);
1958c953bb99SAdam Nemet   if (!Stride)
1959c953bb99SAdam Nemet     return;
1960c953bb99SAdam Nemet 
1961c953bb99SAdam Nemet   DEBUG(dbgs() << "LAA: Found a strided access that we can version");
1962c953bb99SAdam Nemet   DEBUG(dbgs() << "  Ptr: " << *Ptr << " Stride: " << *Stride << "\n");
1963c953bb99SAdam Nemet   SymbolicStrides[Ptr] = Stride;
1964c953bb99SAdam Nemet   StrideSet.insert(Stride);
1965c953bb99SAdam Nemet }
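// For illustration (the loop below is hypothetical): given source such as
//
//   for (i = 0; i < n; i++)
//     A[i * Stride] = ...;
//
// where Stride is a loop-invariant value only known at run time,
// getStrideFromPointer returns that symbolic stride.  Recording it in
// SymbolicStrides lets clients version the loop under the assumption
// "Stride == 1", so the versioned copy sees consecutive accesses and can be
// vectorized.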
1966c953bb99SAdam Nemet 
19673bfd93d7SAdam Nemet LoopAccessInfo::LoopAccessInfo(Loop *L, ScalarEvolution *SE,
19683bfd93d7SAdam Nemet                                const TargetLibraryInfo *TLI, AliasAnalysis *AA,
1969a9f09c62SAdam Nemet                                DominatorTree *DT, LoopInfo *LI)
197094734eefSXinliang David Li     : PSE(llvm::make_unique<PredicatedScalarEvolution>(*SE, *L)),
1971ce030acbSXinliang David Li       PtrRtChecking(llvm::make_unique<RuntimePointerChecking>(SE)),
197294734eefSXinliang David Li       DepChecker(llvm::make_unique<MemoryDepChecker>(*PSE, L)), TheLoop(L),
19737da74abfSAdam Nemet       NumLoads(0), NumStores(0), MaxSafeDepDistBytes(-1), CanVecMem(false),
19747da74abfSAdam Nemet       StoreToLoopInvariantAddress(false) {
1975929c38e8SAdam Nemet   if (canAnalyzeLoop())
19767da74abfSAdam Nemet     analyzeLoop(AA, LI, TLI, DT);
19773bfd93d7SAdam Nemet }
19783bfd93d7SAdam Nemet 
1979e91cc6efSAdam Nemet void LoopAccessInfo::print(raw_ostream &OS, unsigned Depth) const {
1980e91cc6efSAdam Nemet   if (CanVecMem) {
19814ad38b63SAdam Nemet     OS.indent(Depth) << "Memory dependences are safe";
19827afb46d3SDavid Majnemer     if (MaxSafeDepDistBytes != -1ULL)
1983c62e554eSAdam Nemet       OS << " with a maximum dependence distance of " << MaxSafeDepDistBytes
1984c62e554eSAdam Nemet          << " bytes";
1985ce030acbSXinliang David Li     if (PtrRtChecking->Need)
19864ad38b63SAdam Nemet       OS << " with run-time checks";
19874ad38b63SAdam Nemet     OS << "\n";
1988e91cc6efSAdam Nemet   }
1989e91cc6efSAdam Nemet 
1990e91cc6efSAdam Nemet   if (Report)
1991877ccee8SAdam Nemet     OS.indent(Depth) << "Report: " << Report->getMsg() << "\n";
1992e91cc6efSAdam Nemet 
1993ce030acbSXinliang David Li   if (auto *Dependences = DepChecker->getDependences()) {
1994a2df750fSAdam Nemet     OS.indent(Depth) << "Dependences:\n";
1995a2df750fSAdam Nemet     for (auto &Dep : *Dependences) {
1996ce030acbSXinliang David Li       Dep.print(OS, Depth + 2, DepChecker->getMemoryInstructions());
199758913d65SAdam Nemet       OS << "\n";
199858913d65SAdam Nemet     }
199958913d65SAdam Nemet   } else
2000a2df750fSAdam Nemet     OS.indent(Depth) << "Too many dependences, not recorded\n";
2001e91cc6efSAdam Nemet 
2002e91cc6efSAdam Nemet   // List the pairs of accesses that need run-time checks to prove independence.
2003ce030acbSXinliang David Li   PtrRtChecking->print(OS, Depth);
2004e91cc6efSAdam Nemet   OS << "\n";
2005c3384320SAdam Nemet 
2006c3384320SAdam Nemet   OS.indent(Depth) << "Store to invariant address was "
2007c3384320SAdam Nemet                    << (StoreToLoopInvariantAddress ? "" : "not ")
2008c3384320SAdam Nemet                    << "found in loop.\n";
2009e3c0534bSSilviu Baranga 
2010e3c0534bSSilviu Baranga   OS.indent(Depth) << "SCEV assumptions:\n";
201194734eefSXinliang David Li   PSE->getUnionPredicate().print(OS, Depth);
2012b77365b5SSilviu Baranga 
2013b77365b5SSilviu Baranga   OS << "\n";
2014b77365b5SSilviu Baranga 
2015b77365b5SSilviu Baranga   OS.indent(Depth) << "Expressions re-written:\n";
201694734eefSXinliang David Li   PSE->print(OS, Depth);
2017e91cc6efSAdam Nemet }
2018e91cc6efSAdam Nemet 
20197853c1ddSXinliang David Li const LoopAccessInfo &LoopAccessLegacyAnalysis::getInfo(Loop *L) {
20203bfd93d7SAdam Nemet   auto &LAI = LoopAccessInfoMap[L];
20213bfd93d7SAdam Nemet 
20221824e411SAdam Nemet   if (!LAI)
20231824e411SAdam Nemet     LAI = llvm::make_unique<LoopAccessInfo>(L, SE, TLI, AA, DT, LI);
20241824e411SAdam Nemet 
20253bfd93d7SAdam Nemet   return *LAI.get();
20263bfd93d7SAdam Nemet }
20273bfd93d7SAdam Nemet 
20287853c1ddSXinliang David Li void LoopAccessLegacyAnalysis::print(raw_ostream &OS, const Module *M) const {
20297853c1ddSXinliang David Li   LoopAccessLegacyAnalysis &LAA = *const_cast<LoopAccessLegacyAnalysis *>(this);
2030ecde1c7fSXinliang David Li 
2031e91cc6efSAdam Nemet   for (Loop *TopLevelLoop : *LI)
2032e91cc6efSAdam Nemet     for (Loop *L : depth_first(TopLevelLoop)) {
2033e91cc6efSAdam Nemet       OS.indent(2) << L->getHeader()->getName() << ":\n";
2034bdbc5227SAdam Nemet       auto &LAI = LAA.getInfo(L);
2035e91cc6efSAdam Nemet       LAI.print(OS, 4);
2036e91cc6efSAdam Nemet     }
2037e91cc6efSAdam Nemet }
2038e91cc6efSAdam Nemet 
20397853c1ddSXinliang David Li bool LoopAccessLegacyAnalysis::runOnFunction(Function &F) {
2040ecde1c7fSXinliang David Li   SE = &getAnalysis<ScalarEvolutionWrapperPass>().getSE();
20413bfd93d7SAdam Nemet   auto *TLIP = getAnalysisIfAvailable<TargetLibraryInfoWrapperPass>();
2042ecde1c7fSXinliang David Li   TLI = TLIP ? &TLIP->getTLI() : nullptr;
2043ecde1c7fSXinliang David Li   AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
2044ecde1c7fSXinliang David Li   DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();
2045ecde1c7fSXinliang David Li   LI = &getAnalysis<LoopInfoWrapperPass>().getLoopInfo();
20463bfd93d7SAdam Nemet 
20473bfd93d7SAdam Nemet   return false;
20483bfd93d7SAdam Nemet }
20493bfd93d7SAdam Nemet 
20507853c1ddSXinliang David Li void LoopAccessLegacyAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
20512f1fd165SChandler Carruth   AU.addRequired<ScalarEvolutionWrapperPass>();
20527b560d40SChandler Carruth   AU.addRequired<AAResultsWrapperPass>();
20533bfd93d7SAdam Nemet   AU.addRequired<DominatorTreeWrapperPass>();
2054e91cc6efSAdam Nemet   AU.addRequired<LoopInfoWrapperPass>();
20553bfd93d7SAdam Nemet 
20563bfd93d7SAdam Nemet   AU.setPreservesAll();
20573bfd93d7SAdam Nemet }
20583bfd93d7SAdam Nemet 
20597853c1ddSXinliang David Li char LoopAccessLegacyAnalysis::ID = 0;
20603bfd93d7SAdam Nemet static const char laa_name[] = "Loop Access Analysis";
20613bfd93d7SAdam Nemet #define LAA_NAME "loop-accesses"
20623bfd93d7SAdam Nemet 
20637853c1ddSXinliang David Li INITIALIZE_PASS_BEGIN(LoopAccessLegacyAnalysis, LAA_NAME, laa_name, false, true)
20647b560d40SChandler Carruth INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
20652f1fd165SChandler Carruth INITIALIZE_PASS_DEPENDENCY(ScalarEvolutionWrapperPass)
20663bfd93d7SAdam Nemet INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
2067e91cc6efSAdam Nemet INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass)
20687853c1ddSXinliang David Li INITIALIZE_PASS_END(LoopAccessLegacyAnalysis, LAA_NAME, laa_name, false, true)
20693bfd93d7SAdam Nemet 
2070*dab4eae2SChandler Carruth AnalysisKey LoopAccessAnalysis::Key;
20718a021317SXinliang David Li 
20720746f3bfSSean Silva LoopAccessInfo LoopAccessAnalysis::run(Loop &L, LoopAnalysisManager &AM) {
207336e0d01eSSean Silva   const FunctionAnalysisManager &FAM =
2074284b0324SSean Silva       AM.getResult<FunctionAnalysisManagerLoopProxy>(L).getManager();
20758a021317SXinliang David Li   Function &F = *L.getHeader()->getParent();
2076284b0324SSean Silva   auto *SE = FAM.getCachedResult<ScalarEvolutionAnalysis>(F);
20778a021317SXinliang David Li   auto *TLI = FAM.getCachedResult<TargetLibraryAnalysis>(F);
2078284b0324SSean Silva   auto *AA = FAM.getCachedResult<AAManager>(F);
2079284b0324SSean Silva   auto *DT = FAM.getCachedResult<DominatorTreeAnalysis>(F);
2080284b0324SSean Silva   auto *LI = FAM.getCachedResult<LoopAnalysis>(F);
2081284b0324SSean Silva   if (!SE)
2082284b0324SSean Silva     report_fatal_error(
2083284b0324SSean Silva         "ScalarEvolution must have been cached at a higher level");
2084284b0324SSean Silva   if (!AA)
2085284b0324SSean Silva     report_fatal_error("AliasAnalysis must have been cached at a higher level");
2086284b0324SSean Silva   if (!DT)
2087284b0324SSean Silva     report_fatal_error("DominatorTree must have been cached at a higher level");
2088284b0324SSean Silva   if (!LI)
2089284b0324SSean Silva     report_fatal_error("LoopInfo must have been cached at a higher level");
20901824e411SAdam Nemet   return LoopAccessInfo(&L, SE, TLI, AA, DT, LI);
20918a021317SXinliang David Li }
20928a021317SXinliang David Li 
20938a021317SXinliang David Li PreservedAnalyses LoopAccessInfoPrinterPass::run(Loop &L,
20940746f3bfSSean Silva                                                  LoopAnalysisManager &AM) {
20958a021317SXinliang David Li   Function &F = *L.getHeader()->getParent();
209607e08fa3SXinliang David Li   auto &LAI = AM.getResult<LoopAccessAnalysis>(L);
20978a021317SXinliang David Li   OS << "Loop access info in function '" << F.getName() << "':\n";
20988a021317SXinliang David Li   OS.indent(2) << L.getHeader()->getName() << ":\n";
20998a021317SXinliang David Li   LAI.print(OS, 4);
21008a021317SXinliang David Li   return PreservedAnalyses::all();
21018a021317SXinliang David Li }
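// For reference (the exact pipeline syntax is a sketch and may vary between
// releases): the legacy printer is normally exercised as
//   opt -loop-accesses -analyze <file.ll>
// and the new-pass-manager printer roughly as
//   opt -passes='require<scalar-evolution>,require<aa>,loop(print-access-info)'
//       -disable-output <file.ll>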
21028a021317SXinliang David Li 
21033bfd93d7SAdam Nemet namespace llvm {
21043bfd93d7SAdam Nemet   Pass *createLAAPass() {
21057853c1ddSXinliang David Li     return new LoopAccessLegacyAnalysis();
21063bfd93d7SAdam Nemet   }
21073bfd93d7SAdam Nemet }
2108