//===- LocalAliasAnalysis.cpp - Local Stateless Alias Analysis for MLIR ---===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 
9 #include "mlir/Analysis/AliasAnalysis/LocalAliasAnalysis.h"
10 
11 #include "mlir/IR/FunctionSupport.h"
12 #include "mlir/IR/Matchers.h"
13 #include "mlir/Interfaces/ControlFlowInterfaces.h"
14 #include "mlir/Interfaces/SideEffectInterfaces.h"
15 #include "mlir/Interfaces/ViewLikeInterface.h"
16 
17 using namespace mlir;
18 
19 //===----------------------------------------------------------------------===//
20 // Underlying Address Computation
21 //===----------------------------------------------------------------------===//
22 
23 /// The maximum depth that will be searched when trying to find an underlying
24 /// value.
25 static constexpr unsigned maxUnderlyingValueSearchDepth = 10;
26 
27 /// Given a value, collect all of the underlying values being addressed.
28 static void collectUnderlyingAddressValues(Value value, unsigned maxDepth,
29                                            DenseSet<Value> &visited,
30                                            SmallVectorImpl<Value> &output);
31 
/// Given a successor (`region`) of a RegionBranchOpInterface, collect all of
/// the underlying values being addressed by the given successor input
/// (`inputValue`, at index `inputIndex`). If the provided `region` is null
/// then, as per `RegionBranchOpInterface`, it represents the parent operation.
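///
/// For example, given an `scf.if` (which implements
/// `RegionBranchOpInterface`) that yields a memref from each of its regions:
///
///   %m = scf.if %cond -> (memref<8xf32>) {
///     scf.yield %a : memref<8xf32>
///   } else {
///     scf.yield %b : memref<8xf32>
///   }
///
/// collecting the underlying values of the result %m (i.e. with a null
/// `region`) yields %a and %b. (Illustrative sketch; the particular dialect
/// used is irrelevant to this analysis.)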
36 static void collectUnderlyingAddressValues(RegionBranchOpInterface branch,
37                                            Region *region, Value inputValue,
38                                            unsigned inputIndex,
39                                            unsigned maxDepth,
40                                            DenseSet<Value> &visited,
41                                            SmallVectorImpl<Value> &output) {
  // Given the index of a region of the branch (`predIndex`), or None to
  // represent the parent operation, try to return the index into the outputs
  // of this region predecessor that corresponds to the input value
  // (`inputValue`) of `region`. If no such index could be found, None is
  // returned instead.
46   auto getOperandIndexIfPred =
47       [&](Optional<unsigned> predIndex) -> Optional<unsigned> {
48     SmallVector<RegionSuccessor, 2> successors;
49     branch.getSuccessorRegions(predIndex, successors);
50     for (RegionSuccessor &successor : successors) {
51       if (successor.getSuccessor() != region)
52         continue;
53       // Check that the successor inputs map to the given input value.
54       ValueRange inputs = successor.getSuccessorInputs();
55       if (inputs.empty()) {
56         output.push_back(inputValue);
57         break;
58       }
59       unsigned firstInputIndex, lastInputIndex;
60       if (region) {
61         firstInputIndex = inputs[0].cast<BlockArgument>().getArgNumber();
62         lastInputIndex = inputs.back().cast<BlockArgument>().getArgNumber();
63       } else {
64         firstInputIndex = inputs[0].cast<OpResult>().getResultNumber();
65         lastInputIndex = inputs.back().cast<OpResult>().getResultNumber();
66       }
67       if (firstInputIndex > inputIndex || lastInputIndex < inputIndex) {
68         output.push_back(inputValue);
69         break;
70       }
71       return inputIndex - firstInputIndex;
72     }
73     return llvm::None;
74   };
75 
76   // Check branches from the parent operation.
77   if (region) {
78     if (Optional<unsigned> operandIndex =
79             getOperandIndexIfPred(/*predIndex=*/llvm::None)) {
80       collectUnderlyingAddressValues(
81           branch.getSuccessorEntryOperands(
82               region->getRegionNumber())[*operandIndex],
83           maxDepth, visited, output);
84     }
85   }
86   // Check branches from each child region.
87   Operation *op = branch.getOperation();
88   for (int i = 0, e = op->getNumRegions(); i != e; ++i) {
89     if (Optional<unsigned> operandIndex = getOperandIndexIfPred(i)) {
90       for (Block &block : op->getRegion(i)) {
91         Operation *term = block.getTerminator();
92         if (term->hasTrait<OpTrait::ReturnLike>()) {
93           collectUnderlyingAddressValues(term->getOperand(*operandIndex),
94                                          maxDepth, visited, output);
95         } else if (term->getNumSuccessors()) {
          // Otherwise, if this terminator may exit the region, we can't make
          // any assumptions about which values get passed.
98           output.push_back(inputValue);
99           return;
100         }
101       }
102     }
103   }
104 }
105 
106 /// Given a result, collect all of the underlying values being addressed.
107 static void collectUnderlyingAddressValues(OpResult result, unsigned maxDepth,
108                                            DenseSet<Value> &visited,
109                                            SmallVectorImpl<Value> &output) {
110   Operation *op = result.getOwner();
111 
112   // If this is a view, unwrap to the source.
113   if (ViewLikeOpInterface view = dyn_cast<ViewLikeOpInterface>(op))
114     return collectUnderlyingAddressValues(view.getViewSource(), maxDepth,
115                                           visited, output);
116   // Check to see if we can reason about the control flow of this op.
117   if (auto branch = dyn_cast<RegionBranchOpInterface>(op)) {
118     return collectUnderlyingAddressValues(branch, /*region=*/nullptr, result,
119                                           result.getResultNumber(), maxDepth,
120                                           visited, output);
121   }
122 
123   output.push_back(result);
124 }
125 
126 /// Given a block argument, collect all of the underlying values being
127 /// addressed.
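///
/// For example, for a non-entry block argument whose predecessors are all
/// unconditional branches (shown here with `cf.br`-style syntax purely for
/// illustration):
///
///     cf.br ^bb1(%a : memref<8xf32>)
///     ...
///     cf.br ^bb1(%b : memref<8xf32>)
///   ^bb1(%arg : memref<8xf32>):
///
/// the underlying values collected for %arg are those of %a and %b.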
128 static void collectUnderlyingAddressValues(BlockArgument arg, unsigned maxDepth,
129                                            DenseSet<Value> &visited,
130                                            SmallVectorImpl<Value> &output) {
131   Block *block = arg.getOwner();
132   unsigned argNumber = arg.getArgNumber();
133 
134   // Handle the case of a non-entry block.
135   if (!block->isEntryBlock()) {
136     for (auto it = block->pred_begin(), e = block->pred_end(); it != e; ++it) {
137       auto branch = dyn_cast<BranchOpInterface>((*it)->getTerminator());
138       if (!branch) {
139         // We can't analyze the control flow, so bail out early.
140         output.push_back(arg);
141         return;
142       }
143 
144       // Try to get the operand passed for this argument.
145       unsigned index = it.getSuccessorIndex();
146       Optional<OperandRange> operands = branch.getSuccessorOperands(index);
147       if (!operands) {
148         // We can't analyze the control flow, so bail out early.
149         output.push_back(arg);
150         return;
151       }
152       collectUnderlyingAddressValues((*operands)[argNumber], maxDepth, visited,
153                                      output);
154     }
155     return;
156   }
157 
158   // Otherwise, check to see if we can reason about the control flow of this op.
159   Region *region = block->getParent();
160   Operation *op = region->getParentOp();
161   if (auto branch = dyn_cast<RegionBranchOpInterface>(op)) {
162     return collectUnderlyingAddressValues(branch, region, arg, argNumber,
163                                           maxDepth, visited, output);
164   }
165 
166   // We can't reason about the underlying address of this argument.
167   output.push_back(arg);
168 }
169 
170 /// Given a value, collect all of the underlying values being addressed.
171 static void collectUnderlyingAddressValues(Value value, unsigned maxDepth,
172                                            DenseSet<Value> &visited,
173                                            SmallVectorImpl<Value> &output) {
174   // Check that we don't infinitely recurse.
175   if (!visited.insert(value).second)
176     return;
177   if (maxDepth == 0) {
178     output.push_back(value);
179     return;
180   }
181   --maxDepth;
182 
183   if (BlockArgument arg = value.dyn_cast<BlockArgument>())
184     return collectUnderlyingAddressValues(arg, maxDepth, visited, output);
185   collectUnderlyingAddressValues(value.cast<OpResult>(), maxDepth, visited,
186                                  output);
187 }
188 
189 /// Given a value, collect all of the underlying values being addressed.
190 static void collectUnderlyingAddressValues(Value value,
191                                            SmallVectorImpl<Value> &output) {
192   DenseSet<Value> visited;
193   collectUnderlyingAddressValues(value, maxUnderlyingValueSearchDepth, visited,
194                                  output);
195 }
196 
197 //===----------------------------------------------------------------------===//
198 // LocalAliasAnalysis: alias
199 //===----------------------------------------------------------------------===//
200 
/// Given a value, try to get an allocation effect attached to it. If
/// successful, `effect` is populated with the allocation effect. If an effect
/// was found, `allocScopeOp` is also set to the parent operation of `value`
/// that bounds the scope of the allocated value, i.e. if non-null it is the
/// operation that the allocation does not escape. If no such scope is found,
/// `allocScopeOp` is set to nullptr.
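///
/// For example, for a value produced by an alloca-like operation whose
/// allocation effect is attached to `AutomaticAllocationScopeResource`, the
/// scope is the nearest ancestor with the `AutomaticAllocationScope` trait
/// (typically the enclosing function-like operation).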
207 static LogicalResult
208 getAllocEffectFor(Value value, Optional<MemoryEffects::EffectInstance> &effect,
209                   Operation *&allocScopeOp) {
210   // Try to get a memory effect interface for the parent operation.
211   Operation *op;
212   if (BlockArgument arg = value.dyn_cast<BlockArgument>())
213     op = arg.getOwner()->getParentOp();
214   else
215     op = value.cast<OpResult>().getOwner();
216   MemoryEffectOpInterface interface = dyn_cast<MemoryEffectOpInterface>(op);
217   if (!interface)
218     return failure();
219 
  // Try to find an allocation effect on the value.
221   if (!(effect = interface.getEffectOnValue<MemoryEffects::Allocate>(value)))
222     return failure();
223 
224   // If we found an allocation effect, try to find a scope for the allocation.
225   // If the resource of this allocation is automatically scoped, find the parent
226   // operation that bounds the allocation scope.
227   if (llvm::isa<SideEffects::AutomaticAllocationScopeResource>(
228           effect->getResource())) {
229     allocScopeOp = op->getParentWithTrait<OpTrait::AutomaticAllocationScope>();
230     return success();
231   }
232 
  // TODO: Here we could look at the users to see if the resource is either
  // freed on all paths within the region, or is just not captured by anything.
  // For now, assume the allocation is scoped to the enclosing function (we
  // don't care if the pointer escapes outside the function).
237   allocScopeOp = op->getParentWithTrait<OpTrait::FunctionLike>();
238   return success();
239 }
240 
/// Given two values, return their aliasing behavior.
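///
/// The checks below proceed from the strongest available information: constant
/// values, allocation effects attached to the values, and finally the scope of
/// a known allocation; anything that cannot be classified conservatively
/// returns `MayAlias`.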
242 static AliasResult aliasImpl(Value lhs, Value rhs) {
243   if (lhs == rhs)
244     return AliasResult::MustAlias;
245   Operation *lhsAllocScope = nullptr, *rhsAllocScope = nullptr;
246   Optional<MemoryEffects::EffectInstance> lhsAlloc, rhsAlloc;
247 
248   // Handle the case where lhs is a constant.
249   Attribute lhsAttr, rhsAttr;
250   if (matchPattern(lhs, m_Constant(&lhsAttr))) {
    // TODO: This is overly conservative. Two matching constants don't
    // necessarily map to the same address; for example, the two values may
    // correspond to different symbols that each represent their own
    // definition.
254     if (matchPattern(rhs, m_Constant(&rhsAttr)))
255       return AliasResult::MayAlias;
256 
257     // Try to find an alloc effect on rhs. If an effect was found we can't
258     // alias, otherwise we might.
259     return succeeded(getAllocEffectFor(rhs, rhsAlloc, rhsAllocScope))
260                ? AliasResult::NoAlias
261                : AliasResult::MayAlias;
262   }
263   // Handle the case where rhs is a constant.
264   if (matchPattern(rhs, m_Constant(&rhsAttr))) {
265     // Try to find an alloc effect on lhs. If an effect was found we can't
266     // alias, otherwise we might.
267     return succeeded(getAllocEffectFor(lhs, lhsAlloc, lhsAllocScope))
268                ? AliasResult::NoAlias
269                : AliasResult::MayAlias;
270   }
271 
272   // Otherwise, neither of the values are constant so check to see if either has
273   // an allocation effect.
274   bool lhsHasAlloc = succeeded(getAllocEffectFor(lhs, lhsAlloc, lhsAllocScope));
275   bool rhsHasAlloc = succeeded(getAllocEffectFor(rhs, rhsAlloc, rhsAllocScope));
276   if (lhsHasAlloc == rhsHasAlloc) {
    // If both values have an allocation effect we know they don't alias, and
    // if neither has an effect we can't make any assumptions.
279     return lhsHasAlloc ? AliasResult::NoAlias : AliasResult::MayAlias;
280   }
281 
282   // When we reach this point we have one value with a known allocation effect,
283   // and one without. Move the one with the effect to the lhs to make the next
284   // checks simpler.
285   if (rhsHasAlloc) {
286     std::swap(lhs, rhs);
287     lhsAlloc = rhsAlloc;
288     lhsAllocScope = rhsAllocScope;
289   }
290 
291   // If the effect has a scoped allocation region, check to see if the
292   // non-effect value is defined above that scope.
293   if (lhsAllocScope) {
    // If the parent operation of rhs is a proper ancestor of the allocation
    // scope, or if rhs is an entry block argument of the allocation scope, we
    // know the two values can't alias.
297     Operation *rhsParentOp = rhs.getParentRegion()->getParentOp();
298     if (rhsParentOp->isProperAncestor(lhsAllocScope))
299       return AliasResult::NoAlias;
300     if (rhsParentOp == lhsAllocScope) {
301       BlockArgument rhsArg = rhs.dyn_cast<BlockArgument>();
302       if (rhsArg && rhs.getParentBlock()->isEntryBlock())
303         return AliasResult::NoAlias;
304     }
305   }
306 
307   // If we couldn't reason about the relationship between the two values,
308   // conservatively assume they might alias.
309   return AliasResult::MayAlias;
310 }
311 
/// Given two values, return their aliasing behavior.
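///
/// Example usage (sketch; `bufferA` and `bufferB` are hypothetical
/// memref-typed values in a client pass):
///
///   LocalAliasAnalysis aliasAnalysis;
///   if (aliasAnalysis.alias(bufferA, bufferB).isNo()) {
///     // Accesses through the two values are provably disjoint.
///   }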
313 AliasResult LocalAliasAnalysis::alias(Value lhs, Value rhs) {
314   if (lhs == rhs)
315     return AliasResult::MustAlias;
316 
317   // Get the underlying values being addressed.
318   SmallVector<Value, 8> lhsValues, rhsValues;
319   collectUnderlyingAddressValues(lhs, lhsValues);
320   collectUnderlyingAddressValues(rhs, rhsValues);
321 
322   // If we failed to collect for either of the values somehow, conservatively
323   // assume they may alias.
324   if (lhsValues.empty() || rhsValues.empty())
325     return AliasResult::MayAlias;
326 
327   // Check the alias results against each of the underlying values.
328   Optional<AliasResult> result;
329   for (Value lhsVal : lhsValues) {
330     for (Value rhsVal : rhsValues) {
331       AliasResult nextResult = aliasImpl(lhsVal, rhsVal);
332       result = result ? result->merge(nextResult) : nextResult;
333     }
334   }
335 
336   // We should always have a valid result here.
337   return *result;
338 }
339 
340 //===----------------------------------------------------------------------===//
341 // LocalAliasAnalysis: getModRef
342 //===----------------------------------------------------------------------===//
343 
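/// Return the modify/reference behavior of `op` on `location`: whether `op`
/// may write to (Mod) and/or read from (Ref) the memory that `location`
/// refers to.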
344 ModRefResult LocalAliasAnalysis::getModRef(Operation *op, Value location) {
345   // Check to see if this operation relies on nested side effects.
346   if (op->hasTrait<OpTrait::HasRecursiveSideEffects>()) {
347     // TODO: To check recursive operations we need to check all of the nested
348     // operations, which can result in a quadratic number of queries. We should
349     // introduce some caching of some kind to help alleviate this, especially as
350     // this caching could be used in other areas of the codebase (e.g. when
351     // checking `wouldOpBeTriviallyDead`).
352     return ModRefResult::getModAndRef();
353   }
354 
355   // Otherwise, check to see if this operation has a memory effect interface.
356   MemoryEffectOpInterface interface = dyn_cast<MemoryEffectOpInterface>(op);
357   if (!interface)
358     return ModRefResult::getModAndRef();
359 
360   // Build a ModRefResult by merging the behavior of the effects of this
361   // operation.
362   SmallVector<MemoryEffects::EffectInstance> effects;
363   interface.getEffects(effects);
364 
365   ModRefResult result = ModRefResult::getNoModRef();
366   for (const MemoryEffects::EffectInstance &effect : effects) {
367     if (isa<MemoryEffects::Allocate, MemoryEffects::Free>(effect.getEffect()))
368       continue;
369 
370     // Check for an alias between the effect and our memory location.
371     // TODO: Add support for checking an alias with a symbol reference.
372     AliasResult aliasResult = AliasResult::MayAlias;
373     if (Value effectValue = effect.getValue())
374       aliasResult = alias(effectValue, location);
375 
376     // If we don't alias, ignore this effect.
377     if (aliasResult.isNo())
378       continue;
379 
380     // Merge in the corresponding mod or ref for this effect.
381     if (isa<MemoryEffects::Read>(effect.getEffect())) {
382       result = result.merge(ModRefResult::getRef());
383     } else {
384       assert(isa<MemoryEffects::Write>(effect.getEffect()));
385       result = result.merge(ModRefResult::getMod());
386     }
387     if (result.isModAndRef())
388       break;
389   }
390   return result;
391 }
392