//===- NestedMatcher.h - Nested matcher for Function ------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef MLIR_DIALECT_AFFINE_ANALYSIS_NESTEDMATCHER_H
#define MLIR_DIALECT_AFFINE_ANALYSIS_NESTEDMATCHER_H

#include "mlir/IR/BuiltinOps.h"
#include "mlir/IR/Operation.h"
#include "llvm/Support/Allocator.h"

#include <cassert>
#include <functional>
15
namespace mlir {

class NestedPattern;
class Operation;

21 /// An NestedPattern captures nested patterns in the IR.
22 /// It is used in conjunction with a scoped NestedPatternContext which is an
23 /// llvm::BumpPtrAllocator that handles memory allocations efficiently and
24 /// avoids ownership issues.
25 ///
26 /// In order to use NestedPatterns, first create a scoped context.
27 /// When the context goes out of scope, everything is freed.
28 /// This design simplifies the API by avoiding references to the context and
29 /// makes it clear that references to matchers must not escape.
30 ///
31 /// Example:
32 /// {
33 /// NestedPatternContext context;
34 /// auto gemmLike = Doall(Doall(Red(LoadStores())));
35 /// auto matches = gemmLike.match(f);
36 /// // do work on matches
37 /// } // everything is freed
38 ///
39 ///
40 /// Nested abstraction for matching results.
41 /// Provides access to the nested Operation* captured by a Matcher.
42 ///
43 /// A NestedMatch contains an Operation* and the children NestedMatch and is
44 /// thus cheap to copy. NestedMatch is stored in a scoped bumper allocator whose
45 /// lifetime is managed by an RAII NestedPatternContext.
46 class NestedMatch {
47 public:
48 static NestedMatch build(Operation *operation,
49 ArrayRef<NestedMatch> nestedMatches);
50 NestedMatch(const NestedMatch &) = default;
51 NestedMatch &operator=(const NestedMatch &) = default;
52
53 explicit operator bool() { return matchedOperation != nullptr; }
54
getMatchedOperation()55 Operation *getMatchedOperation() const { return matchedOperation; }
getMatchedChildren()56 ArrayRef<NestedMatch> getMatchedChildren() { return matchedChildren; }
57
58 private:
59 friend class NestedPattern;
60 friend class NestedPatternContext;
61
62 /// Underlying global bump allocator managed by a NestedPatternContext.
63 static llvm::BumpPtrAllocator *&allocator();
64
65 NestedMatch() = default;
66
67 /// Payload, holds a NestedMatch and all its children along this branch.
68 Operation *matchedOperation = nullptr;
69 ArrayRef<NestedMatch> matchedChildren;
70 };
71
72 /// A NestedPattern is a nested operation walker that:
73 /// 1. recursively matches a substructure in the tree;
74 /// 2. uses a filter function to refine matches with extra semantic
75 /// constraints (passed via a lambda of type FilterFunctionType);
76 /// 3. TODO: optionally applies actions (lambda).
77 ///
78 /// Nested patterns are meant to capture imperfectly nested loops while matching
79 /// properties over the whole loop nest. For instance, in vectorization we are
80 /// interested in capturing all the imperfectly nested loops of a certain type
81 /// and such that all the load and stores have certain access patterns along the
82 /// loops' induction variables). Such NestedMatches are first captured using the
83 /// `match` function and are later processed to analyze properties and apply
84 /// transformations in a non-greedy way.
85 ///
86 /// The NestedMatches captured in the IR can grow large, especially after
87 /// aggressive unrolling. As experience has shown, it is generally better to use
88 /// a plain walk over operations to match flat patterns but the current
89 /// implementation is competitive nonetheless.
90 using FilterFunctionType = std::function<bool(Operation &)>;
defaultFilterFunction(Operation &)91 inline bool defaultFilterFunction(Operation &) { return true; }
92 class NestedPattern {
93 public:
94 NestedPattern(ArrayRef<NestedPattern> nested,
95 FilterFunctionType filter = defaultFilterFunction);
96 NestedPattern(const NestedPattern &other);
97 NestedPattern &operator=(const NestedPattern &other);
98
~NestedPattern()99 ~NestedPattern() {
100 // Call destructors manually, ArrayRef is non-owning so it wouldn't call
101 // them, but we should free the memory allocated by std::function outside of
102 // the arena allocator.
103 freeNested();
104 }
105
106 /// Returns all the top-level matches in `op`.
match(Operation * op,SmallVectorImpl<NestedMatch> * matches)107 void match(Operation *op, SmallVectorImpl<NestedMatch> *matches) {
108 op->walk([&](Operation *child) { matchOne(child, matches); });
109 }
110
111 /// Returns the depth of the pattern.
112 unsigned getDepth() const;
113
114 private:
115 friend class NestedPatternContext;
116 friend class NestedMatch;
117 friend struct State;
118
119 /// Copies the list of nested patterns to the arena allocator associated with
120 /// this pattern.
121 void copyNestedToThis(ArrayRef<NestedPattern> nested);
122
123 /// Calls destructors on nested patterns.
124 void freeNested();
125
126 /// Underlying global bump allocator managed by a NestedPatternContext.
127 static llvm::BumpPtrAllocator *&allocator();
128
129 /// Matches this pattern against a single `op` and fills matches with the
130 /// result.
131 void matchOne(Operation *op, SmallVectorImpl<NestedMatch> *matches);
132
133 /// Nested patterns to be matched.
134 ArrayRef<NestedPattern> nestedPatterns;
135
136 /// Extra filter function to apply to prune patterns as the IR is walked.
137 FilterFunctionType filter;
138
139 /// skip is an implementation detail needed so that we can implement match
140 /// without switching on the type of the Operation. The idea is that a
141 /// NestedPattern first checks if it matches locally and then recursively
142 /// applies its nested matchers to its elem->nested. Since we want to rely on
143 /// the existing operation walking functionality rather than duplicate
144 /// it, we allow an off-by-one traversal to account for the fact that we
145 /// write:
146 ///
147 /// void match(Operation *elem) {
148 /// for (auto &c : getNestedPatterns()) {
149 /// NestedPattern childPattern(...);
150 /// ^~~~ Needs off-by-one skip.
151 ///
152 Operation *skip;
153 };
154
155 /// RAII structure to transparently manage the bump allocator for
156 /// NestedPattern and NestedMatch classes. This avoids passing a context to
157 /// all the API functions.
158 class NestedPatternContext {
159 public:
NestedPatternContext()160 NestedPatternContext() {
161 assert(NestedMatch::allocator() == nullptr &&
162 "Only a single NestedPatternContext is supported");
163 assert(NestedPattern::allocator() == nullptr &&
164 "Only a single NestedPatternContext is supported");
165 NestedMatch::allocator() = &allocator;
166 NestedPattern::allocator() = &allocator;
167 }
~NestedPatternContext()168 ~NestedPatternContext() {
169 NestedMatch::allocator() = nullptr;
170 NestedPattern::allocator() = nullptr;
171 }
172 llvm::BumpPtrAllocator allocator;
173 };
174
175 namespace matcher {
176 // Syntactic sugar NestedPattern builder functions.
177 NestedPattern Op(FilterFunctionType filter = defaultFilterFunction);
178 NestedPattern If(const NestedPattern &child);
179 NestedPattern If(const FilterFunctionType &filter, const NestedPattern &child);
180 NestedPattern If(ArrayRef<NestedPattern> nested = {});
181 NestedPattern If(const FilterFunctionType &filter,
182 ArrayRef<NestedPattern> nested = {});
183 NestedPattern For(const NestedPattern &child);
184 NestedPattern For(const FilterFunctionType &filter, const NestedPattern &child);
185 NestedPattern For(ArrayRef<NestedPattern> nested = {});
186 NestedPattern For(const FilterFunctionType &filter,
187 ArrayRef<NestedPattern> nested = {});
188
189 bool isParallelLoop(Operation &op);
190 bool isReductionLoop(Operation &op);
191 bool isLoadOrStore(Operation &op);
192
193 } // namespace matcher
} // namespace mlir

#endif // MLIR_DIALECT_AFFINE_ANALYSIS_NESTEDMATCHER_H