1 //===- Visitors.cpp - MLIR Visitor Utilities ------------------------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8
#include "mlir/IR/Visitors.h"
#include "mlir/IR/Operation.h"
11
12 using namespace mlir;
13
WalkStage(Operation * op)14 WalkStage::WalkStage(Operation *op)
15 : numRegions(op->getNumRegions()), nextRegion(0) {}
16
17 /// Walk all of the regions/blocks/operations nested under and including the
18 /// given operation. Regions, blocks and operations at the same nesting level
19 /// are visited in lexicographical order. The walk order for enclosing regions,
20 /// blocks and operations with respect to their nested ones is specified by
21 /// 'order'. These methods are invoked for void-returning callbacks. A callback
22 /// on a block or operation is allowed to erase that block or operation only if
23 /// the walk is in post-order. See non-void method for pre-order erasure.
walk(Operation * op,function_ref<void (Region *)> callback,WalkOrder order)24 void detail::walk(Operation *op, function_ref<void(Region *)> callback,
25 WalkOrder order) {
26 // We don't use early increment for regions because they can't be erased from
27 // a callback.
28 for (auto ®ion : op->getRegions()) {
29 if (order == WalkOrder::PreOrder)
30 callback(®ion);
31 for (auto &block : region) {
32 for (auto &nestedOp : block)
33 walk(&nestedOp, callback, order);
34 }
35 if (order == WalkOrder::PostOrder)
36 callback(®ion);
37 }
38 }
39
walk(Operation * op,function_ref<void (Block *)> callback,WalkOrder order)40 void detail::walk(Operation *op, function_ref<void(Block *)> callback,
41 WalkOrder order) {
42 for (auto ®ion : op->getRegions()) {
43 // Early increment here in the case where the block is erased.
44 for (auto &block : llvm::make_early_inc_range(region)) {
45 if (order == WalkOrder::PreOrder)
46 callback(&block);
47 for (auto &nestedOp : block)
48 walk(&nestedOp, callback, order);
49 if (order == WalkOrder::PostOrder)
50 callback(&block);
51 }
52 }
53 }
54
walk(Operation * op,function_ref<void (Operation *)> callback,WalkOrder order)55 void detail::walk(Operation *op, function_ref<void(Operation *)> callback,
56 WalkOrder order) {
57 if (order == WalkOrder::PreOrder)
58 callback(op);
59
60 // TODO: This walk should be iterative over the operations.
61 for (auto ®ion : op->getRegions()) {
62 for (auto &block : region) {
63 // Early increment here in the case where the operation is erased.
64 for (auto &nestedOp : llvm::make_early_inc_range(block))
65 walk(&nestedOp, callback, order);
66 }
67 }
68
69 if (order == WalkOrder::PostOrder)
70 callback(op);
71 }
72
walk(Operation * op,function_ref<void (Operation *,const WalkStage &)> callback)73 void detail::walk(Operation *op,
74 function_ref<void(Operation *, const WalkStage &)> callback) {
75 WalkStage stage(op);
76
77 for (Region ®ion : op->getRegions()) {
78 // Invoke callback on the parent op before visiting each child region.
79 callback(op, stage);
80 stage.advance();
81
82 for (Block &block : region) {
83 for (Operation &nestedOp : block)
84 walk(&nestedOp, callback);
85 }
86 }
87
88 // Invoke callback after all regions have been visited.
89 callback(op, stage);
90 }
91
92 /// Walk all of the regions/blocks/operations nested under and including the
93 /// given operation. These functions walk operations until an interrupt result
94 /// is returned by the callback. Walks on regions, blocks and operations may
95 /// also be skipped if the callback returns a skip result. Regions, blocks and
96 /// operations at the same nesting level are visited in lexicographical order.
97 /// The walk order for enclosing regions, blocks and operations with respect to
98 /// their nested ones is specified by 'order'. A callback on a block or
99 /// operation is allowed to erase that block or operation if either:
100 /// * the walk is in post-order, or
101 /// * the walk is in pre-order and the walk is skipped after the erasure.
walk(Operation * op,function_ref<WalkResult (Region *)> callback,WalkOrder order)102 WalkResult detail::walk(Operation *op,
103 function_ref<WalkResult(Region *)> callback,
104 WalkOrder order) {
105 // We don't use early increment for regions because they can't be erased from
106 // a callback.
107 for (auto ®ion : op->getRegions()) {
108 if (order == WalkOrder::PreOrder) {
109 WalkResult result = callback(®ion);
110 if (result.wasSkipped())
111 continue;
112 if (result.wasInterrupted())
113 return WalkResult::interrupt();
114 }
115 for (auto &block : region) {
116 for (auto &nestedOp : block)
117 if (walk(&nestedOp, callback, order).wasInterrupted())
118 return WalkResult::interrupt();
119 }
120 if (order == WalkOrder::PostOrder) {
121 if (callback(®ion).wasInterrupted())
122 return WalkResult::interrupt();
123 // We don't check if this region was skipped because its walk already
124 // finished and the walk will continue with the next region.
125 }
126 }
127 return WalkResult::advance();
128 }
129
walk(Operation * op,function_ref<WalkResult (Block *)> callback,WalkOrder order)130 WalkResult detail::walk(Operation *op,
131 function_ref<WalkResult(Block *)> callback,
132 WalkOrder order) {
133 for (auto ®ion : op->getRegions()) {
134 // Early increment here in the case where the block is erased.
135 for (auto &block : llvm::make_early_inc_range(region)) {
136 if (order == WalkOrder::PreOrder) {
137 WalkResult result = callback(&block);
138 if (result.wasSkipped())
139 continue;
140 if (result.wasInterrupted())
141 return WalkResult::interrupt();
142 }
143 for (auto &nestedOp : block)
144 if (walk(&nestedOp, callback, order).wasInterrupted())
145 return WalkResult::interrupt();
146 if (order == WalkOrder::PostOrder) {
147 if (callback(&block).wasInterrupted())
148 return WalkResult::interrupt();
149 // We don't check if this block was skipped because its walk already
150 // finished and the walk will continue with the next block.
151 }
152 }
153 }
154 return WalkResult::advance();
155 }
156
walk(Operation * op,function_ref<WalkResult (Operation *)> callback,WalkOrder order)157 WalkResult detail::walk(Operation *op,
158 function_ref<WalkResult(Operation *)> callback,
159 WalkOrder order) {
160 if (order == WalkOrder::PreOrder) {
161 WalkResult result = callback(op);
162 // If skipped, caller will continue the walk on the next operation.
163 if (result.wasSkipped())
164 return WalkResult::advance();
165 if (result.wasInterrupted())
166 return WalkResult::interrupt();
167 }
168
169 // TODO: This walk should be iterative over the operations.
170 for (auto ®ion : op->getRegions()) {
171 for (auto &block : region) {
172 // Early increment here in the case where the operation is erased.
173 for (auto &nestedOp : llvm::make_early_inc_range(block)) {
174 if (walk(&nestedOp, callback, order).wasInterrupted())
175 return WalkResult::interrupt();
176 }
177 }
178 }
179
180 if (order == WalkOrder::PostOrder)
181 return callback(op);
182 return WalkResult::advance();
183 }
184
walk(Operation * op,function_ref<WalkResult (Operation *,const WalkStage &)> callback)185 WalkResult detail::walk(
186 Operation *op,
187 function_ref<WalkResult(Operation *, const WalkStage &)> callback) {
188 WalkStage stage(op);
189
190 for (Region ®ion : op->getRegions()) {
191 // Invoke callback on the parent op before visiting each child region.
192 WalkResult result = callback(op, stage);
193
194 if (result.wasSkipped())
195 return WalkResult::advance();
196 if (result.wasInterrupted())
197 return WalkResult::interrupt();
198
199 stage.advance();
200
201 for (Block &block : region) {
202 // Early increment here in the case where the operation is erased.
203 for (Operation &nestedOp : llvm::make_early_inc_range(block))
204 if (walk(&nestedOp, callback).wasInterrupted())
205 return WalkResult::interrupt();
206 }
207 }
208 return callback(op, stage);
209 }
210