1 //===- Visitors.cpp - MLIR Visitor Utilities ------------------------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 
9 #include "mlir/IR/Visitors.h"
10 #include "mlir/IR/Operation.h"
11 
12 using namespace mlir;
13 
/// Initialize the stage for walking `op`: record how many regions it has and
/// start before the first region (nextRegion == 0 means no region of `op` has
/// been visited yet).
WalkStage::WalkStage(Operation *op)
    : numRegions(op->getNumRegions()), nextRegion(0) {}
16 
17 /// Walk all of the regions/blocks/operations nested under and including the
18 /// given operation. Regions, blocks and operations at the same nesting level
19 /// are visited in lexicographical order. The walk order for enclosing regions,
20 /// blocks and operations with respect to their nested ones is specified by
21 /// 'order'. These methods are invoked for void-returning callbacks. A callback
22 /// on a block or operation is allowed to erase that block or operation only if
23 /// the walk is in post-order. See non-void method for pre-order erasure.
24 void detail::walk(Operation *op, function_ref<void(Region *)> callback,
25                   WalkOrder order) {
26   // We don't use early increment for regions because they can't be erased from
27   // a callback.
28   for (auto &region : op->getRegions()) {
29     if (order == WalkOrder::PreOrder)
30       callback(&region);
31     for (auto &block : region) {
32       for (auto &nestedOp : block)
33         walk(&nestedOp, callback, order);
34     }
35     if (order == WalkOrder::PostOrder)
36       callback(&region);
37   }
38 }
39 
40 void detail::walk(Operation *op, function_ref<void(Block *)> callback,
41                   WalkOrder order) {
42   for (auto &region : op->getRegions()) {
43     // Early increment here in the case where the block is erased.
44     for (auto &block : llvm::make_early_inc_range(region)) {
45       if (order == WalkOrder::PreOrder)
46         callback(&block);
47       for (auto &nestedOp : block)
48         walk(&nestedOp, callback, order);
49       if (order == WalkOrder::PostOrder)
50         callback(&block);
51     }
52   }
53 }
54 
55 void detail::walk(Operation *op, function_ref<void(Operation *)> callback,
56                   WalkOrder order) {
57   if (order == WalkOrder::PreOrder)
58     callback(op);
59 
60   // TODO: This walk should be iterative over the operations.
61   for (auto &region : op->getRegions()) {
62     for (auto &block : region) {
63       // Early increment here in the case where the operation is erased.
64       for (auto &nestedOp : llvm::make_early_inc_range(block))
65         walk(&nestedOp, callback, order);
66     }
67   }
68 
69   if (order == WalkOrder::PostOrder)
70     callback(op);
71 }
72 
73 void detail::walk(Operation *op,
74                   function_ref<void(Operation *, const WalkStage &)> callback) {
75   WalkStage stage(op);
76 
77   for (Region &region : op->getRegions()) {
78     // Invoke callback on the parent op before visiting each child region.
79     callback(op, stage);
80     stage.advance();
81 
82     for (Block &block : region) {
83       for (Operation &nestedOp : block)
84         walk(&nestedOp, callback);
85     }
86   }
87 
88   // Invoke callback after all regions have been visited.
89   callback(op, stage);
90 }
91 
92 /// Walk all of the regions/blocks/operations nested under and including the
93 /// given operation. These functions walk operations until an interrupt result
94 /// is returned by the callback. Walks on regions, blocks and operations may
95 /// also be skipped if the callback returns a skip result. Regions, blocks and
96 /// operations at the same nesting level are visited in lexicographical order.
97 /// The walk order for enclosing regions, blocks and operations with respect to
98 /// their nested ones is specified by 'order'. A callback on a block or
99 /// operation is allowed to erase that block or operation if either:
100 ///   * the walk is in post-order, or
101 ///   * the walk is in pre-order and the walk is skipped after the erasure.
102 WalkResult detail::walk(Operation *op,
103                         function_ref<WalkResult(Region *)> callback,
104                         WalkOrder order) {
105   // We don't use early increment for regions because they can't be erased from
106   // a callback.
107   for (auto &region : op->getRegions()) {
108     if (order == WalkOrder::PreOrder) {
109       WalkResult result = callback(&region);
110       if (result.wasSkipped())
111         continue;
112       if (result.wasInterrupted())
113         return WalkResult::interrupt();
114     }
115     for (auto &block : region) {
116       for (auto &nestedOp : block)
117         walk(&nestedOp, callback, order);
118     }
119     if (order == WalkOrder::PostOrder) {
120       if (callback(&region).wasInterrupted())
121         return WalkResult::interrupt();
122       // We don't check if this region was skipped because its walk already
123       // finished and the walk will continue with the next region.
124     }
125   }
126   return WalkResult::advance();
127 }
128 
129 WalkResult detail::walk(Operation *op,
130                         function_ref<WalkResult(Block *)> callback,
131                         WalkOrder order) {
132   for (auto &region : op->getRegions()) {
133     // Early increment here in the case where the block is erased.
134     for (auto &block : llvm::make_early_inc_range(region)) {
135       if (order == WalkOrder::PreOrder) {
136         WalkResult result = callback(&block);
137         if (result.wasSkipped())
138           continue;
139         if (result.wasInterrupted())
140           return WalkResult::interrupt();
141       }
142       for (auto &nestedOp : block)
143         walk(&nestedOp, callback, order);
144       if (order == WalkOrder::PostOrder) {
145         if (callback(&block).wasInterrupted())
146           return WalkResult::interrupt();
147         // We don't check if this block was skipped because its walk already
148         // finished and the walk will continue with the next block.
149       }
150     }
151   }
152   return WalkResult::advance();
153 }
154 
155 WalkResult detail::walk(Operation *op,
156                         function_ref<WalkResult(Operation *)> callback,
157                         WalkOrder order) {
158   if (order == WalkOrder::PreOrder) {
159     WalkResult result = callback(op);
160     // If skipped, caller will continue the walk on the next operation.
161     if (result.wasSkipped())
162       return WalkResult::advance();
163     if (result.wasInterrupted())
164       return WalkResult::interrupt();
165   }
166 
167   // TODO: This walk should be iterative over the operations.
168   for (auto &region : op->getRegions()) {
169     for (auto &block : region) {
170       // Early increment here in the case where the operation is erased.
171       for (auto &nestedOp : llvm::make_early_inc_range(block)) {
172         if (walk(&nestedOp, callback, order).wasInterrupted())
173           return WalkResult::interrupt();
174       }
175     }
176   }
177 
178   if (order == WalkOrder::PostOrder)
179     return callback(op);
180   return WalkResult::advance();
181 }
182 
183 WalkResult detail::walk(
184     Operation *op,
185     function_ref<WalkResult(Operation *, const WalkStage &)> callback) {
186   WalkStage stage(op);
187 
188   for (Region &region : op->getRegions()) {
189     // Invoke callback on the parent op before visiting each child region.
190     WalkResult result = callback(op, stage);
191 
192     if (result.wasSkipped())
193       return WalkResult::advance();
194     if (result.wasInterrupted())
195       return WalkResult::interrupt();
196 
197     stage.advance();
198 
199     for (Block &block : region) {
200       // Early increment here in the case where the operation is erased.
201       for (Operation &nestedOp : llvm::make_early_inc_range(block))
202         if (walk(&nestedOp, callback).wasInterrupted())
203           return WalkResult::interrupt();
204     }
205   }
206   return callback(op, stage);
207 }
208