//===- ThreadSafetyTIL.cpp -------------------------------------*- C++ --*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT in the llvm repository for details.
//
//===----------------------------------------------------------------------===//

#include "clang/Analysis/Analyses/ThreadSafetyTIL.h"
#include "clang/Analysis/Analyses/ThreadSafetyTraverse.h"
using namespace clang;
using namespace threadSafety;
using namespace til;

// Returns the printable spelling of a unary opcode, or "" if Op is not a
// valid TIL_UnaryOpcode value.
StringRef til::getUnaryOpcodeString(TIL_UnaryOpcode Op) {
  switch (Op) {
    case UOP_Minus:    return "-";
    case UOP_BitNot:   return "~";
    case UOP_LogicNot: return "!";
  }
  return "";
}

// Returns the printable spelling of a binary opcode, or "" if Op is not a
// valid TIL_BinaryOpcode value.
StringRef til::getBinaryOpcodeString(TIL_BinaryOpcode Op) {
  switch (Op) {
    case BOP_Mul:      return "*";
    case BOP_Div:      return "/";
    case BOP_Rem:      return "%";
    case BOP_Add:      return "+";
    case BOP_Sub:      return "-";
    case BOP_Shl:      return "<<";
    case BOP_Shr:      return ">>";
    case BOP_BitAnd:   return "&";
    case BOP_BitXor:   return "^";
    case BOP_BitOr:    return "|";
    case BOP_Eq:       return "==";
    case BOP_Neq:      return "!=";
    case BOP_Lt:       return "<";
    case BOP_Leq:      return "<=";
    case BOP_Cmp:      return "<=>";
    case BOP_LogicAnd: return "&&";
    case BOP_LogicOr:  return "||";
  }
  return "";
}


// Force evaluation of this future: run compute(), cache the result, and mark
// the future as done. Returns the computed expression.
// NOTE(review): Status is set to FS_evaluating while compute() runs, but
// nothing here guards against re-entrant forcing of the same future --
// presumably callers/compute() never do that; confirm.
SExpr* Future::force() {
  Status = FS_evaluating;
  Result = compute();
  Status = FS_done;
  return Result;
}


// Adds Pred as a new predecessor of this block and appends a nullptr
// placeholder to every Phi node in Args, keeping each Phi's value list in
// sync with the predecessor list (one value per predecessor). The caller is
// expected to fill in the placeholder later. Returns the index of the new
// predecessor.
unsigned BasicBlock::addPredecessor(BasicBlock *Pred) {
  unsigned Idx = Predecessors.size();
  Predecessors.reserveCheck(1, Arena);
  Predecessors.push_back(Pred);
  for (SExpr *E : Args) {
    if (Phi* Ph = dyn_cast<Phi>(E)) {
      Ph->values().reserveCheck(1, Arena);
      Ph->values().push_back(nullptr);
    }
  }
  return Idx;
}


// Reserves arena storage for NumPreds predecessors, and for NumPreds values
// in every Phi node in Args, so subsequent insertions need not reallocate.
void BasicBlock::reservePredecessors(unsigned NumPreds) {
  Predecessors.reserve(NumPreds, Arena);
  for (SExpr *E : Args) {
    if (Phi* Ph = dyn_cast<Phi>(E)) {
      Ph->values().reserve(NumPreds, Arena);
    }
  }
}


// If E is a variable, then trace back through any aliases or redundant
// Phi nodes to find the canonical definition.
const SExpr *til::getCanonicalVal(const SExpr *E) {
  while (true) {
    // A let-variable is a pure alias for its definition.
    if (auto *V = dyn_cast<Variable>(E)) {
      if (V->kind() == Variable::VK_Let) {
        E = V->definition();
        continue;
      }
    }
    // A Phi already marked PH_SingleVal is an alias for its first value.
    if (const Phi *Ph = dyn_cast<Phi>(E)) {
      if (Ph->status() == Phi::PH_SingleVal) {
        E = Ph->values()[0];
        continue;
      }
    }
    break;
  }
  return E;
}


// If E is a variable, then trace back through any aliases or redundant
// Phi nodes to find the canonical definition.
// The non-const version will simplify incomplete Phi nodes.
SExpr *til::simplifyToCanonicalVal(SExpr *E) {
  while (true) {
    if (auto *V = dyn_cast<Variable>(E)) {
      if (V->kind() != Variable::VK_Let)
        return V;
      // Eliminate redundant variables, e.g. x = y, or x = 5,
      // but keep anything more complicated.
      if (til::ThreadSafetyTIL::isTrivial(V->definition())) {
        E = V->definition();
        continue;
      }
      return V;
    }
    if (auto *Ph = dyn_cast<Phi>(E)) {
      // Resolve the Phi's status first, so the PH_SingleVal check below sees
      // the final classification.
      if (Ph->status() == Phi::PH_Incomplete)
        simplifyIncompleteArg(Ph);
      // Eliminate redundant Phi nodes.
      if (Ph->status() == Phi::PH_SingleVal) {
        E = Ph->values()[0];
        continue;
      }
    }
    return E;
  }
}


// Trace the arguments of an incomplete Phi node to see if they have the same
// canonical definition. If so, mark the Phi node as redundant (PH_SingleVal);
// otherwise leave it marked PH_MultiVal.
// simplifyToCanonicalVal() will recursively call simplifyIncompleteArg().
void til::simplifyIncompleteArg(til::Phi *Ph) {
  assert(Ph && Ph->status() == Phi::PH_Incomplete);

  // eliminate infinite recursion -- assume that this node is not redundant.
  Ph->setStatus(Phi::PH_MultiVal);

  SExpr *E0 = simplifyToCanonicalVal(Ph->values()[0]);
  for (unsigned i=1, n=Ph->values().size(); i<n; ++i) {
    SExpr *Ei = simplifyToCanonicalVal(Ph->values()[i]);
    if (Ei == Ph)
      continue;  // Recursive reference to itself.  Don't count.
    if (Ei != E0) {
      return;    // Status is already set to MultiVal.
    }
  }
  Ph->setStatus(Phi::PH_SingleVal);
}


// Renumbers the arguments and instructions to have unique, sequential IDs.
// ID is the first ID to assign; returns the next unused ID.
int BasicBlock::renumberInstrs(int ID) {
  for (auto *Arg : Args)
    Arg->setID(this, ID++);
  for (auto *Instr : Instrs)
    Instr->setID(this, ID++);
  TermInstr->setID(this, ID++);
  return ID;
}

// Sorts the CFGs blocks using a reverse post-order depth-first traversal.
// Each block will be written into the Blocks array in order, and its BlockID
// will be set to the index in the array. Sorting should start from the entry
// block, and ID should be the total number of blocks. IDs are assigned by
// decrementing ID on post-order exit, so the entry block ends up first.
// Returns the number of IDs left unassigned, i.e. the number of blocks that
// were NOT reachable from the entry block.
int BasicBlock::topologicalSort(SimpleArray<BasicBlock*>& Blocks, int ID) {
  if (Visited) return ID;
  Visited = true;
  for (auto *Block : successors())
    ID = Block->topologicalSort(Blocks, ID);
  // set ID and update block array in place.
  // We may lose pointers to unreachable blocks.
  assert(ID > 0);
  BlockID = --ID;
  Blocks[BlockID] = this;
  return ID;
}

// Performs a reverse topological traversal, starting from the exit block and
// following back-edges. The dominator is serialized before any predecessors,
// which guarantees that all blocks are serialized after their dominator and
// before their post-dominator (because it's a reverse topological traversal).
// ID should be initially set to 0.
//
// This sort assumes that (1) dominators have been computed, (2) there are no
// critical edges, and (3) the entry block is reachable from the exit block
// and no blocks are accessible via traversal of back-edges from the exit that
// weren't accessible via forward edges from the entry.
int BasicBlock::topologicalFinalSort(SimpleArray<BasicBlock*>& Blocks, int ID) {
  // Visited is assumed to have been set by the topologicalSort.  This pass
  // assumes !Visited means that we've visited this node before.
  if (!Visited) return ID;
  Visited = false;
  // Serialize the immediate dominator first, then all predecessors, so this
  // block is emitted after everything that must precede it.
  if (DominatorNode.Parent)
    ID = DominatorNode.Parent->topologicalFinalSort(Blocks, ID);
  for (auto *Pred : Predecessors)
    ID = Pred->topologicalFinalSort(Blocks, ID);
  assert(static_cast<size_t>(ID) < Blocks.size());
  BlockID = ID++;
  Blocks[BlockID] = this;
  return ID;
}

// Computes the immediate dominator of the current block.  Assumes that all of
// its predecessors have already computed their dominators.  This is achieved
// by visiting the nodes in topological order. The entry block (which has no
// forward-edge predecessors) keeps a null DominatorNode.Parent.
void BasicBlock::computeDominator() {
  BasicBlock *Candidate = nullptr;
  // Walk backwards from each predecessor to find the common dominator node.
  for (auto *Pred : Predecessors) {
    // Skip back-edges (predecessors that come later in the topological order).
    if (Pred->BlockID >= BlockID) continue;
    // If we don't yet have a candidate for dominator, take this one.
    if (Candidate == nullptr) {
      Candidate = Pred;
      continue;
    }
    // Walk the alternate and current candidate back to find a common ancestor.
    // NOTE(review): this looks like the standard "intersect" step of the
    // Cooper/Harvey/Kennedy dominance algorithm, using BlockID order to decide
    // which finger to advance -- it relies on dominators having smaller
    // BlockIDs than the blocks they dominate.
    auto *Alternate = Pred;
    while (Alternate != Candidate) {
      if (Candidate->BlockID > Alternate->BlockID)
        Candidate = Candidate->DominatorNode.Parent;
      else
        Alternate = Alternate->DominatorNode.Parent;
    }
  }
  DominatorNode.Parent = Candidate;
  DominatorNode.SizeOfSubTree = 1;
}

// Computes the immediate post-dominator of the current block.  Assumes that all
// of its successors have already computed their post-dominators.  This is
// achieved visiting the nodes in reverse topological order. The exit block
// (which has no forward-edge successors) keeps a null PostDominatorNode.Parent.
void BasicBlock::computePostDominator() {
  BasicBlock *Candidate = nullptr;
  // Walk forward from each successor to find the common post-dominator node.
  for (auto *Succ : successors()) {
    // Skip back-edges (successors that come earlier in the topological order).
    if (Succ->BlockID <= BlockID) continue;
    // If we don't yet have a candidate for post-dominator, take this one.
    if (Candidate == nullptr) {
      Candidate = Succ;
      continue;
    }
    // Walk the alternate and current candidate back to find a common ancestor.
    // Mirror image of computeDominator(): post-dominators have larger
    // BlockIDs, so the comparison is inverted.
    auto *Alternate = Succ;
    while (Alternate != Candidate) {
      if (Candidate->BlockID < Alternate->BlockID)
        Candidate = Candidate->PostDominatorNode.Parent;
      else
        Alternate = Alternate->PostDominatorNode.Parent;
    }
  }
  PostDominatorNode.Parent = Candidate;
  PostDominatorNode.SizeOfSubTree = 1;
}


// Renumber instructions in all blocks with unique, sequential IDs, starting
// from 0 in block order.
void SCFG::renumberInstrs() {
  int InstrID = 0;
  for (auto *Block : Blocks)
    InstrID = Block->renumberInstrs(InstrID);
}


// Accumulates the size of B's (dominator- or post-dominator-) subtree into its
// parent, and stashes B's provisional NodeID as an offset relative to the
// parent; computeNodeID() later converts that offset into an absolute ID.
// TN selects which topology (DominatorNode or PostDominatorNode) to operate on.
static inline void computeNodeSize(BasicBlock *B,
                                   BasicBlock::TopologyNode BasicBlock::*TN) {
  BasicBlock::TopologyNode *N = &(B->*TN);
  if (N->Parent) {
    BasicBlock::TopologyNode *P = &(N->Parent->*TN);
    // Initially set ID relative to the (as yet uncomputed) parent ID
    N->NodeID = P->SizeOfSubTree;
    P->SizeOfSubTree += N->SizeOfSubTree;
  }
}

// Converts B's relative NodeID (set by computeNodeSize) into an absolute ID by
// adding the parent's (already finalized) NodeID. The root keeps its ID as-is.
static inline void computeNodeID(BasicBlock *B,
                                 BasicBlock::TopologyNode BasicBlock::*TN) {
  BasicBlock::TopologyNode *N = &(B->*TN);
  if (N->Parent) {
    BasicBlock::TopologyNode *P = &(N->Parent->*TN);
    N->NodeID += P->NodeID;   // Fix NodeIDs relative to starting node.
  }
}


// Normalizes a CFG.  Normalization has a few major components:
// 1) Removing unreachable blocks.
// 2) Computing dominators and post-dominators
// 3) Topologically sorting the blocks into the "Blocks" array.
void SCFG::computeNormalForm() {
  // Topologically sort the blocks starting from the entry block.
  // topologicalSort assigns IDs downward from Blocks.size(), so its return
  // value is the count of blocks it never reached.
  int NumUnreachableBlocks = Entry->topologicalSort(Blocks, Blocks.size());
  if (NumUnreachableBlocks > 0) {
    // If there were unreachable blocks shift everything down, and delete them.
    for (size_t I = NumUnreachableBlocks, E = Blocks.size(); I < E; ++I) {
      size_t NI = I - NumUnreachableBlocks;
      Blocks[NI] = Blocks[I];
      Blocks[NI]->BlockID = NI;
      // FIXME: clean up predecessor pointers to unreachable blocks?
    }
    Blocks.drop(NumUnreachableBlocks);
  }

  // Compute dominators. Blocks is in topological order, as computeDominator
  // requires (all predecessors before the block itself).
  for (auto *Block : Blocks)
    Block->computeDominator();

  // Once dominators have been computed, the final sort may be performed.
  int NumBlocks = Exit->topologicalFinalSort(Blocks, 0);
  assert(static_cast<size_t>(NumBlocks) == Blocks.size());
  (void) NumBlocks;

  // Renumber the instructions now that we have a final sort.
  renumberInstrs();

  // Compute post-dominators and compute the sizes of each node in the
  // dominator tree. Reverse order: computePostDominator needs successors
  // done first, and computeNodeSize needs children done before parents.
  for (auto *Block : Blocks.reverse()) {
    Block->computePostDominator();
    computeNodeSize(Block, &BasicBlock::DominatorNode);
  }
  // Compute the sizes of each node in the post-dominator tree and assign IDs in
  // the dominator tree. Forward order: computeNodeID needs parents before
  // children in the dominator tree.
  for (auto *Block : Blocks) {
    computeNodeID(Block, &BasicBlock::DominatorNode);
    computeNodeSize(Block, &BasicBlock::PostDominatorNode);
  }
  // Assign IDs in the post-dominator tree (parents precede children in
  // reverse block order).
  for (auto *Block : Blocks.reverse()) {
    computeNodeID(Block, &BasicBlock::PostDominatorNode);
  }
}