//=-- ExplodedGraph.cpp - Local, Path-Sens. "Exploded Graph" -*- C++ -*------=//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines the classes ExplodedNode and ExplodedGraph, which
//  represent a path-sensitive, intra-procedural "exploded graph."
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/Stmt.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include <vector>

using namespace clang;
using namespace ento;

//===----------------------------------------------------------------------===//
// Node auditing.
//===----------------------------------------------------------------------===//

// An out-of-line virtual method to provide a home for the class vtable.
ExplodedNode::Auditor::~Auditor() {}

#ifndef NDEBUG
static ExplodedNode::Auditor* NodeAuditor = 0;
#endif

void ExplodedNode::SetAuditor(ExplodedNode::Auditor* A) {
#ifndef NDEBUG
  NodeAuditor = A;
#endif
}

//===----------------------------------------------------------------------===//
// Cleanup.
//===----------------------------------------------------------------------===//

ExplodedGraph::ExplodedGraph()
  : NumNodes(0), ReclaimNodeInterval(0) {}

ExplodedGraph::~ExplodedGraph() {}

//===----------------------------------------------------------------------===//
// Node reclamation.
//===----------------------------------------------------------------------===//

bool ExplodedGraph::shouldCollect(const ExplodedNode *node) {
  // Reclaim all nodes that match *all* the following criteria:
  //
  // (1) 1 predecessor (that has one successor)
  // (2) 1 successor (that has one predecessor)
  // (3) The ProgramPoint is for a PostStmt, but not a PostStore.
  // (4) There is no 'tag' for the ProgramPoint.
  // (5) The 'store' is the same as the predecessor's.
  // (6) The 'GDM' is the same as the predecessor's.
  // (7) The LocationContext is the same as the predecessor's.
  // (8) The PostStmt isn't for a non-consumed Stmt or Expr.
  // (9) The successor is not a CallExpr StmtPoint (so that we would be able to
  //     find it when retrying a call with no inlining).
  // FIXME: It may be safe to reclaim PreCall and PostCall nodes as well.

  // Conditions 1 and 2.
  if (node->pred_size() != 1 || node->succ_size() != 1)
    return false;

  const ExplodedNode *pred = *(node->pred_begin());
  if (pred->succ_size() != 1)
    return false;

  const ExplodedNode *succ = *(node->succ_begin());
  if (succ->pred_size() != 1)
    return false;

  // Condition 3.
  ProgramPoint progPoint = node->getLocation();
  if (!isa<PostStmt>(progPoint) || isa<PostStore>(progPoint))
    return false;

  // Condition 4.
  PostStmt ps = cast<PostStmt>(progPoint);
  if (ps.getTag())
    return false;

  // Conditions 5, 6, and 7.
  ProgramStateRef state = node->getState();
  ProgramStateRef pred_state = pred->getState();
  if (state->store != pred_state->store || state->GDM != pred_state->GDM ||
      progPoint.getLocationContext() != pred->getLocationContext())
    return false;

  // Condition 8.
  // Do not collect nodes for non-consumed Stmt or Expr to ensure precise
  // diagnostic generation; specifically, so that we can anchor arrows
  // pointing to the beginning of statements (as written in code).
  const Expr *Ex = dyn_cast<Expr>(ps.getStmt());
  if (!Ex)
    return false;

  ParentMap &PM = progPoint.getLocationContext()->getParentMap();
  if (!PM.isConsumedExpr(Ex))
    return false;

  // Condition 9.
  const ProgramPoint SuccLoc = succ->getLocation();
  if (const StmtPoint *SP = dyn_cast<StmtPoint>(&SuccLoc))
    if (CallEvent::isCallStmt(SP->getStmt()))
      return false;

  return true;
}

void ExplodedGraph::collectNode(ExplodedNode *node) {
  // Removing a node means:
  // (a) changing the predecessor's successor to the successor of this node,
  // (b) changing the successor's predecessor to the predecessor of this node,
  // (c) putting 'node' onto FreeNodes.
  assert(node->pred_size() == 1 || node->succ_size() == 1);
  ExplodedNode *pred = *(node->pred_begin());
  ExplodedNode *succ = *(node->succ_begin());
  pred->replaceSuccessor(succ);
  succ->replacePredecessor(pred);
  FreeNodes.push_back(node);
  Nodes.RemoveNode(node);
  --NumNodes;
  node->~ExplodedNode();
}

void ExplodedGraph::reclaimRecentlyAllocatedNodes() {
  if (ChangedNodes.empty())
    return;

  // Only periodically reclaim nodes so that we can build up a set of
  // nodes that meet the reclamation criteria.  Freshly created nodes
  // by definition have no successor, and thus cannot be reclaimed
  // (see shouldCollect()).
  assert(ReclaimCounter > 0);
  if (--ReclaimCounter != 0)
    return;
  ReclaimCounter = ReclaimNodeInterval;

  for (NodeVector::iterator it = ChangedNodes.begin(), et = ChangedNodes.end();
       it != et; ++it) {
    ExplodedNode *node = *it;
    if (shouldCollect(node))
      collectNode(node);
  }
  ChangedNodes.clear();
}

//===----------------------------------------------------------------------===//
// ExplodedNode.
//===----------------------------------------------------------------------===//

// A NodeGroup's storage type is actually very much like a TinyPtrVector:
// it can be either a pointer to a single ExplodedNode, or a pointer to a
// BumpVector allocated with the ExplodedGraph's allocator.  This allows the
// common case of single-node NodeGroups to be implemented with no extra
// memory.
//
// Consequently, each of the NodeGroup methods has up to four cases to handle:
// 1. The flag is set and this group does not actually contain any nodes.
// 2. The group is empty, in which case the storage value is null.
// 3. The group contains a single node.
// 4. The group contains more than one node.
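//
// For illustration, following the cases above (node names are purely
// illustrative): starting from an empty group, adding a node A and then a
// second node B walks the storage through these shapes:
//
//   {}        -> the storage pointer is null                    (case 2)
//   { A }     -> the storage holds the ExplodedNode* A directly (case 3)
//   { A, B }  -> the storage holds a BumpVector* of [A, B],     (case 4)
//                allocated from the graph's allocator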
typedef BumpVector<ExplodedNode *> ExplodedNodeVector;
typedef llvm::PointerUnion<ExplodedNode *, ExplodedNodeVector *> GroupStorage;

void ExplodedNode::addPredecessor(ExplodedNode *V, ExplodedGraph &G) {
  assert(!V->isSink());
  Preds.addNode(V, G);
  V->Succs.addNode(this, G);
#ifndef NDEBUG
  if (NodeAuditor) NodeAuditor->AddEdge(V, this);
#endif
}

void ExplodedNode::NodeGroup::replaceNode(ExplodedNode *node) {
  assert(!getFlag());

  GroupStorage &Storage = reinterpret_cast<GroupStorage&>(P);
  assert(Storage.is<ExplodedNode *>());
  Storage = node;
  assert(Storage.is<ExplodedNode *>());
}

void ExplodedNode::NodeGroup::addNode(ExplodedNode *N, ExplodedGraph &G) {
  assert(!getFlag());

  GroupStorage &Storage = reinterpret_cast<GroupStorage&>(P);
  if (Storage.isNull()) {
    Storage = N;
    assert(Storage.is<ExplodedNode *>());
    return;
  }

  ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>();

  if (!V) {
    // Switch from single-node to multi-node representation.
    ExplodedNode *Old = Storage.get<ExplodedNode *>();

    BumpVectorContext &Ctx = G.getNodeAllocator();
    V = G.getAllocator().Allocate<ExplodedNodeVector>();
    new (V) ExplodedNodeVector(Ctx, 4);
    V->push_back(Old, Ctx);

    Storage = V;
    assert(!getFlag());
    assert(Storage.is<ExplodedNodeVector *>());
  }

  V->push_back(N, G.getNodeAllocator());
}

unsigned ExplodedNode::NodeGroup::size() const {
  if (getFlag())
    return 0;

  const GroupStorage &Storage = reinterpret_cast<const GroupStorage &>(P);
  if (Storage.isNull())
    return 0;
  if (ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>())
    return V->size();
  return 1;
}

ExplodedNode * const *ExplodedNode::NodeGroup::begin() const {
  if (getFlag())
    return 0;

  const GroupStorage &Storage = reinterpret_cast<const GroupStorage &>(P);
  if (Storage.isNull())
    return 0;
  if (ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>())
    return V->begin();
  return Storage.getAddrOfPtr1();
}

ExplodedNode * const *ExplodedNode::NodeGroup::end() const {
  if (getFlag())
    return 0;

  const GroupStorage &Storage = reinterpret_cast<const GroupStorage &>(P);
  if (Storage.isNull())
    return 0;
  if (ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>())
    return V->end();
  return Storage.getAddrOfPtr1() + 1;
}

ExplodedNode *ExplodedGraph::getNode(const ProgramPoint &L,
                                     ProgramStateRef State,
                                     bool IsSink,
                                     bool *IsNew) {
  // Profile 'State' to determine if we already have an existing node.
  llvm::FoldingSetNodeID profile;
  void *InsertPos = 0;

  NodeTy::Profile(profile, L, State, IsSink);
  NodeTy *V = Nodes.FindNodeOrInsertPos(profile, InsertPos);

  if (!V) {
    if (!FreeNodes.empty()) {
      V = FreeNodes.back();
      FreeNodes.pop_back();
    }
    else {
      // Allocate a new node.
      V = (NodeTy*) getAllocator().Allocate<NodeTy>();
    }

    new (V) NodeTy(L, State, IsSink);

    if (ReclaimNodeInterval)
      ChangedNodes.push_back(V);

    // Insert the node into the node set and return it.
    Nodes.InsertNode(V, InsertPos);
    ++NumNodes;

    if (IsNew) *IsNew = true;
  }
  else
    if (IsNew) *IsNew = false;

  return V;
}

std::pair<ExplodedGraph*, InterExplodedGraphMap*>
ExplodedGraph::Trim(const NodeTy* const* NBeg, const NodeTy* const* NEnd,
                    llvm::DenseMap<const void*, const void*> *InverseMap) const {

  if (NBeg == NEnd)
    return std::make_pair((ExplodedGraph*) 0,
                          (InterExplodedGraphMap*) 0);

  assert(NBeg < NEnd);

  OwningPtr<InterExplodedGraphMap> M(new InterExplodedGraphMap());

  ExplodedGraph* G = TrimInternal(NBeg, NEnd, M.get(), InverseMap);

  return std::make_pair(static_cast<ExplodedGraph*>(G), M.take());
}

ExplodedGraph*
ExplodedGraph::TrimInternal(const ExplodedNode* const* BeginSources,
                            const ExplodedNode* const* EndSources,
                            InterExplodedGraphMap* M,
                            llvm::DenseMap<const void*, const void*> *InverseMap) const {

  typedef llvm::DenseSet<const ExplodedNode*> Pass1Ty;
  Pass1Ty Pass1;

  typedef llvm::DenseMap<const ExplodedNode*, ExplodedNode*> Pass2Ty;
  Pass2Ty& Pass2 = M->M;

  SmallVector<const ExplodedNode*, 10> WL1, WL2;

  // ===- Pass 1 (reverse DFS) -===
  for (const ExplodedNode* const* I = BeginSources; I != EndSources; ++I) {
    if (*I)
      WL1.push_back(*I);
  }

  // Process the first worklist until it is empty.  The worklist is used as a
  // stack (LIFO), so this is a depth-first traversal over the predecessors.
  while (!WL1.empty()) {
    const ExplodedNode *N = WL1.back();
    WL1.pop_back();

    // Have we already visited this node?  If so, continue to the next one.
    if (Pass1.count(N))
      continue;

    // Otherwise, mark this node as visited.
    Pass1.insert(N);

    // If this is a root, enqueue it to the second worklist.
    if (N->Preds.empty()) {
      WL2.push_back(N);
      continue;
    }

    // Visit our predecessors and enqueue them.
    for (ExplodedNode::pred_iterator I = N->Preds.begin(), E = N->Preds.end();
         I != E; ++I)
      WL1.push_back(*I);
  }

  // If we never reached a root, return a null pointer for the new graph.
  if (WL2.empty())
    return 0;

  // Create an empty graph.
  ExplodedGraph* G = MakeEmptyGraph();

  // ===- Pass 2 (forward DFS to construct the new graph) -===
  while (!WL2.empty()) {
    const ExplodedNode *N = WL2.back();
    WL2.pop_back();

    // Skip this node if we have already processed it.
    if (Pass2.find(N) != Pass2.end())
      continue;

    // Create the corresponding node in the new graph and record the mapping
    // from the old node to the new node.
    ExplodedNode *NewN = G->getNode(N->getLocation(), N->State, N->isSink(), 0);
    Pass2[N] = NewN;

    // Also record the reverse mapping from the new node to the old node.
    if (InverseMap) (*InverseMap)[NewN] = N;

    // If this node is a root, designate it as such in the graph.
    if (N->Preds.empty())
      G->addRoot(NewN);

    // Walk through the predecessors of 'N' and, for those that already have
    // corresponding nodes in the new graph, hook them up as predecessors of
    // the freshly created node.
    for (ExplodedNode::pred_iterator I = N->Preds.begin(), E = N->Preds.end();
         I != E; ++I) {
      Pass2Ty::iterator PI = Pass2.find(*I);
      if (PI == Pass2.end())
        continue;

      NewN->addPredecessor(PI->second, *G);
    }

    // In the case that some of the intended successors of NewN have already
    // been created, hook them up as successors.  Otherwise, enqueue the
    // successors from the original graph that still need nodes created in
    // the new graph.
    for (ExplodedNode::succ_iterator I = N->Succs.begin(), E = N->Succs.end();
         I != E; ++I) {
      Pass2Ty::iterator PI = Pass2.find(*I);
      if (PI != Pass2.end()) {
        PI->second->addPredecessor(NewN, *G);
        continue;
      }

      // Enqueue nodes to the worklist that were marked during pass 1.
      if (Pass1.count(*I))
        WL2.push_back(*I);
    }
  }

  return G;
}

void InterExplodedGraphMap::anchor() { }

ExplodedNode*
InterExplodedGraphMap::getMappedNode(const ExplodedNode *N) const {
  llvm::DenseMap<const ExplodedNode*, ExplodedNode*>::const_iterator I =
    M.find(N);

  return I == M.end() ? 0 : I->second;
}
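
// A minimal usage sketch of the node-construction interface above
// (illustrative only; 'Loc', 'State', 'Pred', and 'G' stand in for whatever
// program point, state, predecessor node, and graph a client is currently
// working with).  A client asks the graph for a node with getNode() and then
// links it to its predecessor with addPredecessor():
//
//   bool IsNew;
//   ExplodedNode *Succ = G.getNode(Loc, State, /*IsSink=*/false, &IsNew);
//   Succ->addPredecessor(Pred, G);
//   // 'IsNew' reports whether 'Succ' was freshly created or is an existing
//   // node (a FoldingSet hit) that this path merged into.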