//===-- ValueEnumerator.cpp - Number values and types for bitcode writer --===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the ValueEnumerator class.
//
//===----------------------------------------------------------------------===//

#include "ValueEnumerator.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/UseListOrder.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
using namespace llvm;

namespace {
struct OrderMap {
  DenseMap<const Value *, std::pair<unsigned, bool>> IDs;
  unsigned LastGlobalConstantID;
  unsigned LastGlobalValueID;

  OrderMap() : LastGlobalConstantID(0), LastGlobalValueID(0) {}

  bool isGlobalConstant(unsigned ID) const {
    return ID <= LastGlobalConstantID;
  }
  bool isGlobalValue(unsigned ID) const {
    return ID <= LastGlobalValueID && !isGlobalConstant(ID);
  }

  unsigned size() const { return IDs.size(); }
  std::pair<unsigned, bool> &operator[](const Value *V) { return IDs[V]; }
  std::pair<unsigned, bool> lookup(const Value *V) const {
    return IDs.lookup(V);
  }
  void index(const Value *V) {
    // Explicitly sequence get-size and insert-value operations to avoid UB.
    unsigned ID = IDs.size() + 1;
    IDs[V].first = ID;
  }
};
}

static void orderValue(const Value *V, OrderMap &OM) {
  if (OM.lookup(V).first)
    return;

  if (const Constant *C = dyn_cast<Constant>(V))
    if (C->getNumOperands() && !isa<GlobalValue>(C))
      for (const Value *Op : C->operands())
        if (!isa<BasicBlock>(Op) && !isa<GlobalValue>(Op))
          orderValue(Op, OM);

  // Note: we cannot cache this lookup above, since inserting into the map
  // changes the map's size, and thus affects the other IDs.
  OM.index(V);
}

static OrderMap orderModule(const Module *M) {
  // This needs to match the order used by ValueEnumerator::ValueEnumerator()
  // and ValueEnumerator::incorporateFunction().
  OrderMap OM;

  // In the reader, initializers of GlobalValues are set *after* all the
  // globals have been read.  Rather than awkwardly modeling this behaviour
  // directly in predictValueUseListOrderImpl(), just assign IDs to
  // initializers of GlobalValues before GlobalValues themselves to model this
  // implicitly.
  for (const GlobalVariable &G : M->globals())
    if (G.hasInitializer())
      if (!isa<GlobalValue>(G.getInitializer()))
        orderValue(G.getInitializer(), OM);
  for (const GlobalAlias &A : M->aliases())
    if (!isa<GlobalValue>(A.getAliasee()))
      orderValue(A.getAliasee(), OM);
  for (const Function &F : *M)
    if (F.hasPrefixData())
      if (!isa<GlobalValue>(F.getPrefixData()))
        orderValue(F.getPrefixData(), OM);
  OM.LastGlobalConstantID = OM.size();

  // Initializers of GlobalValues are processed in
  // BitcodeReader::ResolveGlobalAndAliasInits().  Match the order there rather
  // than ValueEnumerator, and match the code in predictValueUseListOrderImpl()
  // by giving IDs in reverse order.
  //
  // Since GlobalValues never reference each other directly (just through
  // initializers), their relative IDs only matter for determining order of
  // uses in their initializers.
  for (const Function &F : *M)
    orderValue(&F, OM);
  for (const GlobalAlias &A : M->aliases())
    orderValue(&A, OM);
  for (const GlobalVariable &G : M->globals())
    orderValue(&G, OM);
  OM.LastGlobalValueID = OM.size();

  for (const Function &F : *M) {
    if (F.isDeclaration())
      continue;
    // Here we need to match the union of ValueEnumerator::incorporateFunction()
    // and WriteFunction().  Basic blocks are implicitly declared before
    // anything else (by declaring their size).
    for (const BasicBlock &BB : F)
      orderValue(&BB, OM);
    for (const Argument &A : F.args())
      orderValue(&A, OM);
    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB)
        for (const Value *Op : I.operands())
          if ((isa<Constant>(*Op) && !isa<GlobalValue>(*Op)) ||
              isa<InlineAsm>(*Op))
            orderValue(Op, OM);
    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB)
        orderValue(&I, OM);
  }
  return OM;
}

static void predictValueUseListOrderImpl(const Value *V, const Function *F,
                                         unsigned ID, const OrderMap &OM,
                                         UseListOrderStack &Stack) {
  // Predict use-list order for this one.
  typedef std::pair<const Use *, unsigned> Entry;
  SmallVector<Entry, 64> List;
  for (const Use &U : V->uses())
    // Check if this user will be serialized.
    if (OM.lookup(U.getUser()).first)
      List.push_back(std::make_pair(&U, List.size()));

  if (List.size() < 2)
    // We may have lost some users.
    return;

  bool IsGlobalValue = OM.isGlobalValue(ID);
  std::sort(List.begin(), List.end(), [&](const Entry &L, const Entry &R) {
    const Use *LU = L.first;
    const Use *RU = R.first;
    if (LU == RU)
      return false;

    auto LID = OM.lookup(LU->getUser()).first;
    auto RID = OM.lookup(RU->getUser()).first;

    // Global values are processed in reverse order.
    //
    // Moreover, initializers of GlobalValues are set *after* all the globals
    // have been read (despite having earlier IDs).  Rather than awkwardly
    // modeling this behaviour here, orderModule() has assigned IDs to
    // initializers of GlobalValues before GlobalValues themselves.
    if (OM.isGlobalValue(LID) && OM.isGlobalValue(RID))
      return LID < RID;

    // If ID is 4, then expect: 7 6 5 1 2 3.  That is, users with higher IDs
    // come first, in descending order, followed by users with lower IDs in
    // ascending order.
    if (LID < RID) {
      if (RID <= ID)
        if (!IsGlobalValue) // GlobalValue uses don't get reversed.
          return true;
      return false;
    }
    if (RID < LID) {
      if (LID <= ID)
        if (!IsGlobalValue) // GlobalValue uses don't get reversed.
          return false;
      return true;
    }

    // LID and RID are equal, so we have different operands of the same user.
    // Assume operands are added in order for all instructions.
    if (LID <= ID)
      if (!IsGlobalValue) // GlobalValue uses don't get reversed.
        return LU->getOperandNo() < RU->getOperandNo();
    return LU->getOperandNo() > RU->getOperandNo();
  });

  if (std::is_sorted(
          List.begin(), List.end(),
          [](const Entry &L, const Entry &R) { return L.second < R.second; }))
    // Order is already correct.
    return;

  // Store the shuffle.
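  // List is now sorted into the order the uses are expected to appear after
  // deserialization; List[I].second still records each use's current position,
  // and that permutation is what gets stored below.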
  Stack.emplace_back(V, F, List.size());
  assert(List.size() == Stack.back().Shuffle.size() && "Wrong size");
  for (size_t I = 0, E = List.size(); I != E; ++I)
    Stack.back().Shuffle[I] = List[I].second;
}

static void predictValueUseListOrder(const Value *V, const Function *F,
                                     OrderMap &OM, UseListOrderStack &Stack) {
  auto &IDPair = OM[V];
  assert(IDPair.first && "Unmapped value");
  if (IDPair.second)
    // Already predicted.
    return;

  // Do the actual prediction.
  IDPair.second = true;
  if (!V->use_empty() && std::next(V->use_begin()) != V->use_end())
    predictValueUseListOrderImpl(V, F, IDPair.first, OM, Stack);

  // Recursive descent into constants.
  if (const Constant *C = dyn_cast<Constant>(V))
    if (C->getNumOperands()) // Visit GlobalValues.
      for (const Value *Op : C->operands())
        if (isa<Constant>(Op)) // Visit GlobalValues.
          predictValueUseListOrder(Op, F, OM, Stack);
}

static UseListOrderStack predictUseListOrder(const Module *M) {
  OrderMap OM = orderModule(M);

  // Use-list orders need to be serialized after all the users have been added
  // to a value, or else the shuffles will be incomplete.  Store them per
  // function in a stack.
  //
  // Aside from function order, the order of values doesn't matter much here.
  UseListOrderStack Stack;

  // We want to visit the functions backward now so we can list function-local
  // constants in the last Function they're used in.  Module-level constants
  // have already been visited above.
  for (auto I = M->rbegin(), E = M->rend(); I != E; ++I) {
    const Function &F = *I;
    if (F.isDeclaration())
      continue;
    for (const BasicBlock &BB : F)
      predictValueUseListOrder(&BB, &F, OM, Stack);
    for (const Argument &A : F.args())
      predictValueUseListOrder(&A, &F, OM, Stack);
    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB)
        for (const Value *Op : I.operands())
          if (isa<Constant>(*Op) || isa<InlineAsm>(*Op)) // Visit GlobalValues.
            predictValueUseListOrder(Op, &F, OM, Stack);
    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB)
        predictValueUseListOrder(&I, &F, OM, Stack);
  }

  // Visit globals last, since the module-level use-list block will be seen
  // before the function bodies are processed.
  for (const GlobalVariable &G : M->globals())
    predictValueUseListOrder(&G, nullptr, OM, Stack);
  for (const Function &F : *M)
    predictValueUseListOrder(&F, nullptr, OM, Stack);
  for (const GlobalAlias &A : M->aliases())
    predictValueUseListOrder(&A, nullptr, OM, Stack);
  for (const GlobalVariable &G : M->globals())
    if (G.hasInitializer())
      predictValueUseListOrder(G.getInitializer(), nullptr, OM, Stack);
  for (const GlobalAlias &A : M->aliases())
    predictValueUseListOrder(A.getAliasee(), nullptr, OM, Stack);
  for (const Function &F : *M)
    if (F.hasPrefixData())
      predictValueUseListOrder(F.getPrefixData(), nullptr, OM, Stack);

  return Stack;
}

static bool isIntOrIntVectorValue(const std::pair<const Value*, unsigned> &V) {
  return V.first->getType()->isIntOrIntVectorTy();
}

/// ValueEnumerator - Enumerate module-level information.
ValueEnumerator::ValueEnumerator(const Module *M) {
  if (shouldPreserveBitcodeUseListOrder())
    UseListOrders = predictUseListOrder(M);

  // Enumerate the global variables.
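  // The enumeration order from here on needs to stay in sync with orderModule()
  // and predictUseListOrder() above; otherwise the predicted use-list orders
  // would not line up with the IDs assigned here.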
  for (Module::const_global_iterator I = M->global_begin(),
         E = M->global_end(); I != E; ++I)
    EnumerateValue(I);

  // Enumerate the functions.
  for (Module::const_iterator I = M->begin(), E = M->end(); I != E; ++I) {
    EnumerateValue(I);
    EnumerateAttributes(cast<Function>(I)->getAttributes());
  }

  // Enumerate the aliases.
  for (Module::const_alias_iterator I = M->alias_begin(), E = M->alias_end();
       I != E; ++I)
    EnumerateValue(I);

  // Remember what the cutoff is between global values and other constants.
  unsigned FirstConstant = Values.size();

  // Enumerate the global variable initializers.
  for (Module::const_global_iterator I = M->global_begin(),
         E = M->global_end(); I != E; ++I)
    if (I->hasInitializer())
      EnumerateValue(I->getInitializer());

  // Enumerate the aliasees.
  for (Module::const_alias_iterator I = M->alias_begin(), E = M->alias_end();
       I != E; ++I)
    EnumerateValue(I->getAliasee());

  // Enumerate the prefix data constants.
  for (Module::const_iterator I = M->begin(), E = M->end(); I != E; ++I)
    if (I->hasPrefixData())
      EnumerateValue(I->getPrefixData());

  // Insert constants and metadata that are named at module level into the slot
  // pool so that the module symbol table can refer to them...
  EnumerateValueSymbolTable(M->getValueSymbolTable());
  EnumerateNamedMetadata(M);

  SmallVector<std::pair<unsigned, MDNode*>, 8> MDs;

  // Enumerate types used by function bodies and argument lists.
  for (const Function &F : *M) {
    for (const Argument &A : F.args())
      EnumerateType(A.getType());

    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB) {
        for (const Use &Op : I.operands()) {
          if (MDNode *MD = dyn_cast<MDNode>(&Op))
            if (MD->isFunctionLocal() && MD->getFunction())
              // These will get enumerated during function-incorporation.
              continue;
          EnumerateOperandType(Op);
        }
        EnumerateType(I.getType());
        if (const CallInst *CI = dyn_cast<CallInst>(&I))
          EnumerateAttributes(CI->getAttributes());
        else if (const InvokeInst *II = dyn_cast<InvokeInst>(&I))
          EnumerateAttributes(II->getAttributes());

        // Enumerate metadata attached to this instruction.
        MDs.clear();
        I.getAllMetadataOtherThanDebugLoc(MDs);
        for (unsigned i = 0, e = MDs.size(); i != e; ++i)
          EnumerateMetadata(MDs[i].second);

        if (!I.getDebugLoc().isUnknown()) {
          MDNode *Scope, *IA;
          I.getDebugLoc().getScopeAndInlinedAt(Scope, IA, I.getContext());
          if (Scope) EnumerateMetadata(Scope);
          if (IA) EnumerateMetadata(IA);
        }
      }
  }

  // Optimize constant ordering.
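  // Everything appended to Values since FirstConstant is a module-level
  // constant; group those entries by type plane and use frequency for a denser
  // encoding.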
  OptimizeConstants(FirstConstant, Values.size());
}

unsigned ValueEnumerator::getInstructionID(const Instruction *Inst) const {
  InstructionMapType::const_iterator I = InstructionMap.find(Inst);
  assert(I != InstructionMap.end() && "Instruction is not mapped!");
  return I->second;
}

unsigned ValueEnumerator::getComdatID(const Comdat *C) const {
  unsigned ComdatID = Comdats.idFor(C);
  assert(ComdatID && "Comdat not found!");
  return ComdatID;
}

void ValueEnumerator::setInstructionID(const Instruction *I) {
  InstructionMap[I] = InstructionCount++;
}

unsigned ValueEnumerator::getValueID(const Value *V) const {
  if (isa<MDNode>(V) || isa<MDString>(V)) {
    ValueMapType::const_iterator I = MDValueMap.find(V);
    assert(I != MDValueMap.end() && "Value not in slotcalculator!");
    return I->second-1;
  }

  ValueMapType::const_iterator I = ValueMap.find(V);
  assert(I != ValueMap.end() && "Value not in slotcalculator!");
  return I->second-1;
}

void ValueEnumerator::dump() const {
  print(dbgs(), ValueMap, "Default");
  dbgs() << '\n';
  print(dbgs(), MDValueMap, "MetaData");
  dbgs() << '\n';
}

void ValueEnumerator::print(raw_ostream &OS, const ValueMapType &Map,
                            const char *Name) const {

  OS << "Map Name: " << Name << "\n";
  OS << "Size: " << Map.size() << "\n";
  for (ValueMapType::const_iterator I = Map.begin(),
         E = Map.end(); I != E; ++I) {

    const Value *V = I->first;
    if (V->hasName())
      OS << "Value: " << V->getName();
    else
      OS << "Value: [null]\n";
    V->dump();

    OS << " Uses(" << std::distance(V->use_begin(),V->use_end()) << "):";
    for (const Use &U : V->uses()) {
      if (&U != &*V->use_begin())
        OS << ",";
      if (U->hasName())
        OS << " " << U->getName();
      else
        OS << " [null]";

    }
    OS << "\n\n";
  }
}

/// OptimizeConstants - Reorder constant pool for denser encoding.
void ValueEnumerator::OptimizeConstants(unsigned CstStart, unsigned CstEnd) {
  if (CstStart == CstEnd || CstStart+1 == CstEnd) return;

  if (shouldPreserveBitcodeUseListOrder())
    // Optimizing constants makes the use-list order difficult to predict.
    // Disable it for now when trying to preserve the order.
    return;

  std::stable_sort(Values.begin() + CstStart, Values.begin() + CstEnd,
                   [this](const std::pair<const Value *, unsigned> &LHS,
                          const std::pair<const Value *, unsigned> &RHS) {
    // Sort by plane.
    if (LHS.first->getType() != RHS.first->getType())
      return getTypeID(LHS.first->getType()) < getTypeID(RHS.first->getType());
    // Then by frequency.
    return LHS.second > RHS.second;
  });

  // Ensure that integer and vector-of-integer constants are at the start of
  // the constant pool.  This is important so that GEP structure indices come
  // before gep constant exprs.
  std::partition(Values.begin()+CstStart, Values.begin()+CstEnd,
                 isIntOrIntVectorValue);

  // Rebuild the modified portion of ValueMap.
  for (; CstStart != CstEnd; ++CstStart)
    ValueMap[Values[CstStart].first] = CstStart+1;
}


/// EnumerateValueSymbolTable - Insert all of the values in the specified symbol
/// table into the values table.
void ValueEnumerator::EnumerateValueSymbolTable(const ValueSymbolTable &VST) {
  for (ValueSymbolTable::const_iterator VI = VST.begin(), VE = VST.end();
       VI != VE; ++VI)
    EnumerateValue(VI->getValue());
}

/// EnumerateNamedMetadata - Insert all of the values referenced by
/// named metadata in the specified module.
void ValueEnumerator::EnumerateNamedMetadata(const Module *M) {
  for (Module::const_named_metadata_iterator I = M->named_metadata_begin(),
         E = M->named_metadata_end(); I != E; ++I)
    EnumerateNamedMDNode(I);
}

void ValueEnumerator::EnumerateNamedMDNode(const NamedMDNode *MD) {
  for (unsigned i = 0, e = MD->getNumOperands(); i != e; ++i)
    EnumerateMetadata(MD->getOperand(i));
}

/// EnumerateMDNodeOperands - Enumerate all non-function-local values
/// and types referenced by the given MDNode.
void ValueEnumerator::EnumerateMDNodeOperands(const MDNode *N) {
  for (unsigned i = 0, e = N->getNumOperands(); i != e; ++i) {
    if (Value *V = N->getOperand(i)) {
      if (isa<MDNode>(V) || isa<MDString>(V))
        EnumerateMetadata(V);
      else if (!isa<Instruction>(V) && !isa<Argument>(V))
        EnumerateValue(V);
    } else
      EnumerateType(Type::getVoidTy(N->getContext()));
  }
}

void ValueEnumerator::EnumerateMetadata(const Value *MD) {
  assert((isa<MDNode>(MD) || isa<MDString>(MD)) && "Invalid metadata kind");

  // Enumerate the type of this value.
  EnumerateType(MD->getType());

  const MDNode *N = dyn_cast<MDNode>(MD);

  // In the module-level pass, skip function-local nodes themselves, but
  // do walk their operands.
  if (N && N->isFunctionLocal() && N->getFunction()) {
    EnumerateMDNodeOperands(N);
    return;
  }

  // Check to see if it's already in!
  unsigned &MDValueID = MDValueMap[MD];
  if (MDValueID) {
    // Increment use count.
    MDValues[MDValueID-1].second++;
    return;
  }
  MDValues.push_back(std::make_pair(MD, 1U));
  MDValueID = MDValues.size();

  // Enumerate all non-function-local operands.
  if (N)
    EnumerateMDNodeOperands(N);
}

/// EnumerateFunctionLocalMetadata - Incorporate function-local metadata
/// information reachable from the given MDNode.
void ValueEnumerator::EnumerateFunctionLocalMetadata(const MDNode *N) {
  assert(N->isFunctionLocal() && N->getFunction() &&
         "EnumerateFunctionLocalMetadata called on non-function-local mdnode!");

  // Enumerate the type of this value.
  EnumerateType(N->getType());

  // Check to see if it's already in!
  unsigned &MDValueID = MDValueMap[N];
  if (MDValueID) {
    // Increment use count.
    MDValues[MDValueID-1].second++;
    return;
  }
  MDValues.push_back(std::make_pair(N, 1U));
  MDValueID = MDValues.size();

  // To incorporate function-local information, visit all function-local
  // MDNodes and all function-local values they reference.
  for (unsigned i = 0, e = N->getNumOperands(); i != e; ++i)
    if (Value *V = N->getOperand(i)) {
      if (MDNode *O = dyn_cast<MDNode>(V)) {
        if (O->isFunctionLocal() && O->getFunction())
          EnumerateFunctionLocalMetadata(O);
      } else if (isa<Instruction>(V) || isa<Argument>(V))
        EnumerateValue(V);
    }

  // Also, collect all function-local MDNodes for easy access.
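  // (purgeFunction() clears this list once the function has been processed.)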
  FunctionLocalMDs.push_back(N);
}

void ValueEnumerator::EnumerateValue(const Value *V) {
  assert(!V->getType()->isVoidTy() && "Can't insert void values!");
  assert(!isa<MDNode>(V) && !isa<MDString>(V) &&
         "EnumerateValue doesn't handle Metadata!");

  // Check to see if it's already in!
  unsigned &ValueID = ValueMap[V];
  if (ValueID) {
    // Increment use count.
    Values[ValueID-1].second++;
    return;
  }

  if (auto *GO = dyn_cast<GlobalObject>(V))
    if (const Comdat *C = GO->getComdat())
      Comdats.insert(C);

  // Enumerate the type of this value.
  EnumerateType(V->getType());

  if (const Constant *C = dyn_cast<Constant>(V)) {
    if (isa<GlobalValue>(C)) {
      // Initializers for globals are handled explicitly elsewhere.
    } else if (C->getNumOperands()) {
      // If a constant has operands, enumerate them.  This makes sure that if a
      // constant has uses (for example an array of const ints), that they are
      // inserted also.

      // We prefer to enumerate them with values before we enumerate the user
      // itself.  This makes it more likely that we can avoid forward references
      // in the reader.  We know that there can be no cycles in the constants
      // graph that don't go through a global variable.
      for (User::const_op_iterator I = C->op_begin(), E = C->op_end();
           I != E; ++I)
        if (!isa<BasicBlock>(*I)) // Don't enumerate BB operand to BlockAddress.
          EnumerateValue(*I);

      // Finally, add the value.  Doing this could make the ValueID reference be
      // dangling, don't reuse it.
      Values.push_back(std::make_pair(V, 1U));
      ValueMap[V] = Values.size();
      return;
    }
  }

  // Add the value.
  Values.push_back(std::make_pair(V, 1U));
  ValueID = Values.size();
}


void ValueEnumerator::EnumerateType(Type *Ty) {
  unsigned *TypeID = &TypeMap[Ty];

  // We've already seen this type.
  if (*TypeID)
    return;

  // If it is a non-anonymous struct, mark the type as being visited so that we
  // don't recursively visit it.  This is safe because we allow forward
  // references of these in the bitcode reader.
  if (StructType *STy = dyn_cast<StructType>(Ty))
    if (!STy->isLiteral())
      *TypeID = ~0U;

  // Enumerate all of the subtypes before we enumerate this type.  This ensures
  // that the type will be enumerated in an order that can be directly built.
  for (Type::subtype_iterator I = Ty->subtype_begin(), E = Ty->subtype_end();
       I != E; ++I)
    EnumerateType(*I);

  // Refresh the TypeID pointer in case the table rehashed.
  TypeID = &TypeMap[Ty];

  // Check to see if we got the pointer another way.  This can happen when
  // enumerating recursive types that hit the base case deeper than they start.
  //
  // If this is actually a struct that we are treating as forward ref'able,
  // then emit the definition now that all of its contents are available.
  if (*TypeID && *TypeID != ~0U)
    return;

  // Add this type now that its contents are all happily enumerated.
  Types.push_back(Ty);

  *TypeID = Types.size();
}

// Enumerate the types for the specified value.  If the value is a constant,
// walk through it, enumerating the types of the constant.
void ValueEnumerator::EnumerateOperandType(const Value *V) {
  EnumerateType(V->getType());

  if (const Constant *C = dyn_cast<Constant>(V)) {
    // If this constant is already enumerated, ignore it; we know its type must
    // be enumerated.
    if (ValueMap.count(V)) return;

    // This constant may have operands, make sure to enumerate the types in
    // them.
    for (unsigned i = 0, e = C->getNumOperands(); i != e; ++i) {
      const Value *Op = C->getOperand(i);

      // Don't enumerate basic blocks here, this happens as operands to
      // blockaddress.
      if (isa<BasicBlock>(Op)) continue;

      EnumerateOperandType(Op);
    }

    if (const MDNode *N = dyn_cast<MDNode>(V)) {
      for (unsigned i = 0, e = N->getNumOperands(); i != e; ++i)
        if (Value *Elem = N->getOperand(i))
          EnumerateOperandType(Elem);
    }
  } else if (isa<MDString>(V) || isa<MDNode>(V))
    EnumerateMetadata(V);
}

void ValueEnumerator::EnumerateAttributes(AttributeSet PAL) {
  if (PAL.isEmpty()) return;  // null is always 0.

  // Do a lookup.
  unsigned &Entry = AttributeMap[PAL];
  if (Entry == 0) {
    // Never saw this before, add it.
    Attribute.push_back(PAL);
    Entry = Attribute.size();
  }

  // Do lookups for all attribute groups.
  for (unsigned i = 0, e = PAL.getNumSlots(); i != e; ++i) {
    AttributeSet AS = PAL.getSlotAttributes(i);
    unsigned &Entry = AttributeGroupMap[AS];
    if (Entry == 0) {
      AttributeGroups.push_back(AS);
      Entry = AttributeGroups.size();
    }
  }
}

void ValueEnumerator::incorporateFunction(const Function &F) {
  InstructionCount = 0;
  NumModuleValues = Values.size();
  NumModuleMDValues = MDValues.size();

  // Add the function arguments to the value table.
  for (Function::const_arg_iterator I = F.arg_begin(), E = F.arg_end();
       I != E; ++I)
    EnumerateValue(I);

  FirstFuncConstantID = Values.size();

  // Add all function-level constants to the value table.
  for (Function::const_iterator BB = F.begin(), E = F.end(); BB != E; ++BB) {
    for (BasicBlock::const_iterator I = BB->begin(), E = BB->end(); I!=E; ++I)
      for (User::const_op_iterator OI = I->op_begin(), E = I->op_end();
           OI != E; ++OI) {
        if ((isa<Constant>(*OI) && !isa<GlobalValue>(*OI)) ||
            isa<InlineAsm>(*OI))
          EnumerateValue(*OI);
      }
    BasicBlocks.push_back(BB);
    ValueMap[BB] = BasicBlocks.size();
  }

  // Optimize the constant layout.
  OptimizeConstants(FirstFuncConstantID, Values.size());

  // Add the function's parameter attributes so they are available for use in
  // the function's instructions.
  EnumerateAttributes(F.getAttributes());

  FirstInstID = Values.size();

  SmallVector<MDNode *, 8> FnLocalMDVector;
  // Add all of the instructions.
  for (Function::const_iterator BB = F.begin(), E = F.end(); BB != E; ++BB) {
    for (BasicBlock::const_iterator I = BB->begin(), E = BB->end(); I!=E; ++I) {
      for (User::const_op_iterator OI = I->op_begin(), E = I->op_end();
           OI != E; ++OI) {
        if (MDNode *MD = dyn_cast<MDNode>(*OI))
          if (MD->isFunctionLocal() && MD->getFunction())
            // Enumerate metadata after the instructions they might refer to.
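            // FnLocalMDVector is drained at the bottom of this function, once
            // every instruction has been assigned an ID.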
            FnLocalMDVector.push_back(MD);
      }

      SmallVector<std::pair<unsigned, MDNode*>, 8> MDs;
      I->getAllMetadataOtherThanDebugLoc(MDs);
      for (unsigned i = 0, e = MDs.size(); i != e; ++i) {
        MDNode *N = MDs[i].second;
        if (N->isFunctionLocal() && N->getFunction())
          FnLocalMDVector.push_back(N);
      }

      if (!I->getType()->isVoidTy())
        EnumerateValue(I);
    }
  }

  // Add all of the function-local metadata.
  for (unsigned i = 0, e = FnLocalMDVector.size(); i != e; ++i)
    EnumerateFunctionLocalMetadata(FnLocalMDVector[i]);
}

void ValueEnumerator::purgeFunction() {
  // Remove purged values from the ValueMap.
  for (unsigned i = NumModuleValues, e = Values.size(); i != e; ++i)
    ValueMap.erase(Values[i].first);
  for (unsigned i = NumModuleMDValues, e = MDValues.size(); i != e; ++i)
    MDValueMap.erase(MDValues[i].first);
  for (unsigned i = 0, e = BasicBlocks.size(); i != e; ++i)
    ValueMap.erase(BasicBlocks[i]);

  Values.resize(NumModuleValues);
  MDValues.resize(NumModuleMDValues);
  BasicBlocks.clear();
  FunctionLocalMDs.clear();
}

static void IncorporateFunctionInfoGlobalBBIDs(const Function *F,
                                 DenseMap<const BasicBlock*, unsigned> &IDMap) {
  unsigned Counter = 0;
  for (Function::const_iterator BB = F->begin(), E = F->end(); BB != E; ++BB)
    IDMap[BB] = ++Counter;
}

/// getGlobalBasicBlockID - This returns the function-specific ID for the
/// specified basic block.  This is relatively expensive information, so it
/// should only be used by rare constructs such as address-of-label.
unsigned ValueEnumerator::getGlobalBasicBlockID(const BasicBlock *BB) const {
  unsigned &Idx = GlobalBasicBlockIDs[BB];
  if (Idx != 0)
    return Idx-1;

  IncorporateFunctionInfoGlobalBBIDs(BB->getParent(), GlobalBasicBlockIDs);
  return getGlobalBasicBlockID(BB);
}