//===--- CGCleanup.cpp - Bookkeeping and code emission for cleanups ------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains code dealing with the IR generation for cleanups
// and related information.
//
// A "cleanup" is a piece of code which needs to be executed whenever
// control transfers out of a particular scope.  This can be
// conditionalized to occur only on exceptional control flow, only on
// normal control flow, or both.
//
//===----------------------------------------------------------------------===//

#include "CGCleanup.h"
#include "CodeGenFunction.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;
using namespace CodeGen;

bool DominatingValue<RValue>::saved_type::needsSaving(RValue rv) {
  if (rv.isScalar())
    return DominatingLLVMValue::needsSaving(rv.getScalarVal());
  if (rv.isAggregate())
    return DominatingLLVMValue::needsSaving(rv.getAggregatePointer());
  return true;
}

DominatingValue<RValue>::saved_type
DominatingValue<RValue>::saved_type::save(CodeGenFunction &CGF, RValue rv) {
  if (rv.isScalar()) {
    llvm::Value *V = rv.getScalarVal();

    // These automatically dominate and don't need to be saved.
    if (!DominatingLLVMValue::needsSaving(V))
      return saved_type(V, ScalarLiteral);

    // Everything else needs an alloca.
    Address addr =
      CGF.CreateDefaultAlignTempAlloca(V->getType(), "saved-rvalue");
    CGF.Builder.CreateStore(V, addr);
    return saved_type(addr.getPointer(), ScalarAddress);
  }

  if (rv.isComplex()) {
    CodeGenFunction::ComplexPairTy V = rv.getComplexVal();
    llvm::Type *ComplexTy =
      llvm::StructType::get(V.first->getType(), V.second->getType(),
                            (void*) nullptr);
    Address addr = CGF.CreateDefaultAlignTempAlloca(ComplexTy, "saved-complex");
    CGF.Builder.CreateStore(V.first,
                            CGF.Builder.CreateStructGEP(addr, 0, CharUnits()));
    CharUnits offset = CharUnits::fromQuantity(
        CGF.CGM.getDataLayout().getTypeAllocSize(V.first->getType()));
    CGF.Builder.CreateStore(V.second,
                            CGF.Builder.CreateStructGEP(addr, 1, offset));
    return saved_type(addr.getPointer(), ComplexAddress);
  }

  assert(rv.isAggregate());
  Address V = rv.getAggregateAddress(); // TODO: volatile?
  if (!DominatingLLVMValue::needsSaving(V.getPointer()))
    return saved_type(V.getPointer(), AggregateLiteral,
                      V.getAlignment().getQuantity());

  Address addr =
    CGF.CreateTempAlloca(V.getType(), CGF.getPointerAlign(), "saved-rvalue");
  CGF.Builder.CreateStore(V.getPointer(), addr);
  return saved_type(addr.getPointer(), AggregateAddress,
                    V.getAlignment().getQuantity());
}

/// Given a saved r-value produced by SaveRValue, perform the code
/// necessary to restore it to usability at the current insertion
/// point.
RValue DominatingValue<RValue>::saved_type::restore(CodeGenFunction &CGF) {
  auto getSavingAddress = [&](llvm::Value *value) {
    auto alignment = cast<llvm::AllocaInst>(value)->getAlignment();
    return Address(value, CharUnits::fromQuantity(alignment));
  };
  switch (K) {
  case ScalarLiteral:
    return RValue::get(Value);
  case ScalarAddress:
    return RValue::get(CGF.Builder.CreateLoad(getSavingAddress(Value)));
  case AggregateLiteral:
    return RValue::getAggregate(Address(Value, CharUnits::fromQuantity(Align)));
  case AggregateAddress: {
    auto addr = CGF.Builder.CreateLoad(getSavingAddress(Value));
    return RValue::getAggregate(Address(addr, CharUnits::fromQuantity(Align)));
  }
  case ComplexAddress: {
    Address address = getSavingAddress(Value);
    llvm::Value *real = CGF.Builder.CreateLoad(
        CGF.Builder.CreateStructGEP(address, 0, CharUnits()));
    CharUnits offset = CharUnits::fromQuantity(
        CGF.CGM.getDataLayout().getTypeAllocSize(real->getType()));
    llvm::Value *imag = CGF.Builder.CreateLoad(
        CGF.Builder.CreateStructGEP(address, 1, offset));
    return RValue::getComplex(real, imag);
  }
  }

  llvm_unreachable("bad saved r-value kind");
}

/// Push an entry of the given size onto this protected-scope stack.
char *EHScopeStack::allocate(size_t Size) {
  Size = llvm::RoundUpToAlignment(Size, ScopeStackAlignment);
  if (!StartOfBuffer) {
    unsigned Capacity = 1024;
    while (Capacity < Size) Capacity *= 2;
    StartOfBuffer = new char[Capacity];
    StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
  } else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
    unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
    unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);

    unsigned NewCapacity = CurrentCapacity;
    do {
      NewCapacity *= 2;
    } while (NewCapacity < UsedCapacity + Size);

    char *NewStartOfBuffer = new char[NewCapacity];
    char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
    char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
    memcpy(NewStartOfData, StartOfData, UsedCapacity);
    delete [] StartOfBuffer;
    StartOfBuffer = NewStartOfBuffer;
    EndOfBuffer = NewEndOfBuffer;
    StartOfData = NewStartOfData;
  }

  assert(StartOfBuffer + Size <= StartOfData);
  StartOfData -= Size;
  return StartOfData;
}

void EHScopeStack::deallocate(size_t Size) {
  StartOfData += llvm::RoundUpToAlignment(Size, ScopeStackAlignment);
}

bool EHScopeStack::containsOnlyLifetimeMarkers(
    EHScopeStack::stable_iterator Old) const {
  for (EHScopeStack::iterator it = begin(); stabilize(it) != Old; it++) {
    EHCleanupScope *cleanup = dyn_cast<EHCleanupScope>(&*it);
    if (!cleanup || !cleanup->isLifetimeMarker())
      return false;
  }

  return true;
}

EHScopeStack::stable_iterator
EHScopeStack::getInnermostActiveNormalCleanup() const {
  for (stable_iterator si = getInnermostNormalCleanup(), se = stable_end();
         si != se; ) {
    EHCleanupScope &cleanup = cast<EHCleanupScope>(*find(si));
    if (cleanup.isActive()) return si;
    si = cleanup.getEnclosingNormalCleanup();
  }
  return stable_end();
}


void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
  char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
  bool IsNormalCleanup = Kind & NormalCleanup;
  bool IsEHCleanup = Kind & EHCleanup;
  bool IsActive = !(Kind & InactiveCleanup);
  EHCleanupScope *Scope =
    new (Buffer) EHCleanupScope(IsNormalCleanup,
                                IsEHCleanup,
                                IsActive,
                                Size,
                                BranchFixups.size(),
                                InnermostNormalCleanup,
                                InnermostEHScope);
  if (IsNormalCleanup)
    InnermostNormalCleanup = stable_begin();
  if (IsEHCleanup)
    InnermostEHScope = stable_begin();

  return Scope->getCleanupBuffer();
}

void EHScopeStack::popCleanup() {
  assert(!empty() && "popping exception stack when not empty");

  assert(isa<EHCleanupScope>(*begin()));
  EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
  InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
  InnermostEHScope = Cleanup.getEnclosingEHScope();
  deallocate(Cleanup.getAllocatedSize());

  // Destroy the cleanup.
  Cleanup.Destroy();

  // Check whether we can shrink the branch-fixups stack.
  if (!BranchFixups.empty()) {
    // If we no longer have any normal cleanups, all the fixups are
    // complete.
    if (!hasNormalCleanups())
      BranchFixups.clear();

    // Otherwise we can still trim out unnecessary nulls.
    else
      popNullFixups();
  }
}

EHFilterScope *EHScopeStack::pushFilter(unsigned numFilters) {
  assert(getInnermostEHScope() == stable_end());
  char *buffer = allocate(EHFilterScope::getSizeForNumFilters(numFilters));
  EHFilterScope *filter = new (buffer) EHFilterScope(numFilters);
  InnermostEHScope = stable_begin();
  return filter;
}

void EHScopeStack::popFilter() {
  assert(!empty() && "popping exception stack when not empty");

  EHFilterScope &filter = cast<EHFilterScope>(*begin());
  deallocate(EHFilterScope::getSizeForNumFilters(filter.getNumFilters()));

  InnermostEHScope = filter.getEnclosingEHScope();
}

EHCatchScope *EHScopeStack::pushCatch(unsigned numHandlers) {
  char *buffer = allocate(EHCatchScope::getSizeForNumHandlers(numHandlers));
  EHCatchScope *scope =
    new (buffer) EHCatchScope(numHandlers, InnermostEHScope);
  InnermostEHScope = stable_begin();
  return scope;
}

void EHScopeStack::pushTerminate() {
  char *Buffer = allocate(EHTerminateScope::getSize());
  new (Buffer) EHTerminateScope(InnermostEHScope);
  InnermostEHScope = stable_begin();
}

void EHScopeStack::pushPadEnd(llvm::BasicBlock *PadEndBB) {
  char *Buffer = allocate(EHPadEndScope::getSize());
  auto *CES = new (Buffer) EHPadEndScope(InnermostEHScope);
  CES->setCachedEHDispatchBlock(PadEndBB);
  InnermostEHScope = stable_begin();
}

/// Remove any 'null' fixups on the stack.  However, we can't pop more
/// fixups than the fixup depth on the innermost normal cleanup, or
/// else fixups that we try to add to that cleanup will end up in the
/// wrong place.  We *could* try to shrink fixup depths, but that's
/// actually a lot of work for little benefit.
void EHScopeStack::popNullFixups() {
  // We expect this to only be called when there's still an innermost
  // normal cleanup; otherwise there really shouldn't be any fixups.
  assert(hasNormalCleanups());

  EHScopeStack::iterator it = find(InnermostNormalCleanup);
  unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
  assert(BranchFixups.size() >= MinSize && "fixup stack out of order");

  while (BranchFixups.size() > MinSize &&
         BranchFixups.back().Destination == nullptr)
    BranchFixups.pop_back();
}

void CodeGenFunction::initFullExprCleanup() {
  // Create a variable to decide whether the cleanup needs to be run.
  Address active = CreateTempAlloca(Builder.getInt1Ty(), CharUnits::One(),
                                    "cleanup.cond");

  // Initialize it to false at a site that's guaranteed to be run
  // before each evaluation.
  setBeforeOutermostConditional(Builder.getFalse(), active);

  // Initialize it to true at the current location.
  Builder.CreateStore(Builder.getTrue(), active);

  // Set that as the active flag in the cleanup.
  EHCleanupScope &cleanup = cast<EHCleanupScope>(*EHStack.begin());
  assert(!cleanup.hasActiveFlag() && "cleanup already has active flag?");
  cleanup.setActiveFlag(active);

  if (cleanup.isNormalCleanup()) cleanup.setTestFlagInNormalCleanup();
  if (cleanup.isEHCleanup()) cleanup.setTestFlagInEHCleanup();
}

void EHScopeStack::Cleanup::anchor() {}

static void createStoreInstBefore(llvm::Value *value, Address addr,
                                  llvm::Instruction *beforeInst) {
  auto store = new llvm::StoreInst(value, addr.getPointer(), beforeInst);
  store->setAlignment(addr.getAlignment().getQuantity());
}

static llvm::LoadInst *createLoadInstBefore(Address addr, const Twine &name,
                                            llvm::Instruction *beforeInst) {
  auto load = new llvm::LoadInst(addr.getPointer(), name, beforeInst);
  load->setAlignment(addr.getAlignment().getQuantity());
  return load;
}

/// All the branch fixups on the EH stack have propagated out past the
/// outermost normal cleanup; resolve them all by adding cases to the
/// given switch instruction.
static void ResolveAllBranchFixups(CodeGenFunction &CGF,
                                   llvm::SwitchInst *Switch,
                                   llvm::BasicBlock *CleanupEntry) {
  llvm::SmallPtrSet<llvm::BasicBlock*, 4> CasesAdded;

  for (unsigned I = 0, E = CGF.EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination isn't set.
    BranchFixup &Fixup = CGF.EHStack.getBranchFixup(I);
    if (Fixup.Destination == nullptr) continue;

    // If there isn't an OptimisticBranchBlock, then InitialBranch is
    // still pointing directly to its destination; forward it to the
    // appropriate cleanup entry.  This is required in the specific
    // case of
    //   { std::string s; goto lbl; }
    //   lbl:
    // i.e. where there's an unresolved fixup inside a single cleanup
    // entry which we're currently popping.
    if (Fixup.OptimisticBranchBlock == nullptr) {
      createStoreInstBefore(CGF.Builder.getInt32(Fixup.DestinationIndex),
                            CGF.getNormalCleanupDestSlot(),
                            Fixup.InitialBranch);
      Fixup.InitialBranch->setSuccessor(0, CleanupEntry);
    }

    // Don't add this case to the switch statement twice.
    if (!CasesAdded.insert(Fixup.Destination).second)
      continue;

    Switch->addCase(CGF.Builder.getInt32(Fixup.DestinationIndex),
                    Fixup.Destination);
  }

  CGF.EHStack.clearFixups();
}

/// Transitions the terminator of the given exit-block of a cleanup to
/// be a cleanup switch.
static llvm::SwitchInst *TransitionToCleanupSwitch(CodeGenFunction &CGF,
                                                   llvm::BasicBlock *Block) {
  // If it's a branch, turn it into a switch whose default
  // destination is its original target.
  llvm::TerminatorInst *Term = Block->getTerminator();
  assert(Term && "can't transition block without terminator");

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional());
    auto Load = createLoadInstBefore(CGF.getNormalCleanupDestSlot(),
                                     "cleanup.dest", Term);
    llvm::SwitchInst *Switch =
      llvm::SwitchInst::Create(Load, Br->getSuccessor(0), 4, Block);
    Br->eraseFromParent();
    return Switch;
  } else {
    return cast<llvm::SwitchInst>(Term);
  }
}

void CodeGenFunction::ResolveBranchFixups(llvm::BasicBlock *Block) {
  assert(Block && "resolving a null target block");
  if (!EHStack.getNumBranchFixups()) return;

  assert(EHStack.hasNormalCleanups() &&
         "branch fixups exist with no normal cleanups on stack");

  llvm::SmallPtrSet<llvm::BasicBlock*, 4> ModifiedOptimisticBlocks;
  bool ResolvedAny = false;

  for (unsigned I = 0, E = EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination doesn't match.
    BranchFixup &Fixup = EHStack.getBranchFixup(I);
    if (Fixup.Destination != Block) continue;

    Fixup.Destination = nullptr;
    ResolvedAny = true;

    // If it doesn't have an optimistic branch block, LatestBranch is
    // already pointing to the right place.
    llvm::BasicBlock *BranchBB = Fixup.OptimisticBranchBlock;
    if (!BranchBB)
      continue;

    // Don't process the same optimistic branch block twice.
    if (!ModifiedOptimisticBlocks.insert(BranchBB).second)
      continue;

    llvm::SwitchInst *Switch = TransitionToCleanupSwitch(*this, BranchBB);

    // Add a case to the switch.
    Switch->addCase(Builder.getInt32(Fixup.DestinationIndex), Block);
  }

  if (ResolvedAny)
    EHStack.popNullFixups();
}

/// Pops cleanup blocks until the given savepoint is reached.
void CodeGenFunction::PopCleanupBlocks(EHScopeStack::stable_iterator Old) {
  assert(Old.isValid());

  while (EHStack.stable_begin() != Old) {
    EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());

    // As long as Old strictly encloses the scope's enclosing normal
    // cleanup, we're going to emit another normal cleanup which
    // fallthrough can propagate through.
    bool FallThroughIsBranchThrough =
      Old.strictlyEncloses(Scope.getEnclosingNormalCleanup());

    PopCleanupBlock(FallThroughIsBranchThrough);
  }
}

/// Pops cleanup blocks until the given savepoint is reached, then adds the
/// cleanups from the given savepoint in the lifetime-extended cleanups stack.
void
CodeGenFunction::PopCleanupBlocks(EHScopeStack::stable_iterator Old,
                                  size_t OldLifetimeExtendedSize) {
  PopCleanupBlocks(Old);

  // Move our deferred cleanups onto the EH stack.
  for (size_t I = OldLifetimeExtendedSize,
              E = LifetimeExtendedCleanupStack.size(); I != E; /**/) {
    // Alignment should be guaranteed by the vptrs in the individual cleanups.
    assert((I % llvm::alignOf<LifetimeExtendedCleanupHeader>() == 0) &&
           "misaligned cleanup stack entry");

    LifetimeExtendedCleanupHeader &Header =
        reinterpret_cast<LifetimeExtendedCleanupHeader&>(
            LifetimeExtendedCleanupStack[I]);
    I += sizeof(Header);

    EHStack.pushCopyOfCleanup(Header.getKind(),
                              &LifetimeExtendedCleanupStack[I],
                              Header.getSize());
    I += Header.getSize();
  }
  LifetimeExtendedCleanupStack.resize(OldLifetimeExtendedSize);
}

static llvm::BasicBlock *CreateNormalEntry(CodeGenFunction &CGF,
                                           EHCleanupScope &Scope) {
  assert(Scope.isNormalCleanup());
  llvm::BasicBlock *Entry = Scope.getNormalBlock();
  if (!Entry) {
    Entry = CGF.createBasicBlock("cleanup");
    Scope.setNormalBlock(Entry);
  }
  return Entry;
}

/// Attempts to reduce a cleanup's entry block to a fallthrough.  This
/// is basically llvm::MergeBlockIntoPredecessor, except
/// simplified/optimized for the tighter constraints on cleanup blocks.
///
/// Returns the new block, whatever it is.
static llvm::BasicBlock *SimplifyCleanupEntry(CodeGenFunction &CGF,
                                              llvm::BasicBlock *Entry) {
  llvm::BasicBlock *Pred = Entry->getSinglePredecessor();
  if (!Pred) return Entry;

  llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator());
  if (!Br || Br->isConditional()) return Entry;
  assert(Br->getSuccessor(0) == Entry);

  // If we were previously inserting at the end of the cleanup entry
  // block, we'll need to continue inserting at the end of the
  // predecessor.
  bool WasInsertBlock = CGF.Builder.GetInsertBlock() == Entry;
  assert(!WasInsertBlock || CGF.Builder.GetInsertPoint() == Entry->end());

  // Kill the branch.
  Br->eraseFromParent();

  // Replace all uses of the entry with the predecessor, in case there
  // are phis in the cleanup.
  Entry->replaceAllUsesWith(Pred);

  // Merge the blocks.
  Pred->getInstList().splice(Pred->end(), Entry->getInstList());

  // Kill the entry block.
  Entry->eraseFromParent();

  if (WasInsertBlock)
    CGF.Builder.SetInsertPoint(Pred);

  return Pred;
}

static void EmitCleanup(CodeGenFunction &CGF,
                        EHScopeStack::Cleanup *Fn,
                        EHScopeStack::Cleanup::Flags flags,
                        Address ActiveFlag) {
  // If there's an active flag, load it and skip the cleanup if it's
  // false.
  llvm::BasicBlock *ContBB = nullptr;
  if (ActiveFlag.isValid()) {
    ContBB = CGF.createBasicBlock("cleanup.done");
    llvm::BasicBlock *CleanupBB = CGF.createBasicBlock("cleanup.action");
    llvm::Value *IsActive
      = CGF.Builder.CreateLoad(ActiveFlag, "cleanup.is_active");
    CGF.Builder.CreateCondBr(IsActive, CleanupBB, ContBB);
    CGF.EmitBlock(CleanupBB);
  }

  // Ask the cleanup to emit itself.
  Fn->Emit(CGF, flags);
  assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");

  // Emit the continuation block if there was an active flag.
  if (ActiveFlag.isValid())
    CGF.EmitBlock(ContBB);
}

static void ForwardPrebranchedFallthrough(llvm::BasicBlock *Exit,
                                          llvm::BasicBlock *From,
                                          llvm::BasicBlock *To) {
  // Exit is the exit block of a cleanup, so it always terminates in
  // an unconditional branch or a switch.
  llvm::TerminatorInst *Term = Exit->getTerminator();

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional() && Br->getSuccessor(0) == From);
    Br->setSuccessor(0, To);
  } else {
    llvm::SwitchInst *Switch = cast<llvm::SwitchInst>(Term);
    for (unsigned I = 0, E = Switch->getNumSuccessors(); I != E; ++I)
      if (Switch->getSuccessor(I) == From)
        Switch->setSuccessor(I, To);
  }
}

/// We don't need a normal entry block for the given cleanup.
/// Optimistic fixup branches can cause these blocks to come into
/// existence anyway; if so, destroy it.
///
/// The validity of this transformation is very much specific to the
/// exact ways in which we form branches to cleanup entries.
static void destroyOptimisticNormalEntry(CodeGenFunction &CGF,
                                         EHCleanupScope &scope) {
  llvm::BasicBlock *entry = scope.getNormalBlock();
  if (!entry) return;

  // Replace all the uses with unreachable.
  llvm::BasicBlock *unreachableBB = CGF.getUnreachableBlock();
  for (llvm::BasicBlock::use_iterator
         i = entry->use_begin(), e = entry->use_end(); i != e; ) {
    llvm::Use &use = *i;
    ++i;

    use.set(unreachableBB);

    // The only uses should be fixup switches.
    llvm::SwitchInst *si = cast<llvm::SwitchInst>(use.getUser());
    if (si->getNumCases() == 1 && si->getDefaultDest() == unreachableBB) {
      // Replace the switch with a branch.
      llvm::BranchInst::Create(si->case_begin().getCaseSuccessor(), si);

      // The switch operand is a load from the cleanup-dest alloca.
      llvm::LoadInst *condition = cast<llvm::LoadInst>(si->getCondition());

      // Destroy the switch.
      si->eraseFromParent();

      // Destroy the load.
      assert(condition->getOperand(0) == CGF.NormalCleanupDest);
      assert(condition->use_empty());
      condition->eraseFromParent();
    }
  }

  assert(entry->use_empty());
  delete entry;
}

/// Pops a cleanup block.  If the block includes a normal cleanup, the
/// current insertion point is threaded through the cleanup, as are
/// any branch fixups on the cleanup.
void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
  assert(!EHStack.empty() && "cleanup stack is empty!");
  assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
  assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());

  // Remember activation information.
  bool IsActive = Scope.isActive();
  Address NormalActiveFlag =
    Scope.shouldTestFlagInNormalCleanup() ? Scope.getActiveFlag()
                                          : Address::invalid();
  Address EHActiveFlag =
    Scope.shouldTestFlagInEHCleanup() ? Scope.getActiveFlag()
                                      : Address::invalid();

  // Check whether we need an EH cleanup.  This is only true if we've
  // generated a lazy EH cleanup block.
  llvm::BasicBlock *EHEntry = Scope.getCachedEHDispatchBlock();
  assert(Scope.hasEHBranches() == (EHEntry != nullptr));
  bool RequiresEHCleanup = (EHEntry != nullptr);
  EHScopeStack::stable_iterator EHParent = Scope.getEnclosingEHScope();

  // Check the three conditions which might require a normal cleanup:

  // - whether there are branch fix-ups through this cleanup
  unsigned FixupDepth = Scope.getFixupDepth();
  bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;

  // - whether there are branch-throughs or branch-afters
  bool HasExistingBranches = Scope.hasBranches();

  // - whether there's a fallthrough
  llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();
  bool HasFallthrough = (FallthroughSource != nullptr && IsActive);

  // Branch-through fall-throughs leave the insertion point set to the
  // end of the last cleanup, which points to the current scope.  The
  // rest of IR gen doesn't need to worry about this; it only happens
  // during the execution of PopCleanupBlocks().
  bool HasPrebranchedFallthrough =
    (FallthroughSource && FallthroughSource->getTerminator());

  // If this is a normal cleanup, then having a prebranched
  // fallthrough implies that the fallthrough source unconditionally
  // jumps here.
  assert(!Scope.isNormalCleanup() || !HasPrebranchedFallthrough ||
         (Scope.getNormalBlock() &&
          FallthroughSource->getTerminator()->getSuccessor(0)
            == Scope.getNormalBlock()));

  bool RequiresNormalCleanup = false;
  if (Scope.isNormalCleanup() &&
      (HasFixups || HasExistingBranches || HasFallthrough)) {
    RequiresNormalCleanup = true;
  }

  // If we have a prebranched fallthrough into an inactive normal
  // cleanup, rewrite it so that it leads to the appropriate place.
  if (Scope.isNormalCleanup() && HasPrebranchedFallthrough && !IsActive) {
    llvm::BasicBlock *prebranchDest;

    // If the prebranch is semantically branching through the next
    // cleanup, just forward it to the next block, leaving the
    // insertion point in the prebranched block.
    if (FallthroughIsBranchThrough) {
      EHScope &enclosing = *EHStack.find(Scope.getEnclosingNormalCleanup());
      prebranchDest = CreateNormalEntry(*this, cast<EHCleanupScope>(enclosing));

    // Otherwise, we need to make a new block.  If the normal cleanup
    // isn't being used at all, we could actually reuse the normal
    // entry block, but this is simpler, and it avoids conflicts with
    // dead optimistic fixup branches.
    } else {
      prebranchDest = createBasicBlock("forwarded-prebranch");
      EmitBlock(prebranchDest);
    }

    llvm::BasicBlock *normalEntry = Scope.getNormalBlock();
    assert(normalEntry && !normalEntry->use_empty());

    ForwardPrebranchedFallthrough(FallthroughSource,
                                  normalEntry, prebranchDest);
  }

  // If we don't need the cleanup at all, we're done.
  if (!RequiresNormalCleanup && !RequiresEHCleanup) {
    destroyOptimisticNormalEntry(*this, Scope);
    EHStack.popCleanup(); // safe because there are no fixups
    assert(EHStack.getNumBranchFixups() == 0 ||
           EHStack.hasNormalCleanups());
    return;
  }

  // Copy the cleanup emission data out.  Note that SmallVector
  // guarantees maximal alignment for its buffer regardless of its
  // type parameter.
  auto *CleanupSource = reinterpret_cast<char *>(Scope.getCleanupBuffer());
  SmallVector<char, 8 * sizeof(void *)> CleanupBuffer(
      CleanupSource, CleanupSource + Scope.getCleanupSize());
  auto *Fn = reinterpret_cast<EHScopeStack::Cleanup *>(CleanupBuffer.data());

  EHScopeStack::Cleanup::Flags cleanupFlags;
  if (Scope.isNormalCleanup())
    cleanupFlags.setIsNormalCleanupKind();
  if (Scope.isEHCleanup())
    cleanupFlags.setIsEHCleanupKind();

  if (!RequiresNormalCleanup) {
    destroyOptimisticNormalEntry(*this, Scope);
    EHStack.popCleanup();
  } else {
    // If we have a fallthrough and no other need for the cleanup,
    // emit it directly.
    if (HasFallthrough && !HasPrebranchedFallthrough &&
        !HasFixups && !HasExistingBranches) {

      destroyOptimisticNormalEntry(*this, Scope);
      EHStack.popCleanup();

      EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

    // Otherwise, the best approach is to thread everything through
    // the cleanup block and then try to clean up after ourselves.
    } else {
      // Force the entry block to exist.
      llvm::BasicBlock *NormalEntry = CreateNormalEntry(*this, Scope);

      // I.  Set up the fallthrough edge in.

      CGBuilderTy::InsertPoint savedInactiveFallthroughIP;

      // If there's a fallthrough, we need to store the cleanup
      // destination index.  For fall-throughs this is always zero.
      if (HasFallthrough) {
        if (!HasPrebranchedFallthrough)
          Builder.CreateStore(Builder.getInt32(0), getNormalCleanupDestSlot());

      // Otherwise, save and clear the IP if we don't have fallthrough
      // because the cleanup is inactive.
      } else if (FallthroughSource) {
        assert(!IsActive && "source without fallthrough for active cleanup");
        savedInactiveFallthroughIP = Builder.saveAndClearIP();
      }

      // II.  Emit the entry block.  This implicitly branches to it if
      // we have fallthrough.  All the fixups and existing branches
      // should already be branched to it.
      EmitBlock(NormalEntry);

      // III.  Figure out where we're going and build the cleanup
      // epilogue.

      bool HasEnclosingCleanups =
        (Scope.getEnclosingNormalCleanup() != EHStack.stable_end());

      // Compute the branch-through dest if we need it:
      //   - if there are branch-throughs threaded through the scope
      //   - if fall-through is a branch-through
      //   - if there are fixups that will be optimistically forwarded
      //     to the enclosing cleanup
      llvm::BasicBlock *BranchThroughDest = nullptr;
      if (Scope.hasBranchThroughs() ||
          (FallthroughSource && FallthroughIsBranchThrough) ||
          (HasFixups && HasEnclosingCleanups)) {
        assert(HasEnclosingCleanups);
        EHScope &S = *EHStack.find(Scope.getEnclosingNormalCleanup());
        BranchThroughDest = CreateNormalEntry(*this, cast<EHCleanupScope>(S));
      }

      llvm::BasicBlock *FallthroughDest = nullptr;
      SmallVector<llvm::Instruction*, 2> InstsToAppend;

      // If there's exactly one branch-after and no other threads,
      // we can route it without a switch.
      if (!Scope.hasBranchThroughs() && !HasFixups && !HasFallthrough &&
          Scope.getNumBranchAfters() == 1) {
        assert(!BranchThroughDest || !IsActive);

        // Clean up the possibly dead store to the cleanup dest slot.
        llvm::Instruction *NormalCleanupDestSlot =
            cast<llvm::Instruction>(getNormalCleanupDestSlot().getPointer());
        if (NormalCleanupDestSlot->hasOneUse()) {
          NormalCleanupDestSlot->user_back()->eraseFromParent();
          NormalCleanupDestSlot->eraseFromParent();
          NormalCleanupDest = nullptr;
        }

        llvm::BasicBlock *BranchAfter = Scope.getBranchAfterBlock(0);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchAfter));

      // Build a switch-out if we need it:
      //   - if there are branch-afters threaded through the scope
      //   - if fall-through is a branch-after
      //   - if there are fixups that have nowhere left to go and
      //     so must be immediately resolved
      } else if (Scope.getNumBranchAfters() ||
                 (HasFallthrough && !FallthroughIsBranchThrough) ||
                 (HasFixups && !HasEnclosingCleanups)) {

        llvm::BasicBlock *Default =
          (BranchThroughDest ? BranchThroughDest : getUnreachableBlock());

        // TODO: base this on the number of branch-afters and fixups
        const unsigned SwitchCapacity = 10;

        llvm::LoadInst *Load =
          createLoadInstBefore(getNormalCleanupDestSlot(), "cleanup.dest",
                               nullptr);
        llvm::SwitchInst *Switch =
          llvm::SwitchInst::Create(Load, Default, SwitchCapacity);

        InstsToAppend.push_back(Load);
        InstsToAppend.push_back(Switch);

        // Branch-after fallthrough.
        if (FallthroughSource && !FallthroughIsBranchThrough) {
          FallthroughDest = createBasicBlock("cleanup.cont");
          if (HasFallthrough)
            Switch->addCase(Builder.getInt32(0), FallthroughDest);
        }

        for (unsigned I = 0, E = Scope.getNumBranchAfters(); I != E; ++I) {
          Switch->addCase(Scope.getBranchAfterIndex(I),
                          Scope.getBranchAfterBlock(I));
        }

        // If there aren't any enclosing cleanups, we can resolve all
        // the fixups now.
        if (HasFixups && !HasEnclosingCleanups)
          ResolveAllBranchFixups(*this, Switch, NormalEntry);
      } else {
        // We should always have a branch-through destination in this case.
        assert(BranchThroughDest);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchThroughDest));
      }

      // IV.  Pop the cleanup and emit it.
      EHStack.popCleanup();
      assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);

      EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

      // Append the prepared cleanup prologue from above.
      llvm::BasicBlock *NormalExit = Builder.GetInsertBlock();
      for (unsigned I = 0, E = InstsToAppend.size(); I != E; ++I)
        NormalExit->getInstList().push_back(InstsToAppend[I]);

      // Optimistically hope that any fixups will continue falling through.
      for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
           I < E; ++I) {
        BranchFixup &Fixup = EHStack.getBranchFixup(I);
        if (!Fixup.Destination) continue;
        if (!Fixup.OptimisticBranchBlock) {
          createStoreInstBefore(Builder.getInt32(Fixup.DestinationIndex),
                                getNormalCleanupDestSlot(),
                                Fixup.InitialBranch);
          Fixup.InitialBranch->setSuccessor(0, NormalEntry);
        }
        Fixup.OptimisticBranchBlock = NormalExit;
      }

      // V.  Set up the fallthrough edge out.

      // Case 1: a fallthrough source exists but doesn't branch to the
      // cleanup because the cleanup is inactive.
      if (!HasFallthrough && FallthroughSource) {
        // Prebranched fallthrough was forwarded earlier.
        // Non-prebranched fallthrough doesn't need to be forwarded.
        // Either way, all we need to do is restore the IP we cleared before.
        assert(!IsActive);
        Builder.restoreIP(savedInactiveFallthroughIP);

      // Case 2: a fallthrough source exists and should branch to the
      // cleanup, but we're not supposed to branch through to the next
      // cleanup.
      } else if (HasFallthrough && FallthroughDest) {
        assert(!FallthroughIsBranchThrough);
        EmitBlock(FallthroughDest);

      // Case 3: a fallthrough source exists and should branch to the
      // cleanup and then through to the next.
      } else if (HasFallthrough) {
        // Everything is already set up for this.

      // Case 4: no fallthrough source exists.
      } else {
        Builder.ClearInsertionPoint();
      }

      // VI.  Assorted cleaning.

      // Check whether we can merge NormalEntry into a single predecessor.
      // This might invalidate (non-IR) pointers to NormalEntry.
      llvm::BasicBlock *NewNormalEntry =
        SimplifyCleanupEntry(*this, NormalEntry);

      // If it did invalidate those pointers, and NormalEntry was the same
      // as NormalExit, go back and patch up the fixups.
      if (NewNormalEntry != NormalEntry && NormalEntry == NormalExit)
        for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
               I < E; ++I)
          EHStack.getBranchFixup(I).OptimisticBranchBlock = NewNormalEntry;
    }
  }

  assert(EHStack.hasNormalCleanups() || EHStack.getNumBranchFixups() == 0);

  // Emit the EH cleanup if required.
  if (RequiresEHCleanup) {
    CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();

    EmitBlock(EHEntry);

    llvm::BasicBlock *NextAction = getEHDispatchBlock(EHParent);

    // Push a terminate scope or cleanupendpad scope around the potentially
    // throwing cleanups. For funclet EH personalities, the cleanupendpad models
    // program termination when cleanups throw.
    bool PushedTerminate = false;
    SaveAndRestore<bool> RestoreIsCleanupPadScope(IsCleanupPadScope);
    llvm::CleanupPadInst *CPI = nullptr;
    llvm::BasicBlock *CleanupEndBB = nullptr;
    if (!EHPersonality::get(*this).usesFuncletPads()) {
      EHStack.pushTerminate();
      PushedTerminate = true;
    } else {
      CPI = Builder.CreateCleanupPad({});

      // Build a cleanupendpad to unwind through. Our insertion point should be
      // in the cleanuppad block.
      CleanupEndBB = createBasicBlock("ehcleanup.end");
      CGBuilderTy(*this, CleanupEndBB).CreateCleanupEndPad(CPI, NextAction);
      EHStack.pushPadEnd(CleanupEndBB);

      // Mark that we're inside a cleanuppad to block inlining.
      // FIXME: Remove this once the inliner knows when it's safe to do so.
      IsCleanupPadScope = true;
    }

    // We only actually emit the cleanup code if the cleanup is either
    // active or was used before it was deactivated.
    if (EHActiveFlag.isValid() || IsActive) {
      cleanupFlags.setIsForEHCleanup();
      EmitCleanup(*this, Fn, cleanupFlags, EHActiveFlag);
    }

    if (CPI)
      Builder.CreateCleanupRet(CPI, NextAction);
    else
      Builder.CreateBr(NextAction);

    // Insert the cleanupendpad block here, if it has any uses.
    if (CleanupEndBB) {
      EHStack.popPadEnd();
      if (CleanupEndBB->hasNUsesOrMore(1)) {
        CurFn->getBasicBlockList().insertAfter(
            Builder.GetInsertBlock()->getIterator(), CleanupEndBB);
      } else {
        delete CleanupEndBB;
      }
    }

    // Leave the terminate scope.
    if (PushedTerminate)
      EHStack.popTerminate();

    Builder.restoreIP(SavedIP);

    SimplifyCleanupEntry(*this, EHEntry);
  }
}

/// isObviouslyBranchWithoutCleanups - Return true if a branch to the
/// specified destination obviously has no cleanups to run.  'false' is always
/// a conservatively correct answer for this method.
bool CodeGenFunction::isObviouslyBranchWithoutCleanups(JumpDest Dest) const {
  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
         && "stale jump destination");

  // Calculate the innermost active normal cleanup.
  EHScopeStack::stable_iterator TopCleanup =
    EHStack.getInnermostActiveNormalCleanup();

  // If we're not in an active normal cleanup scope, or if the
  // destination scope is within the innermost active normal cleanup
  // scope, we don't need to worry about fixups.
  if (TopCleanup == EHStack.stable_end() ||
      TopCleanup.encloses(Dest.getScopeDepth())) // works for invalid
    return true;

  // Otherwise, we might need some cleanups.
  return false;
}


/// Terminate the current block by emitting a branch which might leave
/// the current cleanup-protected scope.  The target scope may not yet
/// be known, in which case this will require a fixup.
///
/// As a side-effect, this method clears the insertion point.
void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
         && "stale jump destination");

  if (!HaveInsertPoint())
    return;

  // Create the branch.
  llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock());

  // Calculate the innermost active normal cleanup.
  EHScopeStack::stable_iterator
    TopCleanup = EHStack.getInnermostActiveNormalCleanup();

  // If we're not in an active normal cleanup scope, or if the
  // destination scope is within the innermost active normal cleanup
  // scope, we don't need to worry about fixups.
  if (TopCleanup == EHStack.stable_end() ||
      TopCleanup.encloses(Dest.getScopeDepth())) { // works for invalid
    Builder.ClearInsertionPoint();
    return;
  }

  // If we can't resolve the destination cleanup scope, just add this
  // to the current cleanup scope as a branch fixup.
  if (!Dest.getScopeDepth().isValid()) {
    BranchFixup &Fixup = EHStack.addBranchFixup();
    Fixup.Destination = Dest.getBlock();
    Fixup.DestinationIndex = Dest.getDestIndex();
    Fixup.InitialBranch = BI;
    Fixup.OptimisticBranchBlock = nullptr;

    Builder.ClearInsertionPoint();
    return;
  }

  // Otherwise, thread through all the normal cleanups in scope.

  // Store the index at the start.
  llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex());
  createStoreInstBefore(Index, getNormalCleanupDestSlot(), BI);

  // Adjust BI to point to the first cleanup block.
  {
    EHCleanupScope &Scope =
      cast<EHCleanupScope>(*EHStack.find(TopCleanup));
    BI->setSuccessor(0, CreateNormalEntry(*this, Scope));
  }

  // Add this destination to all the scopes involved.
  EHScopeStack::stable_iterator I = TopCleanup;
  EHScopeStack::stable_iterator E = Dest.getScopeDepth();
  if (E.strictlyEncloses(I)) {
    while (true) {
      EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
      assert(Scope.isNormalCleanup());
      I = Scope.getEnclosingNormalCleanup();

      // If this is the last cleanup we're propagating through, tell it
      // that there's a resolved jump moving through it.
      if (!E.strictlyEncloses(I)) {
        Scope.addBranchAfter(Index, Dest.getBlock());
        break;
      }

      // Otherwise, tell the scope that there's a jump propagating
      // through it.  If this isn't new information, all the rest of
      // the work has been done before.
      if (!Scope.addBranchThrough(Dest.getBlock()))
        break;
    }
  }

  Builder.ClearInsertionPoint();
}

static bool IsUsedAsNormalCleanup(EHScopeStack &EHStack,
                                  EHScopeStack::stable_iterator C) {
  // If we needed a normal block for any reason, that counts.
  if (cast<EHCleanupScope>(*EHStack.find(C)).getNormalBlock())
    return true;

  // Check whether any enclosed cleanups were needed.
  for (EHScopeStack::stable_iterator
         I = EHStack.getInnermostNormalCleanup();
       I != C; ) {
    assert(C.strictlyEncloses(I));
    EHCleanupScope &S = cast<EHCleanupScope>(*EHStack.find(I));
    if (S.getNormalBlock()) return true;
    I = S.getEnclosingNormalCleanup();
  }

  return false;
}

static bool IsUsedAsEHCleanup(EHScopeStack &EHStack,
                              EHScopeStack::stable_iterator cleanup) {
  // If we needed an EH block for any reason, that counts.
  if (EHStack.find(cleanup)->hasEHBranches())
    return true;

  // Check whether any enclosed cleanups were needed.
  for (EHScopeStack::stable_iterator
         i = EHStack.getInnermostEHScope(); i != cleanup; ) {
    assert(cleanup.strictlyEncloses(i));

    EHScope &scope = *EHStack.find(i);
    if (scope.hasEHBranches())
      return true;

    i = scope.getEnclosingEHScope();
  }

  return false;
}

enum ForActivation_t {
  ForActivation,
  ForDeactivation
};

/// The given cleanup block is changing activation state.  Configure a
/// cleanup variable if necessary.
///
/// It would be good if we had some way of determining if there were
/// extra uses *after* the change-over point.
static void SetupCleanupBlockActivation(CodeGenFunction &CGF,
                                        EHScopeStack::stable_iterator C,
                                        ForActivation_t kind,
                                        llvm::Instruction *dominatingIP) {
  EHCleanupScope &Scope = cast<EHCleanupScope>(*CGF.EHStack.find(C));

  // We always need the flag if we're activating the cleanup in a
  // conditional context, because we have to assume that the current
  // location doesn't necessarily dominate the cleanup's code.
  bool isActivatedInConditional =
    (kind == ForActivation && CGF.isInConditionalBranch());

  bool needFlag = false;

  // Calculate whether the cleanup was used:

  //   - as a normal cleanup
  if (Scope.isNormalCleanup() &&
      (isActivatedInConditional || IsUsedAsNormalCleanup(CGF.EHStack, C))) {
    Scope.setTestFlagInNormalCleanup();
    needFlag = true;
  }

  //   - as an EH cleanup
  if (Scope.isEHCleanup() &&
      (isActivatedInConditional || IsUsedAsEHCleanup(CGF.EHStack, C))) {
    Scope.setTestFlagInEHCleanup();
    needFlag = true;
  }

  // If it hasn't yet been used as either, we're done.
  if (!needFlag) return;

  Address var = Scope.getActiveFlag();
  if (!var.isValid()) {
    var = CGF.CreateTempAlloca(CGF.Builder.getInt1Ty(), CharUnits::One(),
                               "cleanup.isactive");
    Scope.setActiveFlag(var);

    assert(dominatingIP && "no existing variable and no dominating IP!");

    // Initialize to true or false depending on whether it was
    // active up to this point.
    llvm::Constant *value = CGF.Builder.getInt1(kind == ForDeactivation);

    // If we're in a conditional block, ignore the dominating IP and
    // use the outermost conditional branch.
    if (CGF.isInConditionalBranch()) {
      CGF.setBeforeOutermostConditional(value, var);
    } else {
      createStoreInstBefore(value, var, dominatingIP);
    }
  }

  CGF.Builder.CreateStore(CGF.Builder.getInt1(kind == ForActivation), var);
}

/// Activate a cleanup that was created in an inactivated state.
void CodeGenFunction::ActivateCleanupBlock(EHScopeStack::stable_iterator C,
                                           llvm::Instruction *dominatingIP) {
  assert(C != EHStack.stable_end() && "activating bottom of stack?");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
  assert(!Scope.isActive() && "double activation");

  SetupCleanupBlockActivation(*this, C, ForActivation, dominatingIP);

  Scope.setActive(true);
}

/// Deactivate a cleanup that was created in an active state.
void CodeGenFunction::DeactivateCleanupBlock(EHScopeStack::stable_iterator C,
                                             llvm::Instruction *dominatingIP) {
  assert(C != EHStack.stable_end() && "deactivating bottom of stack?");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
  assert(Scope.isActive() && "double deactivation");

  // If it's the top of the stack, just pop it.
  if (C == EHStack.stable_begin()) {
    // If it's a normal cleanup, we need to pretend that the
    // fallthrough is unreachable.
    CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
    PopCleanupBlock();
    Builder.restoreIP(SavedIP);
    return;
  }

  // Otherwise, follow the general case.
  SetupCleanupBlockActivation(*this, C, ForDeactivation, dominatingIP);

  Scope.setActive(false);
}

Address CodeGenFunction::getNormalCleanupDestSlot() {
  if (!NormalCleanupDest)
    NormalCleanupDest =
      CreateTempAlloca(Builder.getInt32Ty(), "cleanup.dest.slot");
  return Address(NormalCleanupDest, CharUnits::fromQuantity(4));
}

/// Emits all the code to cause the given temporary to be cleaned up.
void CodeGenFunction::EmitCXXTemporary(const CXXTemporary *Temporary,
                                       QualType TempType,
                                       Address Ptr) {
  pushDestroy(NormalAndEHCleanup, Ptr, TempType, destroyCXXObject,
              /*useEHCleanup*/ true);
}