//===----- CGCoroutine.cpp - Emit LLVM Code for C++ coroutines -----------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of coroutines.
//
//===----------------------------------------------------------------------===//

#include "CGCleanup.h"
#include "CodeGenFunction.h"
#include "llvm/ADT/ScopeExit.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtVisitor.h"

using namespace clang;
using namespace CodeGen;

using llvm::Value;
using llvm::BasicBlock;

namespace {
enum class AwaitKind { Init, Normal, Yield, Final };
static constexpr llvm::StringLiteral AwaitKindStr[] = {"init", "await", "yield",
                                                       "final"};
}

struct clang::CodeGen::CGCoroData {
  // The current await expression kind and how many await/yield expressions
  // have been encountered so far. These are used to generate pretty labels for
  // await expressions in LLVM IR.
  AwaitKind CurrentAwaitKind = AwaitKind::Init;
  unsigned AwaitNum = 0;
  unsigned YieldNum = 0;

  // How many co_return statements are in the coroutine. Used to decide whether
  // we need to add the equivalent of 'co_return;' at the end of the
  // user-authored body.
  unsigned CoreturnCount = 0;

  // A branch to this block is emitted when the coroutine needs to suspend.
  llvm::BasicBlock *SuspendBB = nullptr;

  // The promise type's 'unhandled_exception' handler, if it defines one.
  Stmt *ExceptionHandler = nullptr;

  // A temporary i1 alloca that stores whether 'await_resume' threw an
  // exception. If it did, 'true' is stored in this variable, and the coroutine
  // body must be skipped. If the promise type does not define an exception
  // handler, this is null.
  llvm::Value *ResumeEHVar = nullptr;

  // Stores the jump destination just before the coroutine memory is freed.
  // This is the destination that every suspend point jumps to for the cleanup
  // branch.
  CodeGenFunction::JumpDest CleanupJD;

  // Stores the jump destination just before the final suspend. The co_return
  // statements jump to this point after calling the return_xxx promise member.
  CodeGenFunction::JumpDest FinalJD;

  // Stores the llvm.coro.id emitted in the function so that we can supply it
  // as the first argument to the coro.begin, coro.alloc and coro.free
  // intrinsics. Note: llvm.coro.id returns a token that cannot be directly
  // expressed in a builtin.
  llvm::CallInst *CoroId = nullptr;

  // Stores the llvm.coro.begin emitted in the function so that we can replace
  // all coro.frame intrinsics with the SSA value of coro.begin, which returns
  // the address of the coroutine frame of the current coroutine.
  llvm::CallInst *CoroBegin = nullptr;

  // Stores the last emitted coro.free for the deallocate expressions; we use
  // it to wrap the dealloc code with "if (auto mem = coro.free) dealloc(mem)".
  llvm::CallInst *LastCoroFree = nullptr;

  // If coro.id came from the builtin, remember the expression to give a better
  // diagnostic. If CoroIdExpr is nullptr, the coro.id was created by
  // EmitCoroutineBody.
  CallExpr const *CoroIdExpr = nullptr;
};
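
// For example, with the counters above the basic blocks created for suspend
// points end up with names like "init.ready"/"init.suspend", "await.ready",
// "await2.ready", "yield.cleanup" or "final.suspend" (see buildSuspendPrefixStr
// and emitSuspendExpression below). The names are purely cosmetic and only
// affect the readability of the emitted LLVM IR.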

// Defining these here allows us to keep CGCoroData private to this file.
clang::CodeGen::CodeGenFunction::CGCoroInfo::CGCoroInfo() {}
CodeGenFunction::CGCoroInfo::~CGCoroInfo() {}

static void createCoroData(CodeGenFunction &CGF,
                           CodeGenFunction::CGCoroInfo &CurCoro,
                           llvm::CallInst *CoroId,
                           CallExpr const *CoroIdExpr = nullptr) {
  if (CurCoro.Data) {
    if (CurCoro.Data->CoroIdExpr)
      CGF.CGM.Error(CoroIdExpr->getLocStart(),
                    "only one __builtin_coro_id can be used in a function");
    else if (CoroIdExpr)
      CGF.CGM.Error(CoroIdExpr->getLocStart(),
                    "__builtin_coro_id shall not be used in a C++ coroutine");
    else
      llvm_unreachable("EmitCoroutineBodyStatement called twice?");

    return;
  }

  CurCoro.Data = std::unique_ptr<CGCoroData>(new CGCoroData);
  CurCoro.Data->CoroId = CoroId;
  CurCoro.Data->CoroIdExpr = CoroIdExpr;
}

// Synthesize a pretty name for a suspend point.
static SmallString<32> buildSuspendPrefixStr(CGCoroData &Coro, AwaitKind Kind) {
  unsigned No = 0;
  switch (Kind) {
  case AwaitKind::Init:
  case AwaitKind::Final:
    break;
  case AwaitKind::Normal:
    No = ++Coro.AwaitNum;
    break;
  case AwaitKind::Yield:
    No = ++Coro.YieldNum;
    break;
  }
  SmallString<32> Prefix(AwaitKindStr[static_cast<unsigned>(Kind)]);
  if (No > 1) {
    Twine(No).toVector(Prefix);
  }
  return Prefix;
}

// Emit a suspend expression which roughly looks like:
//
//   auto && x = CommonExpr();
//   if (!x.await_ready()) {
//     llvm_coro_save();
//     x.await_suspend(...);     (*)
//     llvm_coro_suspend();      (**)
//   }
//   x.await_resume();
//
// where the result of the entire expression is the result of x.await_resume()
//
//   (*) If x.await_suspend's return type is bool, it can veto the suspend:
//       if (x.await_suspend(...))
//         llvm_coro_suspend();
//
//  (**) llvm_coro_suspend() encodes three possible continuations as
//       a switch instruction:
//
//  %where-to = call i8 @llvm.coro.suspend(...)
//  switch i8 %where-to, label %coro.ret [ ; jump to epilogue to suspend
//    i8 0, label %yield.ready   ; go here when resumed
//    i8 1, label %yield.cleanup ; go here when destroyed
//  ]
//
//  See llvm's docs/Coroutines.rst for more details.
//
namespace {
struct LValueOrRValue {
  LValue LV;
  RValue RV;
};
}
static LValueOrRValue emitSuspendExpression(CodeGenFunction &CGF, CGCoroData &Coro,
                                            CoroutineSuspendExpr const &S,
                                            AwaitKind Kind, AggValueSlot aggSlot,
                                            bool ignoreResult, bool forLValue) {
  auto *E = S.getCommonExpr();

  auto Binder =
      CodeGenFunction::OpaqueValueMappingData::bind(CGF, S.getOpaqueValue(), E);
  auto UnbindOnExit = llvm::make_scope_exit([&] { Binder.unbind(CGF); });

  auto Prefix = buildSuspendPrefixStr(Coro, Kind);
  BasicBlock *ReadyBlock = CGF.createBasicBlock(Prefix + Twine(".ready"));
  BasicBlock *SuspendBlock = CGF.createBasicBlock(Prefix + Twine(".suspend"));
  BasicBlock *CleanupBlock = CGF.createBasicBlock(Prefix + Twine(".cleanup"));

  // If the expression is ready, no need to suspend.
  CGF.EmitBranchOnBoolExpr(S.getReadyExpr(), ReadyBlock, SuspendBlock, 0);

  // Otherwise, emit suspend logic.
  CGF.EmitBlock(SuspendBlock);

  auto &Builder = CGF.Builder;
  llvm::Function *CoroSave = CGF.CGM.getIntrinsic(llvm::Intrinsic::coro_save);
  auto *NullPtr = llvm::ConstantPointerNull::get(CGF.CGM.Int8PtrTy);
  auto *SaveCall = Builder.CreateCall(CoroSave, {NullPtr});

  auto *SuspendRet = CGF.EmitScalarExpr(S.getSuspendExpr());
  if (SuspendRet != nullptr && SuspendRet->getType()->isIntegerTy(1)) {
    // Veto suspension if requested by bool-returning await_suspend.
    BasicBlock *RealSuspendBlock =
        CGF.createBasicBlock(Prefix + Twine(".suspend.bool"));
    CGF.Builder.CreateCondBr(SuspendRet, RealSuspendBlock, ReadyBlock);
    SuspendBlock = RealSuspendBlock;
    CGF.EmitBlock(RealSuspendBlock);
  }

  // Emit the suspend point.
  const bool IsFinalSuspend = (Kind == AwaitKind::Final);
  llvm::Function *CoroSuspend =
      CGF.CGM.getIntrinsic(llvm::Intrinsic::coro_suspend);
  auto *SuspendResult = Builder.CreateCall(
      CoroSuspend, {SaveCall, Builder.getInt1(IsFinalSuspend)});

  // Create a switch capturing three possible continuations.
  auto *Switch = Builder.CreateSwitch(SuspendResult, Coro.SuspendBB, 2);
  Switch->addCase(Builder.getInt8(0), ReadyBlock);
  Switch->addCase(Builder.getInt8(1), CleanupBlock);

  // Emit cleanup for this suspend point.
  CGF.EmitBlock(CleanupBlock);
  CGF.EmitBranchThroughCleanup(Coro.CleanupJD);

  // Emit await_resume expression.
  CGF.EmitBlock(ReadyBlock);
  CXXTryStmt *TryStmt = nullptr;
  if (Coro.ExceptionHandler && Kind == AwaitKind::Init) {
    Coro.ResumeEHVar =
        CGF.CreateTempAlloca(Builder.getInt1Ty(), Prefix + Twine("resume.eh"));
    Builder.CreateFlagStore(true, Coro.ResumeEHVar);

    auto Loc = S.getResumeExpr()->getExprLoc();
    auto *Catch = new (CGF.getContext())
        CXXCatchStmt(Loc, /*exDecl=*/nullptr, Coro.ExceptionHandler);
    auto *TryBody =
        CompoundStmt::Create(CGF.getContext(), S.getResumeExpr(), Loc, Loc);
    TryStmt = CXXTryStmt::Create(CGF.getContext(), Loc, TryBody, Catch);
    CGF.EnterCXXTryStmt(*TryStmt);
  }

  LValueOrRValue Res;
  if (forLValue)
    Res.LV = CGF.EmitLValue(S.getResumeExpr());
  else
    Res.RV = CGF.EmitAnyExpr(S.getResumeExpr(), aggSlot, ignoreResult);

  if (TryStmt) {
    Builder.CreateFlagStore(false, Coro.ResumeEHVar);
    CGF.ExitCXXTryStmt(*TryStmt);
  }

  return Res;
}

RValue CodeGenFunction::EmitCoawaitExpr(const CoawaitExpr &E,
                                        AggValueSlot aggSlot,
                                        bool ignoreResult) {
  return emitSuspendExpression(*this, *CurCoro.Data, E,
                               CurCoro.Data->CurrentAwaitKind, aggSlot,
                               ignoreResult, /*forLValue*/false).RV;
}
RValue CodeGenFunction::EmitCoyieldExpr(const CoyieldExpr &E,
                                        AggValueSlot aggSlot,
                                        bool ignoreResult) {
  return emitSuspendExpression(*this, *CurCoro.Data, E, AwaitKind::Yield,
                               aggSlot, ignoreResult, /*forLValue*/false).RV;
}
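
// A sketch of how a co_return statement is lowered by EmitCoreturnStmt below,
// assuming the promise call was prebuilt by Sema and is available as
// S.getPromiseCall():
//
//   co_return expr;
//     =>
//   <evaluate 'expr' for its side effects if it has void type>
//   __promise.return_value(expr);   // or __promise.return_void()
//   <branch through cleanups to FinalJD, just before the final suspend>
//
// ('__promise' is used here for illustration only.)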
void CodeGenFunction::EmitCoreturnStmt(CoreturnStmt const &S) {
  ++CurCoro.Data->CoreturnCount;
  const Expr *RV = S.getOperand();
  if (RV && RV->getType()->isVoidType()) {
    // Make sure to evaluate the operand of a co_return with a void expression
    // for its side effects.
    RunCleanupsScope cleanupScope(*this);
    EmitIgnoredExpr(RV);
  }
  EmitStmt(S.getPromiseCall());
  EmitBranchThroughCleanup(CurCoro.Data->FinalJD);
}


#ifndef NDEBUG
static QualType getCoroutineSuspendExprReturnType(const ASTContext &Ctx,
                                                  const CoroutineSuspendExpr *E) {
  const auto *RE = E->getResumeExpr();
  // Is it possible for RE to be a CXXBindTemporaryExpr wrapping
  // a MemberCallExpr?
  assert(isa<CallExpr>(RE) && "unexpected suspend expression type");
  return cast<CallExpr>(RE)->getCallReturnType(Ctx);
}
#endif

LValue
CodeGenFunction::EmitCoawaitLValue(const CoawaitExpr *E) {
  assert(getCoroutineSuspendExprReturnType(getContext(), E)->isReferenceType() &&
         "Can't have a scalar return unless the return type is a "
         "reference type!");
  return emitSuspendExpression(*this, *CurCoro.Data, *E,
                               CurCoro.Data->CurrentAwaitKind,
                               AggValueSlot::ignored(),
                               /*ignoreResult*/false, /*forLValue*/true).LV;
}

LValue
CodeGenFunction::EmitCoyieldLValue(const CoyieldExpr *E) {
  assert(getCoroutineSuspendExprReturnType(getContext(), E)->isReferenceType() &&
         "Can't have a scalar return unless the return type is a "
         "reference type!");
  return emitSuspendExpression(*this, *CurCoro.Data, *E,
                               AwaitKind::Yield, AggValueSlot::ignored(),
                               /*ignoreResult*/false, /*forLValue*/true).LV;
}

// Hunts for the parameter reference in the parameter copy/move declaration.
namespace {
struct GetParamRef : public StmtVisitor<GetParamRef> {
public:
  DeclRefExpr *Expr = nullptr;
  GetParamRef() {}
  void VisitDeclRefExpr(DeclRefExpr *E) {
    assert(Expr == nullptr && "multiple declrefs in param move");
    Expr = E;
  }
  void VisitStmt(Stmt *S) {
    for (auto *C : S->children()) {
      if (C)
        Visit(C);
    }
  }
};
}

// This class replaces references to parameters with references to their copies
// by changing the addresses in CGF.LocalDeclMap and restoring the original
// values in its destructor.

namespace {
struct ParamReferenceReplacerRAII {
  CodeGenFunction::DeclMapTy SavedLocals;
  CodeGenFunction::DeclMapTy& LocalDeclMap;

  ParamReferenceReplacerRAII(CodeGenFunction::DeclMapTy &LocalDeclMap)
      : LocalDeclMap(LocalDeclMap) {}

  void addCopy(DeclStmt const *PM) {
    // Figure out what param it refers to.

    assert(PM->isSingleDecl());
    VarDecl const*VD = static_cast<VarDecl const*>(PM->getSingleDecl());
    Expr const *InitExpr = VD->getInit();
    GetParamRef Visitor;
    Visitor.Visit(const_cast<Expr*>(InitExpr));
    assert(Visitor.Expr);
    DeclRefExpr *DREOrig = Visitor.Expr;
    auto *PD = DREOrig->getDecl();

    auto it = LocalDeclMap.find(PD);
    assert(it != LocalDeclMap.end() && "parameter is not found");
    SavedLocals.insert({ PD, it->second });

    auto copyIt = LocalDeclMap.find(VD);
    assert(copyIt != LocalDeclMap.end() && "parameter copy is not found");
    it->second = copyIt->getSecond();
  }

  ~ParamReferenceReplacerRAII() {
    for (auto&& SavedLocal : SavedLocals) {
      LocalDeclMap.insert({SavedLocal.first, SavedLocal.second});
    }
  }
};
}
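
// As an illustration of how the two helpers above are used: for a coroutine
// such as 'task f(T x)', Sema synthesizes a parameter move that is roughly
// equivalent to 'T __x_copy(static_cast<T&&>(x));' (available via
// S.getParamMoves() in EmitCoroutineBody). After the copy is emitted,
// ParamReferenceReplacerRAII redirects the LocalDeclMap entry for 'x' to the
// address of the copy, so the user-authored body refers to the copy that lives
// in the coroutine frame. ('task', 'f' and '__x_copy' are illustrative names
// only.)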

// For the WinEH exception representation, the backend needs to know which
// funclet coro.end belongs to. That information is passed in a funclet bundle.
static SmallVector<llvm::OperandBundleDef, 1>
getBundlesForCoroEnd(CodeGenFunction &CGF) {
  SmallVector<llvm::OperandBundleDef, 1> BundleList;

  if (llvm::Instruction *EHPad = CGF.CurrentFuncletPad)
    BundleList.emplace_back("funclet", EHPad);

  return BundleList;
}

namespace {
// We will insert coro.end to cut any of the destructors for objects that
// do not need to be destroyed once the coroutine is resumed.
// See llvm/docs/Coroutines.rst for more details about coro.end.
struct CallCoroEnd final : public EHScopeStack::Cleanup {
  void Emit(CodeGenFunction &CGF, Flags flags) override {
    auto &CGM = CGF.CGM;
    auto *NullPtr = llvm::ConstantPointerNull::get(CGF.Int8PtrTy);
    llvm::Function *CoroEndFn = CGM.getIntrinsic(llvm::Intrinsic::coro_end);
    // See if we have a funclet bundle to associate coro.end with. (WinEH)
    auto Bundles = getBundlesForCoroEnd(CGF);
    auto *CoroEnd = CGF.Builder.CreateCall(
        CoroEndFn, {NullPtr, CGF.Builder.getTrue()}, Bundles);
    if (Bundles.empty()) {
      // Otherwise, (landingpad model), create a conditional branch that leads
      // either to a cleanup block or a block with EH resume instruction.
      auto *ResumeBB = CGF.getEHResumeBlock(/*cleanup=*/true);
      auto *CleanupContBB = CGF.createBasicBlock("cleanup.cont");
      CGF.Builder.CreateCondBr(CoroEnd, ResumeBB, CleanupContBB);
      CGF.EmitBlock(CleanupContBB);
    }
  }
};
}
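
// A sketch of the IR that CallCoroDelete (below) ends up emitting at each
// cleanup point (value names are illustrative):
//
//     %mem = call i8* @llvm.coro.free(token %id, i8* %hdl)
//     %do.free = icmp ne i8* %mem, null
//     br i1 %do.free, label %coro.free, label %after.coro.free
//   coro.free:
//     <call to the deallocation function, taking %mem>
//     br label %after.coro.free
//   after.coro.free:
//     ...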

namespace {
// Make sure to call coro.delete on scope exit.
struct CallCoroDelete final : public EHScopeStack::Cleanup {
  Stmt *Deallocate;

  // Emit "if (coro.free(CoroId, CoroBegin)) Deallocate;"

  // Note: That deallocation will be emitted twice: once for a normal exit and
  // once for exceptional exit. This usage is safe because Deallocate does not
  // contain any declarations. The SubStmtBuilder::makeNewAndDeleteExpr()
  // builds a single call to a deallocation function which is safe to emit
  // multiple times.
  void Emit(CodeGenFunction &CGF, Flags) override {
    // Remember the current point, as we are going to emit the deallocation
    // code first, to get at the coro.free instruction that is an argument to
    // the delete call.
    BasicBlock *SaveInsertBlock = CGF.Builder.GetInsertBlock();

    auto *FreeBB = CGF.createBasicBlock("coro.free");
    CGF.EmitBlock(FreeBB);
    CGF.EmitStmt(Deallocate);

    auto *AfterFreeBB = CGF.createBasicBlock("after.coro.free");
    CGF.EmitBlock(AfterFreeBB);

    // We should have captured coro.free from the emission of deallocate.
    auto *CoroFree = CGF.CurCoro.Data->LastCoroFree;
    if (!CoroFree) {
      CGF.CGM.Error(Deallocate->getLocStart(),
                    "Deallocation expression does not refer to coro.free");
      return;
    }

    // Get back to the block we were in originally and move coro.free there.
    auto *InsertPt = SaveInsertBlock->getTerminator();
    CoroFree->moveBefore(InsertPt);
    CGF.Builder.SetInsertPoint(InsertPt);

    // Add if (auto *mem = coro.free) Deallocate;
    auto *NullPtr = llvm::ConstantPointerNull::get(CGF.Int8PtrTy);
    auto *Cond = CGF.Builder.CreateICmpNE(CoroFree, NullPtr);
    CGF.Builder.CreateCondBr(Cond, FreeBB, AfterFreeBB);

    // No longer need the old terminator.
    InsertPt->eraseFromParent();
    CGF.Builder.SetInsertPoint(AfterFreeBB);
  }
  explicit CallCoroDelete(Stmt *DeallocStmt) : Deallocate(DeallocStmt) {}
};
}

namespace {
struct GetReturnObjectManager {
  CodeGenFunction &CGF;
  CGBuilderTy &Builder;
  const CoroutineBodyStmt &S;

  Address GroActiveFlag;
  CodeGenFunction::AutoVarEmission GroEmission;

  GetReturnObjectManager(CodeGenFunction &CGF, const CoroutineBodyStmt &S)
      : CGF(CGF), Builder(CGF.Builder), S(S), GroActiveFlag(Address::invalid()),
        GroEmission(CodeGenFunction::AutoVarEmission::invalid()) {}

  // The gro variable has to outlive the coroutine frame and the coroutine
  // promise, but it can only be initialized after the coroutine promise has
  // been created; thus, we split its emission into two parts. EmitGroAlloca
  // emits an alloca and sets up the cleanups. Later, when the coroutine
  // promise is available, we initialize the gro and set the flag that makes
  // the cleanup active.

  void EmitGroAlloca() {
    auto *GroDeclStmt = dyn_cast<DeclStmt>(S.getResultDecl());
    if (!GroDeclStmt) {
      // If get_return_object returns void, no need to do an alloca.
      return;
    }

    auto *GroVarDecl = cast<VarDecl>(GroDeclStmt->getSingleDecl());

    // Set the GRO flag to indicate that it is not initialized yet.
    GroActiveFlag =
        CGF.CreateTempAlloca(Builder.getInt1Ty(), CharUnits::One(), "gro.active");
    Builder.CreateStore(Builder.getFalse(), GroActiveFlag);

    GroEmission = CGF.EmitAutoVarAlloca(*GroVarDecl);

    // Remember the top of EHStack before emitting the cleanup.
    auto old_top = CGF.EHStack.stable_begin();
    CGF.EmitAutoVarCleanups(GroEmission);
    auto top = CGF.EHStack.stable_begin();

    // Make the cleanup conditional on gro.active.
    for (auto b = CGF.EHStack.find(top), e = CGF.EHStack.find(old_top);
         b != e; b++) {
      if (auto *Cleanup = dyn_cast<EHCleanupScope>(&*b)) {
        assert(!Cleanup->hasActiveFlag() && "cleanup already has active flag?");
        Cleanup->setActiveFlag(GroActiveFlag);
        Cleanup->setTestFlagInEHCleanup();
        Cleanup->setTestFlagInNormalCleanup();
      }
    }
  }
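
  // Completes the two-phase emission started in EmitGroAlloca. Conceptually,
  // the two parts together emit something like (names are illustrative):
  //
  //   bool gro.active = false;            // EmitGroAlloca
  //   GroType gro;                        // alloca only; the cleanup is
  //                                       // guarded by gro.active
  //   ... the promise is created ...
  //   gro = promise.get_return_object();  // EmitGroInit
  //   gro.active = true;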
  void EmitGroInit() {
    if (!GroActiveFlag.isValid()) {
      // No Gro variable was allocated. Simply emit the call to
      // get_return_object.
      CGF.EmitStmt(S.getResultDecl());
      return;
    }

    CGF.EmitAutoVarInit(GroEmission);
    Builder.CreateStore(Builder.getTrue(), GroActiveFlag);
  }
};
}

static void emitBodyAndFallthrough(CodeGenFunction &CGF,
                                   const CoroutineBodyStmt &S, Stmt *Body) {
  CGF.EmitStmt(Body);
  const bool CanFallthrough = CGF.Builder.GetInsertBlock();
  if (CanFallthrough)
    if (Stmt *OnFallthrough = S.getFallthroughHandler())
      CGF.EmitStmt(OnFallthrough);
}

void CodeGenFunction::EmitCoroutineBody(const CoroutineBodyStmt &S) {
  auto *NullPtr = llvm::ConstantPointerNull::get(Builder.getInt8PtrTy());
  auto &TI = CGM.getContext().getTargetInfo();
  unsigned NewAlign = TI.getNewAlign() / TI.getCharWidth();

  auto *EntryBB = Builder.GetInsertBlock();
  auto *AllocBB = createBasicBlock("coro.alloc");
  auto *InitBB = createBasicBlock("coro.init");
  auto *FinalBB = createBasicBlock("coro.final");
  auto *RetBB = createBasicBlock("coro.ret");

  auto *CoroId = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::coro_id),
      {Builder.getInt32(NewAlign), NullPtr, NullPtr, NullPtr});
  createCoroData(*this, CurCoro, CoroId);
  CurCoro.Data->SuspendBB = RetBB;

  // The backend is allowed to elide memory allocations; to help it, emit
  //   auto mem = coro.alloc() ? ...allocation code... : nullptr;
  auto *CoroAlloc = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::coro_alloc), {CoroId});

  Builder.CreateCondBr(CoroAlloc, AllocBB, InitBB);

  EmitBlock(AllocBB);
  auto *AllocateCall = EmitScalarExpr(S.getAllocate());
  auto *AllocOrInvokeContBB = Builder.GetInsertBlock();

  // Handle allocation failure if 'ReturnStmtOnAllocFailure' was provided.
  if (auto *RetOnAllocFailure = S.getReturnStmtOnAllocFailure()) {
    auto *RetOnFailureBB = createBasicBlock("coro.ret.on.failure");

    // See if allocation was successful.
    auto *NullPtr = llvm::ConstantPointerNull::get(Int8PtrTy);
    auto *Cond = Builder.CreateICmpNE(AllocateCall, NullPtr);
    Builder.CreateCondBr(Cond, InitBB, RetOnFailureBB);

    // If not, return the OnAllocFailure object.
    EmitBlock(RetOnFailureBB);
    EmitStmt(RetOnAllocFailure);
  }
  else {
    Builder.CreateBr(InitBB);
  }

  EmitBlock(InitBB);

  // Pass the result of the allocation to coro.begin.
  auto *Phi = Builder.CreatePHI(VoidPtrTy, 2);
  Phi->addIncoming(NullPtr, EntryBB);
  Phi->addIncoming(AllocateCall, AllocOrInvokeContBB);
  auto *CoroBegin = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::coro_begin), {CoroId, Phi});
  CurCoro.Data->CoroBegin = CoroBegin;
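
  // At this point we have emitted roughly the following IR (value and block
  // names are illustrative):
  //
  //     %id = call token @llvm.coro.id(i32 <NewAlign>, i8* null, i8* null, i8* null)
  //     %need.alloc = call i1 @llvm.coro.alloc(token %id)
  //     br i1 %need.alloc, label %coro.alloc, label %coro.init
  //   coro.alloc:
  //     %mem = <call to the allocation function>
  //     br label %coro.init
  //   coro.init:
  //     %phi = phi i8* [ null, %entry ], [ %mem, %coro.alloc ]
  //     %hdl = call i8* @llvm.coro.begin(token %id, i8* %phi)
  //
  // (If a return-on-allocation-failure statement is present, the allocation
  // block instead branches to %coro.ret.on.failure when the allocation
  // returns null.)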

  GetReturnObjectManager GroManager(*this, S);
  GroManager.EmitGroAlloca();

  CurCoro.Data->CleanupJD = getJumpDestInCurrentScope(RetBB);
  {
    ParamReferenceReplacerRAII ParamReplacer(LocalDeclMap);
    CodeGenFunction::RunCleanupsScope ResumeScope(*this);
    EHStack.pushCleanup<CallCoroDelete>(NormalAndEHCleanup, S.getDeallocate());

    // Create parameter copies. We do it before creating a promise, since an
    // evolution of the Coroutines TS may allow the promise constructor to
    // observe parameter copies.
    for (auto *PM : S.getParamMoves()) {
      EmitStmt(PM);
      ParamReplacer.addCopy(cast<DeclStmt>(PM));
      // TODO: if(CoroParam(...)) need to surround ctor and dtor
      // for the copy, so that llvm can elide it if the copy is
      // not needed.
    }

    EmitStmt(S.getPromiseDeclStmt());

    Address PromiseAddr = GetAddrOfLocalVar(S.getPromiseDecl());
    auto *PromiseAddrVoidPtr =
        new llvm::BitCastInst(PromiseAddr.getPointer(), VoidPtrTy, "", CoroId);
    // Update CoroId to refer to the promise. We could not do it earlier
    // because the promise local variable had not been emitted yet.
    CoroId->setArgOperand(1, PromiseAddrVoidPtr);

    // Now that we have the promise, initialize the GRO.
    GroManager.EmitGroInit();

    EHStack.pushCleanup<CallCoroEnd>(EHCleanup);

    CurCoro.Data->CurrentAwaitKind = AwaitKind::Init;
    CurCoro.Data->ExceptionHandler = S.getExceptionHandler();
    EmitStmt(S.getInitSuspendStmt());
    CurCoro.Data->FinalJD = getJumpDestInCurrentScope(FinalBB);

    CurCoro.Data->CurrentAwaitKind = AwaitKind::Normal;

    if (CurCoro.Data->ExceptionHandler) {
      BasicBlock *BodyBB = createBasicBlock("coro.resumed.body");
      BasicBlock *ContBB = createBasicBlock("coro.resumed.cont");
      Value *SkipBody =
          Builder.CreateFlagLoad(CurCoro.Data->ResumeEHVar, "coro.resumed.eh");
      Builder.CreateCondBr(SkipBody, ContBB, BodyBB);
      EmitBlock(BodyBB);

      auto Loc = S.getLocStart();
      CXXCatchStmt Catch(Loc, /*exDecl=*/nullptr,
                         CurCoro.Data->ExceptionHandler);
      auto *TryStmt =
          CXXTryStmt::Create(getContext(), Loc, S.getBody(), &Catch);

      EnterCXXTryStmt(*TryStmt);
      emitBodyAndFallthrough(*this, S, TryStmt->getTryBlock());
      ExitCXXTryStmt(*TryStmt);

      EmitBlock(ContBB);
    }
    else {
      emitBodyAndFallthrough(*this, S, S.getBody());
    }

    // See if we need to generate the final suspend.
    const bool CanFallthrough = Builder.GetInsertBlock();
    const bool HasCoreturns = CurCoro.Data->CoreturnCount > 0;
    if (CanFallthrough || HasCoreturns) {
      EmitBlock(FinalBB);
      CurCoro.Data->CurrentAwaitKind = AwaitKind::Final;
      EmitStmt(S.getFinalSuspendStmt());
    } else {
      // We don't need FinalBB. Emit it to make sure the block is deleted.
      EmitBlock(FinalBB, /*IsFinished=*/true);
    }
  }

  EmitBlock(RetBB);
  // Emit coro.end before getReturnStmt (and parameter destructors), since the
  // resume and destroy parts of the coroutine should not include them.
  llvm::Function *CoroEnd = CGM.getIntrinsic(llvm::Intrinsic::coro_end);
  Builder.CreateCall(CoroEnd, {NullPtr, Builder.getFalse()});

  if (Stmt *Ret = S.getReturnStmt())
    EmitStmt(Ret);
}

// Emit a coroutine intrinsic and patch up arguments of the token type.
RValue CodeGenFunction::EmitCoroutineIntrinsic(const CallExpr *E,
                                               unsigned int IID) {
  SmallVector<llvm::Value *, 8> Args;
  switch (IID) {
  default:
    break;
  // The coro.frame builtin is replaced with an SSA value of the coro.begin
  // intrinsic.
  case llvm::Intrinsic::coro_frame: {
    if (CurCoro.Data && CurCoro.Data->CoroBegin) {
      return RValue::get(CurCoro.Data->CoroBegin);
    }
    CGM.Error(E->getLocStart(), "this builtin expects that __builtin_coro_begin "
                                "has been used earlier in this function");
    auto NullPtr = llvm::ConstantPointerNull::get(Builder.getInt8PtrTy());
    return RValue::get(NullPtr);
  }
  // The following three intrinsics take a token parameter referring to a token
  // returned by an earlier call to @llvm.coro.id. Since we cannot represent it
  // in builtins, we patch it up here.
  case llvm::Intrinsic::coro_alloc:
  case llvm::Intrinsic::coro_begin:
  case llvm::Intrinsic::coro_free: {
    if (CurCoro.Data && CurCoro.Data->CoroId) {
      Args.push_back(CurCoro.Data->CoroId);
      break;
    }
    CGM.Error(E->getLocStart(), "this builtin expects that __builtin_coro_id "
                                "has been used earlier in this function");
    // Fallthrough to the next case to add TokenNone as the first argument.
    LLVM_FALLTHROUGH;
  }
  // @llvm.coro.suspend takes a token parameter. Add token 'none' as the first
  // argument.
  case llvm::Intrinsic::coro_suspend:
    Args.push_back(llvm::ConstantTokenNone::get(getLLVMContext()));
    break;
  }
  for (auto &Arg : E->arguments())
    Args.push_back(EmitScalarExpr(Arg));

  llvm::Value *F = CGM.getIntrinsic(IID);
  llvm::CallInst *Call = Builder.CreateCall(F, Args);

  // Note: The following code makes it possible to emit coro.id and coro.begin
  // by hand, to experiment with coroutines in C.
  // If we see @llvm.coro.id, remember it in the CoroData. We will update the
  // coro.alloc, coro.begin and coro.free intrinsics to refer to it.
  if (IID == llvm::Intrinsic::coro_id) {
    createCoroData(*this, CurCoro, Call, E);
  }
  else if (IID == llvm::Intrinsic::coro_begin) {
    if (CurCoro.Data)
      CurCoro.Data->CoroBegin = Call;
  }
  else if (IID == llvm::Intrinsic::coro_free) {
    // Remember the last coro_free as we need it to build the conditional
    // deletion of the coroutine frame.
    if (CurCoro.Data)
      CurCoro.Data->LastCoroFree = Call;
  }
  return RValue::get(Call);
}