//===--- CGExprCXX.cpp - Emit LLVM Code for C++ expressions --------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with code generation of C++ expressions
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
using namespace clang;
using namespace CodeGen;

static uint64_t CalculateCookiePadding(ASTContext &Ctx, QualType ElementType) {
  const RecordType *RT = ElementType->getAs<RecordType>();
  if (!RT)
    return 0;

  const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(RT->getDecl());
  if (!RD)
    return 0;

  // Check if the class has a trivial destructor.
  if (RD->hasTrivialDestructor()) {
    // Check if the usual deallocation function takes two arguments.
    const CXXMethodDecl *UsualDeallocationFunction = 0;

    DeclarationName OpName =
      Ctx.DeclarationNames.getCXXOperatorName(OO_Array_Delete);
    DeclContext::lookup_const_iterator Op, OpEnd;
    for (llvm::tie(Op, OpEnd) = RD->lookup(OpName);
         Op != OpEnd; ++Op) {
      const CXXMethodDecl *Delete = cast<CXXMethodDecl>(*Op);

      if (Delete->isUsualDeallocationFunction()) {
        UsualDeallocationFunction = Delete;
        break;
      }
    }

    // No usual deallocation function, we don't need a cookie.
    if (!UsualDeallocationFunction)
      return 0;

    // The usual deallocation function doesn't take a size_t argument, so we
    // don't need a cookie.
    if (UsualDeallocationFunction->getNumParams() == 1)
      return 0;

    assert(UsualDeallocationFunction->getNumParams() == 2 &&
           "Unexpected deallocation function type!");
  }

  // Padding is the maximum of sizeof(size_t) and alignof(ElementType).
  return std::max(Ctx.getTypeSize(Ctx.getSizeType()),
                  static_cast<uint64_t>(Ctx.getTypeAlign(ElementType))) / 8;
}

static uint64_t CalculateCookiePadding(ASTContext &Ctx, const CXXNewExpr *E) {
  if (!E->isArray())
    return 0;

  // No cookie is required if the new operator being used is
  // ::operator new[](size_t, void*).
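  // (This matches the array-cookie rules of the Itanium C++ ABI, which
  // explicitly exempts the reserved global placement form from carrying a
  // cookie.)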
  const FunctionDecl *OperatorNew = E->getOperatorNew();
  if (OperatorNew->getDeclContext()->getLookupContext()->isFileContext()) {
    if (OperatorNew->getNumParams() == 2) {
      CanQualType ParamType =
        Ctx.getCanonicalType(OperatorNew->getParamDecl(1)->getType());

      if (ParamType == Ctx.VoidPtrTy)
        return 0;
    }
  }

  return CalculateCookiePadding(Ctx, E->getAllocatedType());
}

static llvm::Value *EmitCXXNewAllocSize(CodeGenFunction &CGF,
                                        const CXXNewExpr *E,
                                        llvm::Value *&NumElements) {
  QualType Type = E->getAllocatedType();
  uint64_t TypeSizeInBytes = CGF.getContext().getTypeSize(Type) / 8;
  const llvm::Type *SizeTy = CGF.ConvertType(CGF.getContext().getSizeType());

  if (!E->isArray())
    return llvm::ConstantInt::get(SizeTy, TypeSizeInBytes);

  uint64_t CookiePadding = CalculateCookiePadding(CGF.getContext(), E);

  Expr::EvalResult Result;
  if (E->getArraySize()->Evaluate(Result, CGF.getContext()) &&
      !Result.HasSideEffects && Result.Val.isInt()) {

    uint64_t AllocSize =
      Result.Val.getInt().getZExtValue() * TypeSizeInBytes + CookiePadding;

    NumElements =
      llvm::ConstantInt::get(SizeTy, Result.Val.getInt().getZExtValue());

    return llvm::ConstantInt::get(SizeTy, AllocSize);
  }

  // Emit the array size expression.
  NumElements = CGF.EmitScalarExpr(E->getArraySize());

  // Multiply with the type size.
  llvm::Value *V =
    CGF.Builder.CreateMul(NumElements,
                          llvm::ConstantInt::get(SizeTy, TypeSizeInBytes));

  // And add the cookie padding if necessary.
  if (CookiePadding)
    V = CGF.Builder.CreateAdd(V, llvm::ConstantInt::get(SizeTy, CookiePadding));

  return V;
}

static void EmitNewInitializer(CodeGenFunction &CGF, const CXXNewExpr *E,
                               llvm::Value *NewPtr,
                               llvm::Value *NumElements) {
  if (E->isArray()) {
    if (CXXConstructorDecl *Ctor = E->getConstructor())
      CGF.EmitCXXAggrConstructorCall(Ctor, NumElements, NewPtr,
                                     E->constructor_arg_begin(),
                                     E->constructor_arg_end());
    return;
  }

  QualType AllocType = E->getAllocatedType();

  if (CXXConstructorDecl *Ctor = E->getConstructor()) {
    CGF.EmitCXXConstructorCall(Ctor, Ctor_Complete, NewPtr,
                               E->constructor_arg_begin(),
                               E->constructor_arg_end());
    return;
  }

  // We have a POD type.
  if (E->getNumConstructorArgs() == 0)
    return;

  assert(E->getNumConstructorArgs() == 1 &&
         "Can only have one argument to initializer of POD type.");

  const Expr *Init = E->getConstructorArg(0);

  if (!CGF.hasAggregateLLVMType(AllocType))
    CGF.EmitStoreOfScalar(CGF.EmitScalarExpr(Init), NewPtr,
                          AllocType.isVolatileQualified(), AllocType);
  else if (AllocType->isAnyComplexType())
    CGF.EmitComplexExprIntoAddr(Init, NewPtr,
                                AllocType.isVolatileQualified());
  else
    CGF.EmitAggExpr(Init, NewPtr, AllocType.isVolatileQualified());
}

llvm::Value *CodeGenFunction::EmitCXXNewExpr(const CXXNewExpr *E) {
  QualType AllocType = E->getAllocatedType();
  FunctionDecl *NewFD = E->getOperatorNew();
  const FunctionProtoType *NewFTy =
    NewFD->getType()->getAs<FunctionProtoType>();

  CallArgList NewArgs;

  // The allocation size is the first argument.
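  // (Per [expr.new], an allocation function's first parameter is of type
  // std::size_t and receives the number of bytes requested; any placement
  // arguments are passed after it.)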
  QualType SizeTy = getContext().getSizeType();

  llvm::Value *NumElements = 0;
  llvm::Value *AllocSize = EmitCXXNewAllocSize(*this, E, NumElements);

  NewArgs.push_back(std::make_pair(RValue::get(AllocSize), SizeTy));

  // Emit the rest of the arguments.
  // FIXME: Ideally, this should just use EmitCallArgs.
  CXXNewExpr::const_arg_iterator NewArg = E->placement_arg_begin();

  // First, use the types from the function type.
  // We start at 1 here because the first argument (the allocation size)
  // has already been emitted.
  for (unsigned i = 1, e = NewFTy->getNumArgs(); i != e; ++i, ++NewArg) {
    QualType ArgType = NewFTy->getArgType(i);

    assert(getContext().getCanonicalType(ArgType.getNonReferenceType()).
           getTypePtr() ==
           getContext().getCanonicalType(NewArg->getType()).getTypePtr() &&
           "type mismatch in call argument!");

    NewArgs.push_back(std::make_pair(EmitCallArg(*NewArg, ArgType),
                                     ArgType));
  }

  // Either we've emitted all the call args, or we have a call to a
  // variadic function.
  assert((NewArg == E->placement_arg_end() || NewFTy->isVariadic()) &&
         "Extra arguments in non-variadic function!");

  // If we still have any arguments, emit them using the type of the argument.
  for (CXXNewExpr::const_arg_iterator NewArgEnd = E->placement_arg_end();
       NewArg != NewArgEnd; ++NewArg) {
    QualType ArgType = NewArg->getType();
    NewArgs.push_back(std::make_pair(EmitCallArg(*NewArg, ArgType),
                                     ArgType));
  }

  // Emit the call to new.
  RValue RV =
    EmitCall(CGM.getTypes().getFunctionInfo(NewFTy->getResultType(), NewArgs),
             CGM.GetAddrOfFunction(NewFD), NewArgs, NewFD);

  // If an allocation function is declared with an empty exception
  // specification it returns null to indicate failure to allocate storage.
  // [expr.new]p13.
  // (We don't need to check for null when there's no new initializer and
  // we're allocating a POD type.)
  bool NullCheckResult = NewFTy->hasEmptyExceptionSpec() &&
    !(AllocType->isPODType() && !E->hasInitializer());

  llvm::BasicBlock *NewNull = 0;
  llvm::BasicBlock *NewNotNull = 0;
  llvm::BasicBlock *NewEnd = 0;

  llvm::Value *NewPtr = RV.getScalarVal();

  if (NullCheckResult) {
    NewNull = createBasicBlock("new.null");
    NewNotNull = createBasicBlock("new.notnull");
    NewEnd = createBasicBlock("new.end");

    llvm::Value *IsNull =
      Builder.CreateICmpEQ(NewPtr,
                           llvm::Constant::getNullValue(NewPtr->getType()),
                           "isnull");

    Builder.CreateCondBr(IsNull, NewNull, NewNotNull);
    EmitBlock(NewNotNull);
  }

  if (uint64_t CookiePadding = CalculateCookiePadding(getContext(), E)) {
    uint64_t CookieOffset =
      CookiePadding - getContext().getTypeSize(SizeTy) / 8;

    llvm::Value *NumElementsPtr =
      Builder.CreateConstInBoundsGEP1_64(NewPtr, CookieOffset);

    NumElementsPtr = Builder.CreateBitCast(NumElementsPtr,
                                           ConvertType(SizeTy)->getPointerTo());
    Builder.CreateStore(NumElements, NumElementsPtr);

    // Now add the padding to the new ptr.
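    // (The element count is stored at the end of the cookie, immediately
    // before the first array element; GetAllocatedObjectPtrAndNumElements
    // below walks back over this padding when the array is deleted.)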
    NewPtr = Builder.CreateConstInBoundsGEP1_64(NewPtr, CookiePadding);
  }

  NewPtr = Builder.CreateBitCast(NewPtr, ConvertType(E->getType()));

  EmitNewInitializer(*this, E, NewPtr, NumElements);

  if (NullCheckResult) {
    Builder.CreateBr(NewEnd);
    NewNotNull = Builder.GetInsertBlock();
    EmitBlock(NewNull);
    Builder.CreateBr(NewEnd);
    EmitBlock(NewEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(NewPtr->getType());
    PHI->reserveOperandSpace(2);
    PHI->addIncoming(NewPtr, NewNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(NewPtr->getType()), NewNull);

    NewPtr = PHI;
  }

  return NewPtr;
}

static std::pair<llvm::Value *, llvm::Value *>
GetAllocatedObjectPtrAndNumElements(CodeGenFunction &CGF,
                                    llvm::Value *Ptr, QualType DeleteTy) {
  QualType SizeTy = CGF.getContext().getSizeType();
  const llvm::Type *SizeLTy = CGF.ConvertType(SizeTy);

  uint64_t DeleteTypeAlign = CGF.getContext().getTypeAlign(DeleteTy);
  uint64_t CookiePadding = std::max(CGF.getContext().getTypeSize(SizeTy),
                                    DeleteTypeAlign) / 8;
  assert(CookiePadding && "CookiePadding should not be 0.");

  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
  uint64_t CookieOffset =
    CookiePadding - CGF.getContext().getTypeSize(SizeTy) / 8;

  llvm::Value *AllocatedObjectPtr = CGF.Builder.CreateBitCast(Ptr, Int8PtrTy);
  AllocatedObjectPtr =
    CGF.Builder.CreateConstInBoundsGEP1_64(AllocatedObjectPtr,
                                           -CookiePadding);

  llvm::Value *NumElementsPtr =
    CGF.Builder.CreateConstInBoundsGEP1_64(AllocatedObjectPtr,
                                           CookieOffset);
  NumElementsPtr =
    CGF.Builder.CreateBitCast(NumElementsPtr, SizeLTy->getPointerTo());

  llvm::Value *NumElements = CGF.Builder.CreateLoad(NumElementsPtr);
  NumElements =
    CGF.Builder.CreateIntCast(NumElements, SizeLTy, /*isSigned=*/false);

  return std::make_pair(AllocatedObjectPtr, NumElements);
}

void CodeGenFunction::EmitDeleteCall(const FunctionDecl *DeleteFD,
                                     llvm::Value *Ptr,
                                     QualType DeleteTy) {
  const FunctionProtoType *DeleteFTy =
    DeleteFD->getType()->getAs<FunctionProtoType>();

  CallArgList DeleteArgs;

  // Check if we need to pass the size to the delete operator.
  llvm::Value *Size = 0;
  QualType SizeTy;
  if (DeleteFTy->getNumArgs() == 2) {
    SizeTy = DeleteFTy->getArgType(1);
    uint64_t DeleteTypeSize = getContext().getTypeSize(DeleteTy) / 8;
    Size = llvm::ConstantInt::get(ConvertType(SizeTy), DeleteTypeSize);
  }

  if (DeleteFD->getOverloadedOperator() == OO_Array_Delete &&
      CalculateCookiePadding(getContext(), DeleteTy)) {
    // We need to get the number of elements in the array from the cookie.
    llvm::Value *AllocatedObjectPtr;
    llvm::Value *NumElements;
    llvm::tie(AllocatedObjectPtr, NumElements) =
      GetAllocatedObjectPtrAndNumElements(*this, Ptr, DeleteTy);

    // Multiply the size with the number of elements.
    if (Size)
      Size = Builder.CreateMul(NumElements, Size);

    Ptr = AllocatedObjectPtr;
  }

  QualType ArgTy = DeleteFTy->getArgType(0);
  llvm::Value *DeletePtr = Builder.CreateBitCast(Ptr, ConvertType(ArgTy));
  DeleteArgs.push_back(std::make_pair(RValue::get(DeletePtr), ArgTy));

  if (Size)
    DeleteArgs.push_back(std::make_pair(RValue::get(Size), SizeTy));

  // Emit the call to delete.
  EmitCall(CGM.getTypes().getFunctionInfo(DeleteFTy->getResultType(),
                                          DeleteArgs),
           CGM.GetAddrOfFunction(DeleteFD),
           DeleteArgs, DeleteFD);
}

void CodeGenFunction::EmitCXXDeleteExpr(const CXXDeleteExpr *E) {
  // Get at the argument before we performed the implicit conversion
  // to void*.
  const Expr *Arg = E->getArgument();
  while (const ImplicitCastExpr *ICE = dyn_cast<ImplicitCastExpr>(Arg)) {
    if (ICE->getCastKind() != CastExpr::CK_UserDefinedConversion &&
        ICE->getType()->isVoidPointerType())
      Arg = ICE->getSubExpr();
    else
      break;
  }

  QualType DeleteTy = Arg->getType()->getAs<PointerType>()->getPointeeType();

  llvm::Value *Ptr = EmitScalarExpr(Arg);

  // Null check the pointer.
  llvm::BasicBlock *DeleteNotNull = createBasicBlock("delete.notnull");
  llvm::BasicBlock *DeleteEnd = createBasicBlock("delete.end");

  llvm::Value *IsNull =
    Builder.CreateICmpEQ(Ptr, llvm::Constant::getNullValue(Ptr->getType()),
                         "isnull");

  Builder.CreateCondBr(IsNull, DeleteEnd, DeleteNotNull);
  EmitBlock(DeleteNotNull);

  bool ShouldCallDelete = true;

  // Call the destructor if necessary.
  if (const RecordType *RT = DeleteTy->getAs<RecordType>()) {
    if (CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(RT->getDecl())) {
      if (!RD->hasTrivialDestructor()) {
        const CXXDestructorDecl *Dtor = RD->getDestructor(getContext());
        if (E->isArrayForm()) {
          llvm::Value *AllocatedObjectPtr;
          llvm::Value *NumElements;
          llvm::tie(AllocatedObjectPtr, NumElements) =
            GetAllocatedObjectPtrAndNumElements(*this, Ptr, DeleteTy);

          EmitCXXAggrDestructorCall(Dtor, NumElements, Ptr);
        } else if (Dtor->isVirtual()) {
          const llvm::Type *Ty =
            CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(Dtor),
                                           /*isVariadic=*/false);

          llvm::Value *Callee = BuildVirtualCall(Dtor, Dtor_Deleting, Ptr, Ty);
          EmitCXXMemberCall(Dtor, Callee, Ptr, 0, 0);

          // The dtor took care of deleting the object.
          ShouldCallDelete = false;
        } else
          EmitCXXDestructorCall(Dtor, Dtor_Complete, Ptr);
      }
    }
  }

  if (ShouldCallDelete)
    EmitDeleteCall(E->getOperatorDelete(), Ptr, DeleteTy);

  EmitBlock(DeleteEnd);
}

llvm::Value *CodeGenFunction::EmitCXXTypeidExpr(const CXXTypeidExpr *E) {
  QualType Ty = E->getType();
  const llvm::Type *LTy = ConvertType(Ty)->getPointerTo();

  if (E->isTypeOperand())
    return Builder.CreateBitCast(CGM.GetAddrOfRTTI(E->getTypeOperand()), LTy);

  Expr *subE = E->getExprOperand();
  Ty = subE->getType();
  CanQualType CanTy = CGM.getContext().getCanonicalType(Ty);
  Ty = CanTy.getUnqualifiedType().getNonReferenceType();
  if (const RecordType *RT = Ty->getAs<RecordType>()) {
    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (RD->isPolymorphic()) {
      // FIXME: if subE is an lvalue do
      LValue Obj = EmitLValue(subE);
      llvm::Value *This = Obj.getAddress();
      LTy = LTy->getPointerTo()->getPointerTo();
      llvm::Value *V = Builder.CreateBitCast(This, LTy);
      // We need to do a zero check for *p, unless it has NonNullAttr.
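      // ([expr.typeid] requires typeid applied to a dereferenced null pointer
      // to throw std::bad_typeid; the Itanium ABI routes that through
      // __cxa_bad_typeid, called in the zero block below.)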
      // FIXME: PointerType->hasAttr<NonNullAttr>()
      bool CanBeZero = false;
      if (UnaryOperator *UO = dyn_cast<UnaryOperator>(subE->IgnoreParens()))
        if (UO->getOpcode() == UnaryOperator::Deref)
          CanBeZero = true;
      if (CanBeZero) {
        llvm::BasicBlock *NonZeroBlock = createBasicBlock();
        llvm::BasicBlock *ZeroBlock = createBasicBlock();

        llvm::Value *Zero = llvm::Constant::getNullValue(LTy);
        Builder.CreateCondBr(Builder.CreateICmpNE(V, Zero),
                             NonZeroBlock, ZeroBlock);
        EmitBlock(ZeroBlock);
        /// Call __cxa_bad_typeid
        const llvm::Type *ResultType = llvm::Type::getVoidTy(VMContext);
        const llvm::FunctionType *FTy;
        FTy = llvm::FunctionType::get(ResultType, false);
        llvm::Value *F = CGM.CreateRuntimeFunction(FTy, "__cxa_bad_typeid");
        Builder.CreateCall(F)->setDoesNotReturn();
        Builder.CreateUnreachable();
        EmitBlock(NonZeroBlock);
      }
      V = Builder.CreateLoad(V, "vtable");
      V = Builder.CreateConstInBoundsGEP1_64(V, -1ULL);
      V = Builder.CreateLoad(V);
      return V;
    }
    return Builder.CreateBitCast(CGM.GenerateRTTI(RD), LTy);
  }
  return Builder.CreateBitCast(CGM.GenerateRTTI(Ty), LTy);
}

llvm::Value *CodeGenFunction::EmitDynamicCast(llvm::Value *V,
                                              const CXXDynamicCastExpr *DCE) {
  QualType CastTy = DCE->getTypeAsWritten();
  QualType InnerType = CastTy->getPointeeType();
  QualType ArgTy = DCE->getSubExpr()->getType();
  const llvm::Type *LArgTy = ConvertType(ArgTy);
  const llvm::Type *LTy = ConvertType(DCE->getType());

  bool CanBeZero = false;
  bool ToVoid = false;
  bool ThrowOnBad = false;
  if (CastTy->isPointerType()) {
    // FIXME: if PointerType->hasAttr<NonNullAttr>(), we don't set this
    CanBeZero = true;
    if (InnerType->isVoidType())
      ToVoid = true;
  } else {
    LTy = LTy->getPointerTo();
    ThrowOnBad = true;
  }

  CXXRecordDecl *SrcTy;
  QualType Ty = ArgTy;
  if (ArgTy.getTypePtr()->isPointerType()
      || ArgTy.getTypePtr()->isReferenceType())
    Ty = Ty.getTypePtr()->getPointeeType();
  CanQualType CanTy = CGM.getContext().getCanonicalType(Ty);
  Ty = CanTy.getUnqualifiedType();
  SrcTy = cast<CXXRecordDecl>(Ty->getAs<RecordType>()->getDecl());

  llvm::BasicBlock *ContBlock = createBasicBlock();
  llvm::BasicBlock *NullBlock = 0;
  llvm::BasicBlock *NonZeroBlock = 0;
  if (CanBeZero) {
    NonZeroBlock = createBasicBlock();
    NullBlock = createBasicBlock();
    llvm::Value *Zero = llvm::Constant::getNullValue(LArgTy);
    Builder.CreateCondBr(Builder.CreateICmpNE(V, Zero),
                         NonZeroBlock, NullBlock);
    EmitBlock(NonZeroBlock);
  }

  llvm::BasicBlock *BadCastBlock = 0;

  const llvm::Type *PtrDiffTy = ConvertType(getContext().getSizeType());

  // See if this is a dynamic_cast(void*)
  if (ToVoid) {
    llvm::Value *This = V;
    V = Builder.CreateBitCast(This, PtrDiffTy->getPointerTo()->getPointerTo());
    V = Builder.CreateLoad(V, "vtable");
    V = Builder.CreateConstInBoundsGEP1_64(V, -2ULL);
    V = Builder.CreateLoad(V, "offset to top");
    This = Builder.CreateBitCast(This, llvm::Type::getInt8PtrTy(VMContext));
    V = Builder.CreateInBoundsGEP(This, V);
    V = Builder.CreateBitCast(V, LTy);
  } else {
    /// Call __dynamic_cast
    const llvm::Type *ResultType = llvm::Type::getInt8PtrTy(VMContext);
    const llvm::FunctionType *FTy;
    std::vector<const llvm::Type*> ArgTys;
    const llvm::Type *PtrToInt8Ty
      = llvm::Type::getInt8Ty(VMContext)->getPointerTo();
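    // (The Itanium C++ ABI declares the runtime entry point roughly as
    //   void *__dynamic_cast(const void *sub, const __class_type_info *src,
    //                        const __class_type_info *dst,
    //                        ptrdiff_t src2dst_offset);
    // hence the three pointer arguments and the ptrdiff_t hint built below.)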
    ArgTys.push_back(PtrToInt8Ty);
    ArgTys.push_back(PtrToInt8Ty);
    ArgTys.push_back(PtrToInt8Ty);
    ArgTys.push_back(PtrDiffTy);
    FTy = llvm::FunctionType::get(ResultType, ArgTys, false);
    CXXRecordDecl *DstTy;
    Ty = CastTy.getTypePtr()->getPointeeType();
    CanTy = CGM.getContext().getCanonicalType(Ty);
    Ty = CanTy.getUnqualifiedType();
    DstTy = cast<CXXRecordDecl>(Ty->getAs<RecordType>()->getDecl());

    // FIXME: Calculate better hint.
    llvm::Value *hint = llvm::ConstantInt::get(PtrDiffTy, -1ULL);
    llvm::Value *SrcArg = CGM.GenerateRTTIRef(SrcTy);
    llvm::Value *DstArg = CGM.GenerateRTTIRef(DstTy);
    V = Builder.CreateBitCast(V, PtrToInt8Ty);
    V = Builder.CreateCall4(CGM.CreateRuntimeFunction(FTy, "__dynamic_cast"),
                            V, SrcArg, DstArg, hint);
    V = Builder.CreateBitCast(V, LTy);

    if (ThrowOnBad) {
      BadCastBlock = createBasicBlock();

      llvm::Value *Zero = llvm::Constant::getNullValue(LTy);
      Builder.CreateCondBr(Builder.CreateICmpNE(V, Zero),
                           ContBlock, BadCastBlock);
      EmitBlock(BadCastBlock);
      /// Call __cxa_bad_cast
      ResultType = llvm::Type::getVoidTy(VMContext);
      const llvm::FunctionType *FBadTy;
      FBadTy = llvm::FunctionType::get(ResultType, false);
      llvm::Value *F = CGM.CreateRuntimeFunction(FBadTy, "__cxa_bad_cast");
      Builder.CreateCall(F)->setDoesNotReturn();
      Builder.CreateUnreachable();
    }
  }

  if (CanBeZero) {
    Builder.CreateBr(ContBlock);
    EmitBlock(NullBlock);
    Builder.CreateBr(ContBlock);
  }
  EmitBlock(ContBlock);
  if (CanBeZero) {
    llvm::PHINode *PHI = Builder.CreatePHI(LTy);
    PHI->reserveOperandSpace(2);
    PHI->addIncoming(V, NonZeroBlock);
    PHI->addIncoming(llvm::Constant::getNullValue(LTy), NullBlock);
    V = PHI;
  }

  return V;
}