1 //===--- CGClass.cpp - Emit LLVM Code for C++ classes ---------------------===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This contains code dealing with C++ code generation of classes
12 //===----------------------------------------------------------------------===//
15 #include "CGDebugInfo.h"
16 #include "CodeGenFunction.h"
17 #include "clang/AST/CXXInheritance.h"
18 #include "clang/AST/EvaluatedExprVisitor.h"
19 #include "clang/AST/RecordLayout.h"
20 #include "clang/AST/StmtCXX.h"
21 #include "clang/Frontend/CodeGenOptions.h"
23 using namespace clang;
24 using namespace CodeGen;
27 ComputeNonVirtualBaseClassOffset(ASTContext &Context,
28 const CXXRecordDecl *DerivedClass,
29 CastExpr::path_const_iterator Start,
30 CastExpr::path_const_iterator End) {
31 CharUnits Offset = CharUnits::Zero();
33 const CXXRecordDecl *RD = DerivedClass;
35 for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
36 const CXXBaseSpecifier *Base = *I;
37 assert(!Base->isVirtual() && "Should not see virtual bases here!");
40 const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
42 const CXXRecordDecl *BaseDecl =
43 cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());
46 Offset += Layout.getBaseClassOffset(BaseDecl);
55 CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
56 CastExpr::path_const_iterator PathBegin,
57 CastExpr::path_const_iterator PathEnd) {
58 assert(PathBegin != PathEnd && "Base path should not be empty!");
61 ComputeNonVirtualBaseClassOffset(getContext(), ClassDecl,
66 llvm::Type *PtrDiffTy =
67 Types.ConvertType(getContext().getPointerDiffType());
69 return llvm::ConstantInt::get(PtrDiffTy, Offset.getQuantity());
72 /// Gets the address of a direct base class within a complete object.
73 /// This should only be used for (1) non-virtual bases or (2) virtual bases
74 /// when the type is known to be complete (e.g. in complete destructors).
76 /// The object pointed to by 'This' is assumed to be non-null.
78 CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(llvm::Value *This,
79 const CXXRecordDecl *Derived,
80 const CXXRecordDecl *Base,
82 // 'this' must be a pointer (in some address space) to Derived.
83 assert(This->getType()->isPointerTy() &&
84 cast<llvm::PointerType>(This->getType())->getElementType()
85 == ConvertType(Derived));
87 // Compute the offset of the virtual base.
89 const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
91 Offset = Layout.getVBaseClassOffset(Base);
93 Offset = Layout.getBaseClassOffset(Base);
95 // Shift and cast down to the base type.
96 // TODO: for complete types, this should be possible with a GEP.
97 llvm::Value *V = This;
98 if (Offset.isPositive()) {
99 V = Builder.CreateBitCast(V, Int8PtrTy);
100 V = Builder.CreateConstInBoundsGEP1_64(V, Offset.getQuantity());
102 V = Builder.CreateBitCast(V, ConvertType(Base)->getPointerTo());
108 ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, llvm::Value *ptr,
109 CharUnits nonVirtualOffset,
110 llvm::Value *virtualOffset) {
111 // Assert that we have something to do.
112 assert(!nonVirtualOffset.isZero() || virtualOffset != 0);
114 // Compute the offset from the static and dynamic components.
115 llvm::Value *baseOffset;
116 if (!nonVirtualOffset.isZero()) {
117 baseOffset = llvm::ConstantInt::get(CGF.PtrDiffTy,
118 nonVirtualOffset.getQuantity());
120 baseOffset = CGF.Builder.CreateAdd(virtualOffset, baseOffset);
123 baseOffset = virtualOffset;
126 // Apply the base offset.
127 ptr = CGF.Builder.CreateBitCast(ptr, CGF.Int8PtrTy);
128 ptr = CGF.Builder.CreateInBoundsGEP(ptr, baseOffset, "add.ptr");
133 CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
134 const CXXRecordDecl *Derived,
135 CastExpr::path_const_iterator PathBegin,
136 CastExpr::path_const_iterator PathEnd,
137 bool NullCheckValue) {
138 assert(PathBegin != PathEnd && "Base path should not be empty!");
140 CastExpr::path_const_iterator Start = PathBegin;
141 const CXXRecordDecl *VBase = 0;
143 // Sema has done some convenient canonicalization here: if the
144 // access path involved any virtual steps, the conversion path will
145 // *start* with a step down to the correct virtual base subobject,
146 // and hence will not require any further steps.
147 if ((*Start)->isVirtual()) {
149 cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl());
153 // Compute the static offset of the ultimate destination within its
154 // allocating subobject (the virtual base, if there is one, or else
155 // the "complete" object that we see).
156 CharUnits NonVirtualOffset =
157 ComputeNonVirtualBaseClassOffset(getContext(), VBase ? VBase : Derived,
160 // If there's a virtual step, we can sometimes "devirtualize" it.
161 // For now, that's limited to when the derived type is final.
162 // TODO: "devirtualize" this for accesses to known-complete objects.
163 if (VBase && Derived->hasAttr<FinalAttr>()) {
164 const ASTRecordLayout &layout = getContext().getASTRecordLayout(Derived);
165 CharUnits vBaseOffset = layout.getVBaseClassOffset(VBase);
166 NonVirtualOffset += vBaseOffset;
167 VBase = 0; // we no longer have a virtual step
170 // Get the base pointer type.
171 llvm::Type *BasePtrTy =
172 ConvertType((PathEnd[-1])->getType())->getPointerTo();
174 // If the static offset is zero and we don't have a virtual step,
175 // just do a bitcast; null checks are unnecessary.
176 if (NonVirtualOffset.isZero() && !VBase) {
177 return Builder.CreateBitCast(Value, BasePtrTy);
180 llvm::BasicBlock *origBB = 0;
181 llvm::BasicBlock *endBB = 0;
183 // Skip over the offset (and the vtable load) if we're supposed to
184 // null-check the pointer.
185 if (NullCheckValue) {
186 origBB = Builder.GetInsertBlock();
187 llvm::BasicBlock *notNullBB = createBasicBlock("cast.notnull");
188 endBB = createBasicBlock("cast.end");
190 llvm::Value *isNull = Builder.CreateIsNull(Value);
191 Builder.CreateCondBr(isNull, endBB, notNullBB);
192 EmitBlock(notNullBB);
195 // Compute the virtual offset.
196 llvm::Value *VirtualOffset = 0;
198 VirtualOffset = GetVirtualBaseClassOffset(Value, Derived, VBase);
201 // Apply both offsets.
202 Value = ApplyNonVirtualAndVirtualOffset(*this, Value,
206 // Cast to the destination type.
207 Value = Builder.CreateBitCast(Value, BasePtrTy);
209 // Build a phi if we needed a null check.
210 if (NullCheckValue) {
211 llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
212 Builder.CreateBr(endBB);
215 llvm::PHINode *PHI = Builder.CreatePHI(BasePtrTy, 2, "cast.result");
216 PHI->addIncoming(Value, notNullBB);
217 PHI->addIncoming(llvm::Constant::getNullValue(BasePtrTy), origBB);
225 CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value,
226 const CXXRecordDecl *Derived,
227 CastExpr::path_const_iterator PathBegin,
228 CastExpr::path_const_iterator PathEnd,
229 bool NullCheckValue) {
230 assert(PathBegin != PathEnd && "Base path should not be empty!");
233 getContext().getCanonicalType(getContext().getTagDeclType(Derived));
234 llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo();
236 llvm::Value *NonVirtualOffset =
237 CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);
239 if (!NonVirtualOffset) {
240 // No offset, we can just cast back.
241 return Builder.CreateBitCast(Value, DerivedPtrTy);
244 llvm::BasicBlock *CastNull = 0;
245 llvm::BasicBlock *CastNotNull = 0;
246 llvm::BasicBlock *CastEnd = 0;
248 if (NullCheckValue) {
249 CastNull = createBasicBlock("cast.null");
250 CastNotNull = createBasicBlock("cast.notnull");
251 CastEnd = createBasicBlock("cast.end");
253 llvm::Value *IsNull = Builder.CreateIsNull(Value);
254 Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
255 EmitBlock(CastNotNull);
259 Value = Builder.CreateBitCast(Value, Int8PtrTy);
260 Value = Builder.CreateGEP(Value, Builder.CreateNeg(NonVirtualOffset),
264 Value = Builder.CreateBitCast(Value, DerivedPtrTy);
266 if (NullCheckValue) {
267 Builder.CreateBr(CastEnd);
269 Builder.CreateBr(CastEnd);
272 llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
273 PHI->addIncoming(Value, CastNotNull);
274 PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
282 /// GetVTTParameter - Return the VTT parameter that should be passed to a
283 /// base constructor/destructor with virtual bases.
284 static llvm::Value *GetVTTParameter(CodeGenFunction &CGF, GlobalDecl GD,
285 bool ForVirtualBase) {
286 if (!CodeGenVTables::needsVTTParameter(GD)) {
287 // This constructor/destructor does not need a VTT parameter.
291 const CXXRecordDecl *RD = cast<CXXMethodDecl>(CGF.CurFuncDecl)->getParent();
292 const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();
296 uint64_t SubVTTIndex;
298 // If the record matches the base, this is the complete ctor/dtor
299 // variant calling the base variant in a class with virtual bases.
301 assert(!CodeGenVTables::needsVTTParameter(CGF.CurGD) &&
302 "doing no-op VTT offset in base dtor/ctor?");
303 assert(!ForVirtualBase && "Can't have same class as virtual base!");
306 const ASTRecordLayout &Layout =
307 CGF.getContext().getASTRecordLayout(RD);
308 CharUnits BaseOffset = ForVirtualBase ?
309 Layout.getVBaseClassOffset(Base) :
310 Layout.getBaseClassOffset(Base);
313 CGF.CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
314 assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
317 if (CodeGenVTables::needsVTTParameter(CGF.CurGD)) {
318 // A VTT parameter was passed to the constructor, use it.
319 VTT = CGF.LoadCXXVTT();
320 VTT = CGF.Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex);
322 // We're the complete constructor, so get the VTT by name.
323 VTT = CGF.CGM.getVTables().GetAddrOfVTT(RD);
324 VTT = CGF.Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex);
331 /// Call the destructor for a direct base class.
332 struct CallBaseDtor : EHScopeStack::Cleanup {
333 const CXXRecordDecl *BaseClass;
335 CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
336 : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}
338 void Emit(CodeGenFunction &CGF, Flags flags) {
339 const CXXRecordDecl *DerivedClass =
340 cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();
342 const CXXDestructorDecl *D = BaseClass->getDestructor();
344 CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThis(),
345 DerivedClass, BaseClass,
347 CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual, Addr);
351 /// A visitor which checks whether an initializer uses 'this' in a
352 /// way which requires the vtable to be properly set.
353 struct DynamicThisUseChecker : EvaluatedExprVisitor<DynamicThisUseChecker> {
354 typedef EvaluatedExprVisitor<DynamicThisUseChecker> super;
358 DynamicThisUseChecker(ASTContext &C) : super(C), UsesThis(false) {}
360 // Black-list all explicit and implicit references to 'this'.
362 // Do we need to worry about external references to 'this' derived
363 // from arbitrary code? If so, then anything which runs arbitrary
364 // external code might potentially access the vtable.
365 void VisitCXXThisExpr(CXXThisExpr *E) { UsesThis = true; }
369 static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
370 DynamicThisUseChecker Checker(C);
371 Checker.Visit(const_cast<Expr*>(Init));
372 return Checker.UsesThis;
375 static void EmitBaseInitializer(CodeGenFunction &CGF,
376 const CXXRecordDecl *ClassDecl,
377 CXXCtorInitializer *BaseInit,
378 CXXCtorType CtorType) {
379 assert(BaseInit->isBaseInitializer() &&
380 "Must have base initializer!");
382 llvm::Value *ThisPtr = CGF.LoadCXXThis();
384 const Type *BaseType = BaseInit->getBaseClass();
385 CXXRecordDecl *BaseClassDecl =
386 cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());
388 bool isBaseVirtual = BaseInit->isBaseVirtual();
390 // The base constructor doesn't construct virtual bases.
391 if (CtorType == Ctor_Base && isBaseVirtual)
394 // If the initializer for the base (other than the constructor
395 // itself) accesses 'this' in any way, we need to initialize the
397 if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
398 CGF.InitializeVTablePointers(ClassDecl);
400 // We can pretend to be a complete class because it only matters for
401 // virtual bases, and we only do virtual bases for complete ctors.
403 CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
406 CharUnits Alignment = CGF.getContext().getTypeAlignInChars(BaseType);
407 AggValueSlot AggSlot =
408 AggValueSlot::forAddr(V, Alignment, Qualifiers(),
409 AggValueSlot::IsDestructed,
410 AggValueSlot::DoesNotNeedGCBarriers,
411 AggValueSlot::IsNotAliased);
413 CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);
415 if (CGF.CGM.getLangOpts().Exceptions &&
416 !BaseClassDecl->hasTrivialDestructor())
417 CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
421 static void EmitAggMemberInitializer(CodeGenFunction &CGF,
424 llvm::Value *ArrayIndexVar,
426 ArrayRef<VarDecl *> ArrayIndexes,
428 if (Index == ArrayIndexes.size()) {
430 { // Scope for Cleanups.
431 CodeGenFunction::RunCleanupsScope Cleanups(CGF);
434 // If we have an array index variable, load it and use it as an offset.
435 // Then, increment the value.
436 llvm::Value *Dest = LHS.getAddress();
437 llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar);
438 Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress");
439 llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1);
440 Next = CGF.Builder.CreateAdd(ArrayIndex, Next, "inc");
441 CGF.Builder.CreateStore(Next, ArrayIndexVar);
443 // Update the LValue.
445 CharUnits Align = CGF.getContext().getTypeAlignInChars(T);
446 LV.setAlignment(std::min(Align, LV.getAlignment()));
449 if (!CGF.hasAggregateLLVMType(T)) {
450 CGF.EmitScalarInit(Init, /*decl*/ 0, LV, false);
451 } else if (T->isAnyComplexType()) {
452 CGF.EmitComplexExprIntoAddr(Init, LV.getAddress(),
453 LV.isVolatileQualified());
456 AggValueSlot::forLValue(LV,
457 AggValueSlot::IsDestructed,
458 AggValueSlot::DoesNotNeedGCBarriers,
459 AggValueSlot::IsNotAliased);
461 CGF.EmitAggExpr(Init, Slot);
465 // Now, outside of the initializer cleanup scope, destroy the backing array
466 // for a std::initializer_list member.
467 CGF.MaybeEmitStdInitializerListCleanup(LV.getAddress(), Init);
472 const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T);
473 assert(Array && "Array initialization without the array type?");
474 llvm::Value *IndexVar
475 = CGF.GetAddrOfLocalVar(ArrayIndexes[Index]);
476 assert(IndexVar && "Array index variable not loaded");
478 // Initialize this index variable to zero.
480 = llvm::Constant::getNullValue(
481 CGF.ConvertType(CGF.getContext().getSizeType()));
482 CGF.Builder.CreateStore(Zero, IndexVar);
484 // Start the loop with a block that tests the condition.
485 llvm::BasicBlock *CondBlock = CGF.createBasicBlock("for.cond");
486 llvm::BasicBlock *AfterFor = CGF.createBasicBlock("for.end");
488 CGF.EmitBlock(CondBlock);
490 llvm::BasicBlock *ForBody = CGF.createBasicBlock("for.body");
491 // Generate: if (loop-index < number-of-elements) fall to the loop body,
492 // otherwise, go to the block after the for-loop.
493 uint64_t NumElements = Array->getSize().getZExtValue();
494 llvm::Value *Counter = CGF.Builder.CreateLoad(IndexVar);
495 llvm::Value *NumElementsPtr =
496 llvm::ConstantInt::get(Counter->getType(), NumElements);
497 llvm::Value *IsLess = CGF.Builder.CreateICmpULT(Counter, NumElementsPtr,
500 // If the condition is true, execute the body.
501 CGF.Builder.CreateCondBr(IsLess, ForBody, AfterFor);
503 CGF.EmitBlock(ForBody);
504 llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc");
507 CodeGenFunction::RunCleanupsScope Cleanups(CGF);
509 // Inside the loop body recurse to emit the inner loop or, eventually, the
511 EmitAggMemberInitializer(CGF, LHS, Init, ArrayIndexVar,
512 Array->getElementType(), ArrayIndexes, Index + 1);
515 CGF.EmitBlock(ContinueBlock);
517 // Emit the increment of the loop counter.
518 llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
519 Counter = CGF.Builder.CreateLoad(IndexVar);
520 NextVal = CGF.Builder.CreateAdd(Counter, NextVal, "inc");
521 CGF.Builder.CreateStore(NextVal, IndexVar);
523 // Finally, branch back up to the condition for the next iteration.
524 CGF.EmitBranch(CondBlock);
526 // Emit the fall-through block.
527 CGF.EmitBlock(AfterFor, true);
531 struct CallMemberDtor : EHScopeStack::Cleanup {
533 CXXDestructorDecl *Dtor;
535 CallMemberDtor(llvm::Value *V, CXXDestructorDecl *Dtor)
536 : V(V), Dtor(Dtor) {}
538 void Emit(CodeGenFunction &CGF, Flags flags) {
539 CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
545 static bool hasTrivialCopyOrMoveConstructor(const CXXRecordDecl *Record,
547 return Moving ? Record->hasTrivialMoveConstructor() :
548 Record->hasTrivialCopyConstructor();
551 static void EmitMemberInitializer(CodeGenFunction &CGF,
552 const CXXRecordDecl *ClassDecl,
553 CXXCtorInitializer *MemberInit,
554 const CXXConstructorDecl *Constructor,
555 FunctionArgList &Args) {
556 assert(MemberInit->isAnyMemberInitializer() &&
557 "Must have member initializer!");
558 assert(MemberInit->getInit() && "Must have initializer!");
560 // non-static data member initializers.
561 FieldDecl *Field = MemberInit->getAnyMember();
562 QualType FieldType = Field->getType();
564 llvm::Value *ThisPtr = CGF.LoadCXXThis();
565 QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
566 LValue LHS = CGF.MakeNaturalAlignAddrLValue(ThisPtr, RecordTy);
568 if (MemberInit->isIndirectMemberInitializer()) {
569 // If we are initializing an anonymous union field, drill down to
571 IndirectFieldDecl *IndirectField = MemberInit->getIndirectMember();
572 IndirectFieldDecl::chain_iterator I = IndirectField->chain_begin(),
573 IEnd = IndirectField->chain_end();
574 for ( ; I != IEnd; ++I)
575 LHS = CGF.EmitLValueForFieldInitialization(LHS, cast<FieldDecl>(*I));
576 FieldType = MemberInit->getIndirectMember()->getAnonField()->getType();
578 LHS = CGF.EmitLValueForFieldInitialization(LHS, Field);
581 // Special case: if we are in a copy or move constructor, and we are copying
582 // an array of PODs or classes with trivial copy constructors, ignore the
583 // AST and perform the copy we know is equivalent.
584 // FIXME: This is hacky at best... if we had a bit more explicit information
585 // in the AST, we could generalize it more easily.
586 const ConstantArrayType *Array
587 = CGF.getContext().getAsConstantArrayType(FieldType);
588 if (Array && Constructor->isImplicitlyDefined() &&
589 Constructor->isCopyOrMoveConstructor()) {
590 QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
591 const CXXRecordDecl *Record = BaseElementTy->getAsCXXRecordDecl();
592 if (BaseElementTy.isPODType(CGF.getContext()) ||
593 (Record && hasTrivialCopyOrMoveConstructor(Record,
594 Constructor->isMoveConstructor()))) {
595 // Find the source pointer. We knows it's the last argument because
596 // we know we're in a copy constructor.
597 unsigned SrcArgIndex = Args.size() - 1;
599 = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(Args[SrcArgIndex]));
600 LValue ThisRHSLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
601 LValue Src = CGF.EmitLValueForFieldInitialization(ThisRHSLV, Field);
603 // Copy the aggregate.
604 CGF.EmitAggregateCopy(LHS.getAddress(), Src.getAddress(), FieldType,
605 LHS.isVolatileQualified());
610 ArrayRef<VarDecl *> ArrayIndexes;
611 if (MemberInit->getNumArrayIndices())
612 ArrayIndexes = MemberInit->getArrayIndexes();
613 CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit(), ArrayIndexes);
616 void CodeGenFunction::EmitInitializerForField(FieldDecl *Field,
617 LValue LHS, Expr *Init,
618 ArrayRef<VarDecl *> ArrayIndexes) {
619 QualType FieldType = Field->getType();
620 if (!hasAggregateLLVMType(FieldType)) {
621 if (LHS.isSimple()) {
622 EmitExprAsInit(Init, Field, LHS, false);
624 RValue RHS = RValue::get(EmitScalarExpr(Init));
625 EmitStoreThroughLValue(RHS, LHS);
627 } else if (FieldType->isAnyComplexType()) {
628 EmitComplexExprIntoAddr(Init, LHS.getAddress(), LHS.isVolatileQualified());
630 llvm::Value *ArrayIndexVar = 0;
631 if (ArrayIndexes.size()) {
632 llvm::Type *SizeTy = ConvertType(getContext().getSizeType());
634 // The LHS is a pointer to the first object we'll be constructing, as
636 QualType BaseElementTy = getContext().getBaseElementType(FieldType);
637 llvm::Type *BasePtr = ConvertType(BaseElementTy);
638 BasePtr = llvm::PointerType::getUnqual(BasePtr);
639 llvm::Value *BaseAddrPtr = Builder.CreateBitCast(LHS.getAddress(),
641 LHS = MakeAddrLValue(BaseAddrPtr, BaseElementTy);
643 // Create an array index that will be used to walk over all of the
644 // objects we're constructing.
645 ArrayIndexVar = CreateTempAlloca(SizeTy, "object.index");
646 llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
647 Builder.CreateStore(Zero, ArrayIndexVar);
650 // Emit the block variables for the array indices, if any.
651 for (unsigned I = 0, N = ArrayIndexes.size(); I != N; ++I)
652 EmitAutoVarDecl(*ArrayIndexes[I]);
655 EmitAggMemberInitializer(*this, LHS, Init, ArrayIndexVar, FieldType,
658 if (!CGM.getLangOpts().Exceptions)
661 // FIXME: If we have an array of classes w/ non-trivial destructors,
662 // we need to destroy in reverse order of construction along the exception
664 const RecordType *RT = FieldType->getAs<RecordType>();
668 CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
669 if (!RD->hasTrivialDestructor())
670 EHStack.pushCleanup<CallMemberDtor>(EHCleanup, LHS.getAddress(),
671 RD->getDestructor());
675 /// Checks whether the given constructor is a valid subject for the
676 /// complete-to-base constructor delegation optimization, i.e.
677 /// emitting the complete constructor as a simple call to the base
679 static bool IsConstructorDelegationValid(const CXXConstructorDecl *Ctor) {
681 // Currently we disable the optimization for classes with virtual
682 // bases because (1) the addresses of parameter variables need to be
683 // consistent across all initializers but (2) the delegate function
684 // call necessarily creates a second copy of the parameter variable.
686 // The limiting example (purely theoretical AFAIK):
687 // struct A { A(int &c) { c++; } };
688 // struct B : virtual A {
689 // B(int count) : A(count) { printf("%d\n", count); }
691 // ...although even this example could in principle be emitted as a
692 // delegation since the address of the parameter doesn't escape.
693 if (Ctor->getParent()->getNumVBases()) {
694 // TODO: white-list trivial vbase initializers. This case wouldn't
695 // be subject to the restrictions below.
697 // TODO: white-list cases where:
698 // - there are no non-reference parameters to the constructor
699 // - the initializers don't access any non-reference parameters
700 // - the initializers don't take the address of non-reference
703 // If we ever add any of the above cases, remember that:
704 // - function-try-blocks will always blacklist this optimization
705 // - we need to perform the constructor prologue and cleanup in
706 // EmitConstructorBody.
711 // We also disable the optimization for variadic functions because
712 // it's impossible to "re-pass" varargs.
713 if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic())
716 // FIXME: Decide if we can do a delegation of a delegating constructor.
717 if (Ctor->isDelegatingConstructor())
723 /// EmitConstructorBody - Emits the body of the current constructor.
724 void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
725 const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
726 CXXCtorType CtorType = CurGD.getCtorType();
728 // Before we go any further, try the complete->base constructor
729 // delegation optimization.
730 if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor) &&
731 CGM.getContext().getTargetInfo().getCXXABI() != CXXABI_Microsoft) {
732 if (CGDebugInfo *DI = getDebugInfo())
733 DI->EmitLocation(Builder, Ctor->getLocEnd());
734 EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args);
738 Stmt *Body = Ctor->getBody();
740 // Enter the function-try-block before the constructor prologue if
742 bool IsTryBody = (Body && isa<CXXTryStmt>(Body));
744 EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);
746 EHScopeStack::stable_iterator CleanupDepth = EHStack.stable_begin();
748 // TODO: in restricted cases, we can emit the vbase initializers of
749 // a complete ctor and then delegate to the base ctor.
751 // Emit the constructor prologue, i.e. the base and member
753 EmitCtorPrologue(Ctor, CtorType, Args);
755 // Emit the body of the statement.
757 EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
761 // Emit any cleanup blocks associated with the member or base
762 // initializers, which includes (along the exceptional path) the
763 // destructors for those members and bases that were fully
765 PopCleanupBlocks(CleanupDepth);
768 ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
771 /// EmitCtorPrologue - This routine generates necessary code to initialize
772 /// base classes and non-static data members belonging to this constructor.
773 void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
774 CXXCtorType CtorType,
775 FunctionArgList &Args) {
776 if (CD->isDelegatingConstructor())
777 return EmitDelegatingCXXConstructorCall(CD, Args);
779 const CXXRecordDecl *ClassDecl = CD->getParent();
781 SmallVector<CXXCtorInitializer *, 8> MemberInitializers;
783 for (CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
786 CXXCtorInitializer *Member = (*B);
788 if (Member->isBaseInitializer()) {
789 EmitBaseInitializer(*this, ClassDecl, Member, CtorType);
791 assert(Member->isAnyMemberInitializer() &&
792 "Delegating initializer on non-delegating constructor");
793 MemberInitializers.push_back(Member);
797 InitializeVTablePointers(ClassDecl);
799 for (unsigned I = 0, E = MemberInitializers.size(); I != E; ++I)
800 EmitMemberInitializer(*this, ClassDecl, MemberInitializers[I], CD, Args);
804 FieldHasTrivialDestructorBody(ASTContext &Context, const FieldDecl *Field);
807 HasTrivialDestructorBody(ASTContext &Context,
808 const CXXRecordDecl *BaseClassDecl,
809 const CXXRecordDecl *MostDerivedClassDecl)
811 // If the destructor is trivial we don't have to check anything else.
812 if (BaseClassDecl->hasTrivialDestructor())
815 if (!BaseClassDecl->getDestructor()->hasTrivialBody())
819 for (CXXRecordDecl::field_iterator I = BaseClassDecl->field_begin(),
820 E = BaseClassDecl->field_end(); I != E; ++I) {
821 const FieldDecl *Field = *I;
823 if (!FieldHasTrivialDestructorBody(Context, Field))
827 // Check non-virtual bases.
828 for (CXXRecordDecl::base_class_const_iterator I =
829 BaseClassDecl->bases_begin(), E = BaseClassDecl->bases_end();
834 const CXXRecordDecl *NonVirtualBase =
835 cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
836 if (!HasTrivialDestructorBody(Context, NonVirtualBase,
837 MostDerivedClassDecl))
841 if (BaseClassDecl == MostDerivedClassDecl) {
842 // Check virtual bases.
843 for (CXXRecordDecl::base_class_const_iterator I =
844 BaseClassDecl->vbases_begin(), E = BaseClassDecl->vbases_end();
846 const CXXRecordDecl *VirtualBase =
847 cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
848 if (!HasTrivialDestructorBody(Context, VirtualBase,
849 MostDerivedClassDecl))
858 FieldHasTrivialDestructorBody(ASTContext &Context,
859 const FieldDecl *Field)
861 QualType FieldBaseElementType = Context.getBaseElementType(Field->getType());
863 const RecordType *RT = FieldBaseElementType->getAs<RecordType>();
867 CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
868 return HasTrivialDestructorBody(Context, FieldClassDecl, FieldClassDecl);
871 /// CanSkipVTablePointerInitialization - Check whether we need to initialize
872 /// any vtable pointers before calling this destructor.
873 static bool CanSkipVTablePointerInitialization(ASTContext &Context,
874 const CXXDestructorDecl *Dtor) {
875 if (!Dtor->hasTrivialBody())
879 const CXXRecordDecl *ClassDecl = Dtor->getParent();
880 for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
881 E = ClassDecl->field_end(); I != E; ++I) {
882 const FieldDecl *Field = *I;
884 if (!FieldHasTrivialDestructorBody(Context, Field))
891 /// EmitDestructorBody - Emits the body of the current destructor.
892 void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
893 const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
894 CXXDtorType DtorType = CurGD.getDtorType();
896 // The call to operator delete in a deleting destructor happens
897 // outside of the function-try-block, which means it's always
898 // possible to delegate the destructor body to the complete
899 // destructor. Do so.
900 if (DtorType == Dtor_Deleting) {
901 EnterDtorCleanups(Dtor, Dtor_Deleting);
902 EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
908 Stmt *Body = Dtor->getBody();
910 // If the body is a function-try-block, enter the try before
912 bool isTryBody = (Body && isa<CXXTryStmt>(Body));
914 EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);
916 // Enter the epilogue cleanups.
917 RunCleanupsScope DtorEpilogue(*this);
919 // If this is the complete variant, just invoke the base variant;
920 // the epilogue will destruct the virtual bases. But we can't do
921 // this optimization if the body is a function-try-block, because
922 // we'd introduce *two* handler blocks.
924 case Dtor_Deleting: llvm_unreachable("already handled deleting case");
927 // Enter the cleanup scopes for virtual bases.
928 EnterDtorCleanups(Dtor, Dtor_Complete);
930 if (!isTryBody && CGM.getContext().getTargetInfo().getCXXABI() != CXXABI_Microsoft) {
931 EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
935 // Fallthrough: act like we're in the base variant.
938 // Enter the cleanup scopes for fields and non-virtual bases.
939 EnterDtorCleanups(Dtor, Dtor_Base);
941 // Initialize the vtable pointers before entering the body.
942 if (!CanSkipVTablePointerInitialization(getContext(), Dtor))
943 InitializeVTablePointers(Dtor->getParent());
946 EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
950 assert(Dtor->isImplicit() && "bodyless dtor not implicit");
951 // nothing to do besides what's in the epilogue
953 // -fapple-kext must inline any call to this dtor into
954 // the caller's body.
955 if (getContext().getLangOpts().AppleKext)
956 CurFn->addFnAttr(llvm::Attribute::AlwaysInline);
960 // Jump out through the epilogue cleanups.
961 DtorEpilogue.ForceCleanup();
963 // Exit the try if applicable.
965 ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
969 /// Call the operator delete associated with the current destructor.
970 struct CallDtorDelete : EHScopeStack::Cleanup {
973 void Emit(CodeGenFunction &CGF, Flags flags) {
974 const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
975 const CXXRecordDecl *ClassDecl = Dtor->getParent();
976 CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(),
977 CGF.getContext().getTagDeclType(ClassDecl));
/// DestroyField - A cleanup which destroys one non-static data member of
/// 'this' using a precomputed destroyer callback.
class DestroyField : public EHScopeStack::Cleanup {
  const FieldDecl *field;                 // member to destroy
  CodeGenFunction::Destroyer *destroyer;  // destruction routine for its type
  bool useEHCleanupForArray;              // whether array destruction should
                                          // push partial EH cleanups
  DestroyField(const FieldDecl *field, CodeGenFunction::Destroyer *destroyer,
               bool useEHCleanupForArray)
    : field(field), destroyer(destroyer),
      useEHCleanupForArray(useEHCleanupForArray) {}
  void Emit(CodeGenFunction &CGF, Flags flags) {
    // Find the address of the field.
    llvm::Value *thisValue = CGF.LoadCXXThis();
    QualType RecordTy = CGF.getContext().getTagDeclType(field->getParent());
    LValue ThisLV = CGF.MakeAddrLValue(thisValue, RecordTy);
    LValue LV = CGF.EmitLValueForField(ThisLV, field);
    assert(LV.isSimple());
    // Only request per-element EH cleanups when running as a normal
    // cleanup; during unwinding the array is already being torn down.
    CGF.emitDestroy(LV.getAddress(), field->getType(), destroyer,
                    flags.isForNormalCleanup() && useEHCleanupForArray);
/// EnterDtorCleanups - Push the cleanups that implement the epilogue of a
/// destructor: the operator delete call for the deleting variant, the
/// virtual-base destructor calls for the complete variant, and the
/// non-virtual-base and member destructor calls for the base variant.
/// Cleanups run in the reverse of the order they are pushed here, matching
/// reverse order of construction.
void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
                                        CXXDtorType DtorType) {
  assert(!DD->isTrivial() &&
         "Should not emit dtor epilogue for trivial dtor!");
  // The deleting-destructor phase just needs to call the appropriate
  // operator delete that Sema picked up.
  if (DtorType == Dtor_Deleting) {
    assert(DD->getOperatorDelete() &&
           "operator delete missing - EmitDtorEpilogue");
    EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
  const CXXRecordDecl *ClassDecl = DD->getParent();
  // Unions have no bases and do not call field destructors.
  if (ClassDecl->isUnion())
  // The complete-destructor phase just destructs all the virtual bases.
  if (DtorType == Dtor_Complete) {
    // We push them in the forward order so that they'll be popped in
    // the reverse order.
    for (CXXRecordDecl::base_class_const_iterator I =
           ClassDecl->vbases_begin(), E = ClassDecl->vbases_end();
      const CXXBaseSpecifier &Base = *I;
      CXXRecordDecl *BaseClassDecl
        = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());
      // Ignore trivial destructors.
      if (BaseClassDecl->hasTrivialDestructor())
      EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                        /*BaseIsVirtual*/ true);
  // Only the base variant destroys non-virtual bases and members.
  assert(DtorType == Dtor_Base);
  // Destroy non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I =
         ClassDecl->bases_begin(), E = ClassDecl->bases_end(); I != E; ++I) {
    const CXXBaseSpecifier &Base = *I;
    // Ignore virtual bases.
    if (Base.isVirtual())
    CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();
    // Ignore trivial destructors.
    if (BaseClassDecl->hasTrivialDestructor())
    EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                      /*BaseIsVirtual*/ false);
  // Destroy direct fields.
  SmallVector<const FieldDecl *, 16> FieldDecls;
  for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
       E = ClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *field = *I;
    QualType type = field->getType();
    QualType::DestructionKind dtorKind = type.isDestructedType();
    // Skip fields whose type needs no destruction at all.
    if (!dtorKind) continue;
    // Anonymous union members do not have their destructors called.
    const RecordType *RT = type->getAsUnionType();
    if (RT && RT->getDecl()->isAnonymousStructOrUnion()) continue;
    // dtorKind decides whether an EH-only or normal+EH cleanup is needed.
    CleanupKind cleanupKind = getCleanupKind(dtorKind);
    EHStack.pushCleanup<DestroyField>(cleanupKind, field,
                                      getDestroyer(dtorKind),
                                      cleanupKind & EHCleanup);
/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.  This overload
/// computes the flattened element count from the constant array type and
/// forwards to the (count, pointer) overload.
///
/// \param ctor the constructor to call for each element
/// \param argBegin,argEnd the arguments to evaluate and pass to the
///   constructor
/// \param arrayType the type of the array to initialize
/// \param arrayBegin an arrayType*
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                            const ConstantArrayType *arrayType,
                                            llvm::Value *arrayBegin,
                                            CallExpr::const_arg_iterator argBegin,
                                            CallExpr::const_arg_iterator argEnd,
                                            bool zeroInitialize) {
  QualType elementType;
  // emitArrayLength flattens nested array types into a total element count
  // and yields the ultimate element type in elementType.
  llvm::Value *numElements =
    emitArrayLength(arrayType, elementType, arrayBegin);
  EmitCXXAggrConstructorCall(ctor, numElements, arrayBegin,
                             argBegin, argEnd, zeroInitialize);
/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param numElements the number of elements in the array;
///   may legitimately be zero (see below)
/// \param argBegin,argEnd the arguments to evaluate and pass to the
///   constructor
/// \param arrayBegin a T*, where T is the type constructed by ctor
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                            llvm::Value *numElements,
                                            llvm::Value *arrayBegin,
                                            CallExpr::const_arg_iterator argBegin,
                                            CallExpr::const_arg_iterator argEnd,
                                            bool zeroInitialize) {
  // It's legal for numElements to be zero. This can happen both
  // dynamically, because x can be zero in 'new A[x]', and statically,
  // because of GCC extensions that permit zero-length arrays. There
  // are probably legitimate places where we could assume that this
  // doesn't happen, but it's not clear that it's worth it.
  llvm::BranchInst *zeroCheckBranch = 0;
  // Optimize for a constant count.
  llvm::ConstantInt *constantCount
    = dyn_cast<llvm::ConstantInt>(numElements);
  if (constantCount) {
    // Just skip out if the constant count is zero.
    if (constantCount->isZero()) return;
  // Otherwise, emit the check.
    llvm::BasicBlock *loopBB = createBasicBlock("new.ctorloop");
    llvm::Value *iszero = Builder.CreateIsNull(numElements, "isempty");
    // NOTE: both successors are deliberately loopBB for now; the "is empty"
    // (true) successor is patched to the continuation block at the bottom of
    // this function, once that block exists (see setSuccessor below).
    zeroCheckBranch = Builder.CreateCondBr(iszero, loopBB, loopBB);
  // Find the end of the array.
  llvm::Value *arrayEnd = Builder.CreateInBoundsGEP(arrayBegin, numElements,
  // Enter the loop, setting up a phi for the current location to initialize.
  llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *loopBB = createBasicBlock("arrayctor.loop");
  // 'cur' walks the array: incoming from entry is arrayBegin, incoming from
  // the loop back-edge is 'next' (added below).
  llvm::PHINode *cur = Builder.CreatePHI(arrayBegin->getType(), 2,
  cur->addIncoming(arrayBegin, entryBB);
  // Inside the loop body, emit the constructor call on the array element.
  QualType type = getContext().getTypeDeclType(ctor->getParent());
  // Zero initialize the storage, if requested.
    EmitNullInitialization(cur, type);
  // C++ [class.temporary]p4:
  // There are two contexts in which temporaries are destroyed at a different
  // point than the end of the full-expression. The first context is when a
  // default constructor is called to initialize an element of an array.
  // If the constructor has one or more default arguments, the destruction of
  // every temporary created in a default argument expression is sequenced
  // before the construction of the next array element, if any.
    // Per-element scope so default-argument temporaries die before the next
    // element is constructed, as required by the wording above.
    RunCleanupsScope Scope(*this);
    // Evaluate the constructor and its arguments in a regular
    // partial-destroy cleanup.
    if (getLangOpts().Exceptions &&
        !ctor->getParent()->hasTrivialDestructor()) {
      Destroyer *destroyer = destroyCXXObject;
      // If a constructor throws, destroy the already-constructed prefix
      // [arrayBegin, cur) of the array.
      pushRegularPartialArrayCleanup(arrayBegin, cur, type, *destroyer);
    EmitCXXConstructorCall(ctor, Ctor_Complete, /*ForVirtualBase=*/ false,
                           cur, argBegin, argEnd);
  // Go to the next element.
    Builder.CreateInBoundsGEP(cur, llvm::ConstantInt::get(SizeTy, 1),
  cur->addIncoming(next, Builder.GetInsertBlock());
  // Check whether that's the end of the loop.
  llvm::Value *done = Builder.CreateICmpEQ(next, arrayEnd, "arrayctor.done");
  llvm::BasicBlock *contBB = createBasicBlock("arrayctor.cont");
  Builder.CreateCondBr(done, contBB, loopBB);
  // Patch the earlier check to skip over the loop.
  if (zeroCheckBranch) zeroCheckBranch->setSuccessor(0, contBB);
/// destroyCXXObject - A CodeGenFunction::Destroyer which runs the
/// complete-object destructor of a record-typed object at the given address.
void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF,
  const RecordType *rtype = type->castAs<RecordType>();
  const CXXRecordDecl *record = cast<CXXRecordDecl>(rtype->getDecl());
  const CXXDestructorDecl *dtor = record->getDestructor();
  // Callers are expected to filter out trivially-destructible types.
  assert(!dtor->isTrivial());
  CGF.EmitCXXDestructorCall(dtor, Dtor_Complete, /*for vbase*/ false,
/// EmitCXXConstructorCall - Emit a call to the given constructor variant on
/// 'This'.  Trivial constructors are lowered without emitting a call: a
/// trivial default constructor emits nothing, and a trivial copy/move
/// constructor becomes an aggregate memory copy.
CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                        CXXCtorType Type, bool ForVirtualBase,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {
  CGDebugInfo *DI = getDebugInfo();
      CGM.getCodeGenOpts().DebugInfo == CodeGenOptions::LimitedDebugInfo) {
    // If debug info for this class has not been emitted then this is the
    // right time to do so.
    const CXXRecordDecl *Parent = D->getParent();
    DI->getOrCreateRecordType(CGM.getContext().getTypeDeclType(Parent),
                              Parent->getLocation());
  if (D->isTrivial()) {
    if (ArgBeg == ArgEnd) {
      // Trivial default constructor, no codegen required.
      assert(D->isDefaultConstructor() &&
             "trivial 0-arg ctor not a default ctor");
    // Otherwise this must be a trivial copy or move constructor.
    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyOrMoveConstructor() &&
           "trivial 1-arg ctor not a copy/move ctor");
    const Expr *E = (*ArgBeg);
    QualType Ty = E->getType();
    llvm::Value *Src = EmitLValue(E).getAddress();
    // A trivial copy/move is just a bitwise copy of the object.
    EmitAggregateCopy(This, Src, Ty);
  // Non-trivial: emit a real call, passing a VTT parameter when the
  // constructor variant requires one.
  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(D, Type), ForVirtualBase);
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D, Type);
  EmitCXXMemberCall(D, Callee, ReturnValueSlot(), This, VTT, ArgBeg, ArgEnd);
/// EmitSynthesizedCXXCopyCtorCall - Emit a call to the copy constructor D,
/// copying *Src into *This.  A trivial copy constructor is lowered to an
/// aggregate copy; otherwise the complete-object constructor is called with
/// Src as its first argument followed by the remaining arguments.
CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
                                        llvm::Value *This, llvm::Value *Src,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {
  if (D->isTrivial()) {
    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyOrMoveConstructor() &&
           "trivial 1-arg ctor not a copy/move ctor");
    EmitAggregateCopy(This, Src, (*ArgBeg)->getType());
  // Always call the complete-object variant for a synthesized copy.
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D,
                                                    clang::Ctor_Complete);
  assert(D->isInstance() &&
         "Trying to emit a member call expr on a static method!");
  const FunctionProtoType *FPT = D->getType()->getAs<FunctionProtoType>();
  // Push the this ptr.
  Args.add(RValue::get(This), D->getThisType(getContext()));
  // Push the src ptr.
  QualType QT = *(FPT->arg_type_begin());
  llvm::Type *t = CGM.getTypes().ConvertType(QT);
  // Cast Src to the exact parameter type expected by the constructor.
  Src = Builder.CreateBitCast(Src, t);
  Args.add(RValue::get(Src), QT);
  // Skip over first argument (Src).
  CallExpr::const_arg_iterator Arg = ArgBeg;
  for (FunctionProtoType::arg_type_iterator I = FPT->arg_type_begin()+1,
       E = FPT->arg_type_end(); I != E; ++I, ++Arg) {
    assert(Arg != ArgEnd && "Running over edge of argument list!");
    EmitCallArg(Args, *Arg, *I);
  // Either we've emitted all the call args, or we have a call to a
  // variadic function.
  assert((Arg == ArgEnd || FPT->isVariadic()) &&
         "Extra arguments in non-variadic function!");
  // If we still have any arguments, emit them using the type of the argument.
  for (; Arg != ArgEnd; ++Arg) {
    QualType ArgType = Arg->getType();
    EmitCallArg(Args, *Arg, ArgType);
  EmitCall(CGM.getTypes().arrangeCXXMethodCall(Args, FPT, RequiredArgs::All),
           Callee, ReturnValueSlot(), Args, D);
/// EmitDelegateCXXConstructorCall - Emit a call from the current constructor
/// variant to another variant of the same constructor (e.g. complete ->
/// base), forwarding the incoming function arguments unchanged.
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                CXXCtorType CtorType,
                                                const FunctionArgList &Args) {
  CallArgList DelegateArgs;
  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
  assert(I != E && "no parameters to constructor");
  // First argument: the implicit 'this' pointer.
  DelegateArgs.add(RValue::get(LoadCXXThis()), (*I)->getType());
  // If the target variant takes a VTT parameter, pass it next.
  if (llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(Ctor, CtorType),
                                         /*ForVirtualBase=*/false)) {
    QualType VoidPP = getContext().getPointerType(getContext().VoidPtrTy);
    DelegateArgs.add(RValue::get(VTT), VoidPP);
    // If the current function itself has a VTT parameter, skip it in the
    // incoming argument list so the explicit arguments line up.
    if (CodeGenVTables::needsVTTParameter(CurGD)) {
      assert(I != E && "cannot skip vtt parameter, already done with args");
      assert((*I)->getType() == VoidPP && "skipping parameter not of vtt type");
  // Explicit arguments.
  for (; I != E; ++I) {
    const VarDecl *param = *I;
    EmitDelegateCallArg(DelegateArgs, param);
  EmitCall(CGM.getTypes().arrangeCXXConstructorDeclaration(Ctor, CtorType),
           CGM.GetAddrOfCXXConstructor(Ctor, CtorType),
           ReturnValueSlot(), DelegateArgs, Ctor);
/// CallDelegatingCtorDtor - An EH cleanup used by
/// EmitDelegatingCXXConstructorCall to run the given destructor variant on
/// Addr if construction is unwound after the target constructor completed.
struct CallDelegatingCtorDtor : EHScopeStack::Cleanup {
  const CXXDestructorDecl *Dtor;  // destructor to invoke on Addr
  CallDelegatingCtorDtor(const CXXDestructorDecl *D, llvm::Value *Addr,
    : Dtor(D), Addr(Addr), Type(Type) {}
  void Emit(CodeGenFunction &CGF, Flags flags) {
    CGF.EmitCXXDestructorCall(Dtor, Type, /*ForVirtualBase=*/false,
/// EmitDelegatingCXXConstructorCall - Emit a delegating constructor: all of
/// the construction work is performed by the single target constructor named
/// in the ctor-initializer.
CodeGenFunction::EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                  const FunctionArgList &Args) {
  assert(Ctor->isDelegatingConstructor());
  llvm::Value *ThisPtr = LoadCXXThis();
  QualType Ty = getContext().getTagDeclType(Ctor->getParent());
  CharUnits Alignment = getContext().getTypeAlignInChars(Ty);
  // Evaluate the delegating initializer directly into *this.
  AggValueSlot AggSlot =
    AggValueSlot::forAddr(ThisPtr, Alignment, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased);
  // A delegating constructor has exactly one initializer: the target call.
  EmitAggExpr(Ctor->init_begin()[0]->getInit(), AggSlot);
  const CXXRecordDecl *ClassDecl = Ctor->getParent();
  // Once the target constructor has run, the object is fully constructed
  // and must be destroyed if the rest of this constructor unwinds; pick the
  // destructor variant matching the constructor variant being emitted.
  if (CGM.getLangOpts().Exceptions && !ClassDecl->hasTrivialDestructor()) {
      CurGD.getCtorType() == Ctor_Complete ? Dtor_Complete : Dtor_Base;
    EHStack.pushCleanup<CallDelegatingCtorDtor>(EHCleanup,
                                                ClassDecl->getDestructor(),
/// EmitCXXDestructorCall - Emit a call to the given destructor variant on
/// 'This', passing a VTT parameter when the variant requires one.
void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
                                            bool ForVirtualBase,
                                            llvm::Value *This) {
  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(DD, Type),
  llvm::Value *Callee = 0;
  // -fapple-kext requires destructor calls to go through the vtable so the
  // kernel linker can patch them at runtime.
  if (getContext().getLangOpts().AppleKext)
    Callee = BuildAppleKextVirtualDestructorCall(DD, Type,
    Callee = CGM.GetAddrOfCXXDestructor(DD, Type);
  // Destructors take no user-supplied arguments, only 'this' and the VTT.
  EmitCXXMemberCall(DD, Callee, ReturnValueSlot(), This, VTT, 0, 0);
/// CallLocalDtor - A cleanup which runs the complete-object destructor on an
/// object at a fixed address.
struct CallLocalDtor : EHScopeStack::Cleanup {
  const CXXDestructorDecl *Dtor;  // destructor to run on Addr
  CallLocalDtor(const CXXDestructorDecl *D, llvm::Value *Addr)
    : Dtor(D), Addr(Addr) {}
  void Emit(CodeGenFunction &CGF, Flags flags) {
    CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                              /*ForVirtualBase=*/false, Addr);
/// PushDestructorCleanup - Push a cleanup that runs the given destructor on
/// Addr on both normal scope exit and unwinding.
void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
                                            llvm::Value *Addr) {
  EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
/// PushDestructorCleanup - Push a destructor cleanup for a value of type T
/// located at Addr.  No-op for non-class types and for classes with a
/// trivial destructor.
void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) {
  CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
  if (!ClassDecl) return;
  if (ClassDecl->hasTrivialDestructor()) return;
  const CXXDestructorDecl *D = ClassDecl->getDestructor();
  // Sema should already have marked the destructor as used.
  assert(D && D->isUsed() && "destructor not marked as used!");
  PushDestructorCleanup(D, Addr);
/// GetVirtualBaseClassOffset - Emit code that loads, from the vtable of
/// *This, the dynamic offset of the virtual base BaseClassDecl within
/// ClassDecl.
CodeGenFunction::GetVirtualBaseClassOffset(llvm::Value *This,
                                           const CXXRecordDecl *ClassDecl,
                                           const CXXRecordDecl *BaseClassDecl) {
  llvm::Value *VTablePtr = GetVTablePtr(This, Int8PtrTy);
  // Static position (within the vtable) of the vbase-offset slot.
  CharUnits VBaseOffsetOffset =
    CGM.getVTableContext().getVirtualBaseOffsetOffset(ClassDecl, BaseClassDecl);
  // Index byte-wise off the i8* vtable pointer...
  llvm::Value *VBaseOffsetPtr =
    Builder.CreateConstGEP1_64(VTablePtr, VBaseOffsetOffset.getQuantity(),
                               "vbase.offset.ptr");
  llvm::Type *PtrDiffTy =
    ConvertType(getContext().getPointerDiffType());
  // ...then reinterpret the slot as a ptrdiff_t* and load the offset.
  VBaseOffsetPtr = Builder.CreateBitCast(VBaseOffsetPtr,
                                         PtrDiffTy->getPointerTo());
  llvm::Value *VBaseOffset = Builder.CreateLoad(VBaseOffsetPtr, "vbase.offset");
/// InitializeVTablePointer - Store the correct vtable address point into the
/// vptr of the subobject Base of the object currently being constructed or
/// destructed.
CodeGenFunction::InitializeVTablePointer(BaseSubobject Base,
                                         const CXXRecordDecl *NearestVBase,
                                         CharUnits OffsetFromNearestVBase,
                                         llvm::Constant *VTable,
                                         const CXXRecordDecl *VTableClass) {
  const CXXRecordDecl *RD = Base.getBase();
  // Compute the address point.
  llvm::Value *VTableAddressPoint;
  // Check if we need to use a vtable from the VTT.
  if (CodeGenVTables::needsVTTParameter(CurGD) &&
      (RD->getNumVBases() || NearestVBase)) {
    // Get the secondary vpointer index.
    uint64_t VirtualPointerIndex =
      CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base);
    llvm::Value *VTT = LoadCXXVTT();
    if (VirtualPointerIndex)
      VTT = Builder.CreateConstInBoundsGEP1_64(VTT, VirtualPointerIndex);
    // And load the address point from the VTT.
    VTableAddressPoint = Builder.CreateLoad(VTT);
    // Otherwise take the address point straight out of the vtable constant.
    uint64_t AddressPoint =
      CGM.getVTableContext().getVTableLayout(VTableClass).getAddressPoint(Base);
    VTableAddressPoint =
      Builder.CreateConstInBoundsGEP2_64(VTable, 0, AddressPoint);
  // Compute where to store the address point.
  llvm::Value *VirtualOffset = 0;
  CharUnits NonVirtualOffset = CharUnits::Zero();
  if (CodeGenVTables::needsVTTParameter(CurGD) && NearestVBase) {
    // We need to use the virtual base offset offset because the virtual base
    // might have a different offset in the most derived class.
    VirtualOffset = GetVirtualBaseClassOffset(LoadCXXThis(), VTableClass,
    NonVirtualOffset = OffsetFromNearestVBase;
    // We can just use the base offset in the complete class.
    NonVirtualOffset = Base.getBaseOffset();
  // Apply the offsets.
  llvm::Value *VTableField = LoadCXXThis();
  if (!NonVirtualOffset.isZero() || VirtualOffset)
    VTableField = ApplyNonVirtualAndVirtualOffset(*this, VTableField,
  // Finally, store the address point.
  llvm::Type *AddressPointPtrTy =
    VTableAddressPoint->getType()->getPointerTo();
  VTableField = Builder.CreateBitCast(VTableField, AddressPointPtrTy);
  llvm::StoreInst *Store = Builder.CreateStore(VTableAddressPoint, VTableField);
  // Attach vtable-pointer TBAA metadata to the store.
  CGM.DecorateInstruction(Store, CGM.getTBAAInfoForVTablePtr());
/// InitializeVTablePointers (recursive helper) - Initialize the vptr of
/// Base, then recurse into its dynamic base classes, visiting each virtual
/// base at most once.
CodeGenFunction::InitializeVTablePointers(BaseSubobject Base,
                                          const CXXRecordDecl *NearestVBase,
                                          CharUnits OffsetFromNearestVBase,
                                          bool BaseIsNonVirtualPrimaryBase,
                                          llvm::Constant *VTable,
                                          const CXXRecordDecl *VTableClass,
                                          VisitedVirtualBasesSetTy& VBases) {
  // If this base is a non-virtual primary base the address point has already
  // been set.
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    InitializeVTablePointer(Base, NearestVBase, OffsetFromNearestVBase,
                            VTable, VTableClass);
  const CXXRecordDecl *RD = Base.getBase();
  // Recurse into direct bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    CXXRecordDecl *BaseDecl
      = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
    CharUnits BaseOffset;
    CharUnits BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;
    if (I->isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl))
      // Virtual bases are laid out relative to the most derived class, so
      // query the layout of VTableClass rather than RD.
      const ASTRecordLayout &Layout =
        getContext().getASTRecordLayout(VTableClass);
      BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase = CharUnits::Zero();
      BaseDeclIsNonVirtualPrimaryBase = false;
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
      BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase =
        OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    InitializeVTablePointers(BaseSubobject(BaseDecl, BaseOffset),
                             I->isVirtual() ? BaseDecl : NearestVBase,
                             BaseOffsetFromNearestVBase,
                             BaseDeclIsNonVirtualPrimaryBase,
                             VTable, VTableClass, VBases);
/// InitializeVTablePointers - Initialize all vtable pointers of the class RD
/// currently being constructed or destructed.
void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
  // Ignore classes without a vtable.
  if (!RD->isDynamicClass())
  llvm::Constant *VTable = CGM.getVTables().GetAddrOfVTable(RD);
  // Initialize the vtable pointers for this class and all of its bases.
  VisitedVirtualBasesSetTy VBases;
  InitializeVTablePointers(BaseSubobject(RD, CharUnits::Zero()),
                           /*OffsetFromNearestVBase=*/CharUnits::Zero(),
                           /*BaseIsNonVirtualPrimaryBase=*/false,
                           VTable, RD, VBases);
/// GetVTablePtr - Load the vtable pointer stored at the start of *This,
/// viewed as type Ty, and decorate the load with vptr TBAA metadata.
llvm::Value *CodeGenFunction::GetVTablePtr(llvm::Value *This,
  llvm::Value *VTablePtrSrc = Builder.CreateBitCast(This, Ty->getPointerTo());
  llvm::Instruction *VTable = Builder.CreateLoad(VTablePtrSrc, "vtable");
  CGM.DecorateInstruction(VTable, CGM.getTBAAInfoForVTablePtr());
/// getMostDerivedClassDecl - Strip parens and derived-to-base / no-op casts
/// off Base and return the most-derived class the expression is statically
/// known to refer to.
static const CXXRecordDecl *getMostDerivedClassDecl(const Expr *Base) {
  const Expr *E = Base;
  // Peel casts until a semantically significant expression remains.
    E = E->IgnoreParens();
    if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
      if (CE->getCastKind() == CK_DerivedToBase ||
          CE->getCastKind() == CK_UncheckedDerivedToBase ||
          CE->getCastKind() == CK_NoOp) {
        E = CE->getSubExpr();
  QualType DerivedType = E->getType();
  // Look through a pointer to the pointee class type.
  if (const PointerType *PTy = DerivedType->getAs<PointerType>())
    DerivedType = PTy->getPointeeType();
  return cast<CXXRecordDecl>(DerivedType->castAs<RecordType>()->getDecl());
// FIXME: Ideally Expr::IgnoreParenNoopCasts should do this, but it doesn't do
// quite what we want.
/// skipNoOpCastsAndParens - Strip parentheses, no-op casts, and
/// __extension__ wrappers from E, leaving semantically significant nodes.
static const Expr *skipNoOpCastsAndParens(const Expr *E) {
  if (const ParenExpr *PE = dyn_cast<ParenExpr>(E)) {
    E = PE->getSubExpr();
  if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
    if (CE->getCastKind() == CK_NoOp) {
      E = CE->getSubExpr();
  // __extension__ is only a diagnostics marker; look through it too.
  if (const UnaryOperator *UO = dyn_cast<UnaryOperator>(E)) {
    if (UO->getOpcode() == UO_Extension) {
      E = UO->getSubExpr();
/// canDevirtualizeMemberFunctionCall - Checks whether the given virtual member
/// function call on the given expr can be devirtualized, i.e. whether the
/// static type of the object expression fully determines the callee.
static bool canDevirtualizeMemberFunctionCall(const Expr *Base,
                                              const CXXMethodDecl *MD) {
  // If the most derived class is marked final, we know that no subclass can
  // override this member function and so we can devirtualize it. For example:
  //
  //   struct A { virtual void f(); }
  //   struct B final : A { };
  //
  const CXXRecordDecl *MostDerivedClassDecl = getMostDerivedClassDecl(Base);
  if (MostDerivedClassDecl->hasAttr<FinalAttr>())
  // If the member function is marked 'final', we know that it can't be
  // overridden and can therefore devirtualize it.
  if (MD->hasAttr<FinalAttr>())
  // Similarly, if the class itself is marked 'final' it can't be overridden
  // and we can therefore devirtualize the member function call.
  if (MD->getParent()->hasAttr<FinalAttr>())
  Base = skipNoOpCastsAndParens(Base);
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Base)) {
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl())) {
      // This is a record decl. We know the type and can devirtualize it.
      return VD->getType()->isRecordType();
  // We can always devirtualize calls on temporary object expressions.
  if (isa<CXXConstructExpr>(Base))
  // And calls on bound temporaries.
  if (isa<CXXBindTemporaryExpr>(Base))
  // Check if this is a call expr that returns a record type.
  if (const CallExpr *CE = dyn_cast<CallExpr>(Base))
    return CE->getCallReturnType()->isRecordType();
  // We can't devirtualize the call.
/// UseVirtualCall - Decide whether a call to the virtual method MD through
/// the operator call expression CE must be emitted as a virtual call.
static bool UseVirtualCall(ASTContext &Context,
                           const CXXOperatorCallExpr *CE,
                           const CXXMethodDecl *MD) {
  // Non-virtual methods never need virtual dispatch.
  if (!MD->isVirtual())
  // When building with -fapple-kext, all calls must go through the vtable since
  // the kernel linker can do runtime patching of vtables.
  if (Context.getLangOpts().AppleKext)
  // Otherwise, use a direct call exactly when devirtualization is safe for
  // the object argument (arg 0 of an operator member call).
  return !canDevirtualizeMemberFunctionCall(CE->getArg(0), MD);
/// EmitCXXOperatorMemberCallee - Compute the callee (virtual lookup or
/// direct function address) for a call to the member operator MD on 'This'.
CodeGenFunction::EmitCXXOperatorMemberCallee(const CXXOperatorCallExpr *E,
                                             const CXXMethodDecl *MD,
                                             llvm::Value *This) {
  llvm::FunctionType *fnType =
    CGM.getTypes().GetFunctionType(
                             CGM.getTypes().arrangeCXXMethodDeclaration(MD));
  // Virtual dispatch only when required (see UseVirtualCall above).
  if (UseVirtualCall(getContext(), E, MD))
    return BuildVirtualCall(MD, This, fnType);
  return CGM.GetAddrOfFunction(MD, fnType);
/// EmitForwardingCallToLambda - Emit a call to the given lambda class's call
/// operator, forwarding the prepared argument list, and propagate its result
/// through the current function's return.
void CodeGenFunction::EmitForwardingCallToLambda(const CXXRecordDecl *lambda,
                                                 CallArgList &callArgs) {
  // Lookup the call operator (operator()) in the lambda class.
  DeclarationName operatorName
    = getContext().DeclarationNames.getCXXOperatorName(OO_Call);
  CXXMethodDecl *callOperator =
    cast<CXXMethodDecl>(*lambda->lookup(operatorName).first);
  // Get the address of the call operator.
  const CGFunctionInfo &calleeFnInfo =
    CGM.getTypes().arrangeCXXMethodDeclaration(callOperator);
  llvm::Value *callee =
    CGM.GetAddrOfFunction(GlobalDecl(callOperator),
                          CGM.getTypes().GetFunctionType(calleeFnInfo));
  // Prepare the return slot.
  const FunctionProtoType *FPT =
    callOperator->getType()->castAs<FunctionProtoType>();
  QualType resultType = FPT->getResultType();
  ReturnValueSlot returnSlot;
  // Aggregate results returned indirectly are written straight into our own
  // return slot; other results come back as an RValue and are re-emitted
  // below via EmitReturnOfRValue.
  if (!resultType->isVoidType() &&
      calleeFnInfo.getReturnInfo().getKind() == ABIArgInfo::Indirect &&
      hasAggregateLLVMType(calleeFnInfo.getReturnType()))
    returnSlot = ReturnValueSlot(ReturnValue, resultType.isVolatileQualified());
  // We don't need to separately arrange the call arguments because
  // the call can't be variadic anyway --- it's impossible to forward
  // variadic arguments.
  // Now emit our call.
  RValue RV = EmitCall(calleeFnInfo, callee, returnSlot,
                       callArgs, callOperator);
  // If necessary, copy the returned value into the slot.
  if (!resultType->isVoidType() && returnSlot.isNull())
    EmitReturnOfRValue(RV, resultType);
/// EmitLambdaBlockInvokeBody - Emit the body of a block that forwards to the
/// call operator of the lambda object captured by the block.
void CodeGenFunction::EmitLambdaBlockInvokeBody() {
  const BlockDecl *BD = BlockInfo->getBlockDecl();
  // The lambda object is the block's first capture.
  const VarDecl *variable = BD->capture_begin()->getVariable();
  const CXXRecordDecl *Lambda = variable->getType()->getAsCXXRecordDecl();
  // Start building arguments for forwarding call
  CallArgList CallArgs;
  // 'this' for the call operator is the captured lambda's address.
  QualType ThisType = getContext().getPointerType(getContext().getRecordType(Lambda));
  llvm::Value *ThisPtr = GetAddrOfBlockDecl(variable, false);
  CallArgs.add(RValue::get(ThisPtr), ThisType);
  // Add the rest of the parameters.
  for (BlockDecl::param_const_iterator I = BD->param_begin(),
       E = BD->param_end(); I != E; ++I) {
    ParmVarDecl *param = *I;
    EmitDelegateCallArg(CallArgs, param);
  EmitForwardingCallToLambda(Lambda, CallArgs);
/// EmitLambdaToBlockPointerBody - Emit the body of the lambda-to-block
/// conversion; variadic call operators are not supported.
void CodeGenFunction::EmitLambdaToBlockPointerBody(FunctionArgList &Args) {
  if (cast<CXXMethodDecl>(CurFuncDecl)->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator forward.
    CGM.ErrorUnsupported(CurFuncDecl, "lambda conversion to variadic function");
  EmitFunctionBody(Args);
/// EmitLambdaDelegatingInvokeBody - Emit the body of a lambda's invoker as a
/// forwarding call to the lambda's call operator.
void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD) {
  const CXXRecordDecl *Lambda = MD->getParent();
  // Start building arguments for forwarding call
  CallArgList CallArgs;
  QualType ThisType = getContext().getPointerType(getContext().getRecordType(Lambda));
  // There is no real lambda object here, so pass undef as 'this'; the call
  // operator of a captureless lambda should never dereference it.
  llvm::Value *ThisPtr = llvm::UndefValue::get(getTypes().ConvertType(ThisType));
  CallArgs.add(RValue::get(ThisPtr), ThisType);
  // Add the rest of the parameters.
  for (FunctionDecl::param_const_iterator I = MD->param_begin(),
       E = MD->param_end(); I != E; ++I) {
    ParmVarDecl *param = *I;
    EmitDelegateCallArg(CallArgs, param);
  EmitForwardingCallToLambda(Lambda, CallArgs);
/// EmitLambdaStaticInvokeFunction - Emit the body of a lambda's static
/// invoker (the function its conversion-to-function-pointer returns) by
/// delegating to the call operator; variadic call operators unsupported.
void CodeGenFunction::EmitLambdaStaticInvokeFunction(const CXXMethodDecl *MD) {
  if (MD->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator forward.
    CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");
  EmitLambdaDelegatingInvokeBody(MD);