1 //===--- CGClass.cpp - Emit LLVM Code for C++ classes ---------------------===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This contains code dealing with C++ code generation of classes
12 //===----------------------------------------------------------------------===//
14 #include "CGDebugInfo.h"
15 #include "CodeGenFunction.h"
16 #include "clang/AST/CXXInheritance.h"
17 #include "clang/AST/EvaluatedExprVisitor.h"
18 #include "clang/AST/RecordLayout.h"
19 #include "clang/AST/StmtCXX.h"
20 #include "clang/Frontend/CodeGenOptions.h"
22 using namespace clang;
23 using namespace CodeGen;
// Sums the static (non-virtual) base-class offsets along the inheritance
// path [Start, End), starting from DerivedClass. The path must contain no
// virtual bases (asserted per step). Offsets come from the AST record
// layout and are accumulated in bits.
// NOTE(review): the declaration of the 'Offset' accumulator and the return
// statement fall on lines elided from this excerpt — confirm against the
// full file.
26 ComputeNonVirtualBaseClassOffset(ASTContext &Context,
27 const CXXRecordDecl *DerivedClass,
28 CastExpr::path_const_iterator Start,
29 CastExpr::path_const_iterator End) {
32 const CXXRecordDecl *RD = DerivedClass;
34 for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
35 const CXXBaseSpecifier *Base = *I;
36 assert(!Base->isVirtual() && "Should not see virtual bases here!");
// Layout of the class currently being walked; each iteration descends
// one level into the hierarchy.
39 const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
41 const CXXRecordDecl *BaseDecl =
42 cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());
// getBaseClassOffsetInBits returns bits, hence the /8 conversion noted
// below when a byte offset is needed.
45 Offset += Layout.getBaseClassOffsetInBits(BaseDecl);
50 // FIXME: We should not use / 8 here.
// Returns the constant byte offset of a non-virtual base-class path as an
// LLVM ConstantInt of ptrdiff_t type, or (presumably, from callers that
// test the result for null) a null value when the offset is zero — the
// zero-offset early-return line is elided from this excerpt; confirm.
55 CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
56 CastExpr::path_const_iterator PathBegin,
57 CastExpr::path_const_iterator PathEnd) {
58 assert(PathBegin != PathEnd && "Base path should not be empty!");
// Delegate the walk to the static helper above.
61 ComputeNonVirtualBaseClassOffset(getContext(), ClassDecl,
// The offset is materialized with the target's pointer-difference type so
// it can feed directly into pointer arithmetic.
66 const llvm::Type *PtrDiffTy =
67 Types.ConvertType(getContext().getPointerDiffType());
69 return llvm::ConstantInt::get(PtrDiffTy, Offset);
72 /// Gets the address of a direct base class within a complete object.
73 /// This should only be used for (1) non-virtual bases or (2) virtual bases
74 /// when the type is known to be complete (e.g. in complete destructors).
76 /// The object pointed to by 'This' is assumed to be non-null.
78 CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(llvm::Value *This,
79 const CXXRecordDecl *Derived,
80 const CXXRecordDecl *Base,
82 // 'this' must be a pointer (in some address space) to Derived.
83 assert(This->getType()->isPointerTy() &&
84 cast<llvm::PointerType>(This->getType())->getElementType()
85 == ConvertType(Derived));
87 // Compute the offset of the virtual base.
89 const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
// The two assignments below are the arms of a virtual-vs-non-virtual base
// selection; the branching condition (and a 'BaseIsVirtual' parameter) sit
// on lines elided from this excerpt — TODO confirm against the full file.
91 Offset = Layout.getVBaseClassOffsetInBits(Base);
93 Offset = Layout.getBaseClassOffsetInBits(Base);
95 // Shift and cast down to the base type.
96 // TODO: for complete types, this should be possible with a GEP.
97 llvm::Value *V = This;
// Byte-wise pointer adjustment: cast to i8*, GEP by the byte offset
// (layout offsets are in bits, hence / 8), then cast to Base*.
99 const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(getLLVMContext());
100 V = Builder.CreateBitCast(V, Int8PtrTy);
101 V = Builder.CreateConstInBoundsGEP1_64(V, Offset / 8);
103 V = Builder.CreateBitCast(V, ConvertType(Base)->getPointerTo());
// Adjusts ThisPtr by the sum of a compile-time non-virtual offset and a
// runtime virtual-base offset (either may be absent). The combined byte
// offset is applied with an i8* GEP; the caller is responsible for casting
// the result back to the destination pointer type.
109 ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, llvm::Value *ThisPtr,
110 uint64_t NonVirtual, llvm::Value *Virtual) {
111 const llvm::Type *PtrDiffTy =
112 CGF.ConvertType(CGF.getContext().getPointerDiffType());
// Materialize the static offset only when it is nonzero; the guarding
// condition appears to be on a line elided from this excerpt.
114 llvm::Value *NonVirtualOffset = 0;
116 NonVirtualOffset = llvm::ConstantInt::get(PtrDiffTy, NonVirtual);
// Fold the two components: both present -> add; otherwise use whichever
// one exists (the else/else-if keywords fall on elided lines).
118 llvm::Value *BaseOffset;
120 if (NonVirtualOffset)
121 BaseOffset = CGF.Builder.CreateAdd(Virtual, NonVirtualOffset);
123 BaseOffset = Virtual;
125 BaseOffset = NonVirtualOffset;
127 // Apply the base offset.
128 const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
129 ThisPtr = CGF.Builder.CreateBitCast(ThisPtr, Int8PtrTy);
130 ThisPtr = CGF.Builder.CreateGEP(ThisPtr, BaseOffset, "add.ptr");
// Converts a Derived* value to a pointer to the base class named by the
// cast path [PathBegin, PathEnd). Handles an optional leading virtual base
// (looked up at runtime unless the class is 'final', in which case the
// vbase offset can be folded statically) and, when NullCheckValue is set,
// preserves null pointers across the adjustment via a null-check diamond.
136 CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
137 const CXXRecordDecl *Derived,
138 CastExpr::path_const_iterator PathBegin,
139 CastExpr::path_const_iterator PathEnd,
140 bool NullCheckValue) {
141 assert(PathBegin != PathEnd && "Base path should not be empty!");
143 CastExpr::path_const_iterator Start = PathBegin;
144 const CXXRecordDecl *VBase = 0;
146 // Get the virtual base.
147 if ((*Start)->isVirtual()) {
// If the path starts at a virtual base, peel it off and remember it; the
// assignment target (VBase =) and Start increment fall on elided lines.
149 cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl());
// Static offset of the remaining (non-virtual) portion of the path,
// measured from VBase if one was peeled off, else from Derived.
153 uint64_t NonVirtualOffset =
154 ComputeNonVirtualBaseClassOffset(getContext(), VBase ? VBase : Derived,
157 // Get the base pointer type.
158 const llvm::Type *BasePtrTy =
159 ConvertType((PathEnd[-1])->getType())->getPointerTo();
// Fast path: no adjustment needed at all — just a pointer bitcast.
161 if (!NonVirtualOffset && !VBase) {
163 return Builder.CreateBitCast(Value, BasePtrTy);
166 llvm::BasicBlock *CastNull = 0;
167 llvm::BasicBlock *CastNotNull = 0;
168 llvm::BasicBlock *CastEnd = 0;
// Null-preserving cast: branch around the adjustment when the input is
// null so we never offset a null pointer.
170 if (NullCheckValue) {
171 CastNull = createBasicBlock("cast.null");
172 CastNotNull = createBasicBlock("cast.notnull");
173 CastEnd = createBasicBlock("cast.end");
175 llvm::Value *IsNull =
176 Builder.CreateICmpEQ(Value,
177 llvm::Constant::getNullValue(Value->getType()));
178 Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
179 EmitBlock(CastNotNull);
182 llvm::Value *VirtualOffset = 0;
// Devirtualization: a 'final' class's dynamic type is known, so the
// virtual-base offset can be read straight from the record layout instead
// of through the vtable.
185 if (Derived->hasAttr<FinalAttr>()) {
188 const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
190 uint64_t VBaseOffset = Layout.getVBaseClassOffsetInBits(VBase);
191 NonVirtualOffset += VBaseOffset / 8;
// Otherwise the vbase offset must be loaded at runtime.
193 VirtualOffset = GetVirtualBaseClassOffset(Value, Derived, VBase);
196 // Apply the offsets.
197 Value = ApplyNonVirtualAndVirtualOffset(*this, Value, NonVirtualOffset,
201 Value = Builder.CreateBitCast(Value, BasePtrTy);
// Merge the null and not-null paths with a two-incoming PHI.
203 if (NullCheckValue) {
204 Builder.CreateBr(CastEnd);
206 Builder.CreateBr(CastEnd);
209 llvm::PHINode *PHI = Builder.CreatePHI(Value->getType());
210 PHI->reserveOperandSpace(2);
211 PHI->addIncoming(Value, CastNotNull);
212 PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
// Inverse of GetAddressOfBaseClass: converts a base-class pointer back to
// Derived* by SUBTRACTING the (purely non-virtual) path offset. The
// subtraction is done in integer space (ptrtoint/sub/inttoptr) because a
// negative GEP in bounds cannot be assumed here. Null pointers are
// preserved via the same null-check diamond when NullCheckValue is set.
221 CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value,
222 const CXXRecordDecl *Derived,
223 CastExpr::path_const_iterator PathBegin,
224 CastExpr::path_const_iterator PathEnd,
225 bool NullCheckValue) {
226 assert(PathBegin != PathEnd && "Base path should not be empty!");
229 getContext().getCanonicalType(getContext().getTagDeclType(Derived));
230 const llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo();
// Constant byte offset of the base subobject within Derived (null when
// the offset is zero).
232 llvm::Value *NonVirtualOffset =
233 CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);
235 if (!NonVirtualOffset) {
236 // No offset, we can just cast back.
237 return Builder.CreateBitCast(Value, DerivedPtrTy);
240 llvm::BasicBlock *CastNull = 0;
241 llvm::BasicBlock *CastNotNull = 0;
242 llvm::BasicBlock *CastEnd = 0;
// Null-preserving path, mirroring GetAddressOfBaseClass.
244 if (NullCheckValue) {
245 CastNull = createBasicBlock("cast.null");
246 CastNotNull = createBasicBlock("cast.notnull");
247 CastEnd = createBasicBlock("cast.end");
249 llvm::Value *IsNull =
250 Builder.CreateICmpEQ(Value,
251 llvm::Constant::getNullValue(Value->getType()));
252 Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
253 EmitBlock(CastNotNull);
// Down-adjust: pointer -> integer, subtract the base offset, back to
// Derived*.
257 Value = Builder.CreatePtrToInt(Value, NonVirtualOffset->getType());
258 Value = Builder.CreateSub(Value, NonVirtualOffset);
259 Value = Builder.CreateIntToPtr(Value, DerivedPtrTy);
262 Value = Builder.CreateBitCast(Value, DerivedPtrTy);
// Merge null / not-null results.
264 if (NullCheckValue) {
265 Builder.CreateBr(CastEnd);
267 Builder.CreateBr(CastEnd);
270 llvm::PHINode *PHI = Builder.CreatePHI(Value->getType());
271 PHI->reserveOperandSpace(2);
272 PHI->addIncoming(Value, CastNotNull);
273 PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
281 /// GetVTTParameter - Return the VTT parameter that should be passed to a
282 /// base constructor/destructor with virtual bases.
283 static llvm::Value *GetVTTParameter(CodeGenFunction &CGF, GlobalDecl GD,
284 bool ForVirtualBase) {
285 if (!CodeGenVTables::needsVTTParameter(GD)) {
286 // This constructor/destructor does not need a VTT parameter.
// (The early return here is on a line elided from this excerpt.)
290 const CXXRecordDecl *RD = cast<CXXMethodDecl>(CGF.CurFuncDecl)->getParent();
291 const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();
295 uint64_t SubVTTIndex;
297 // If the record matches the base, this is the complete ctor/dtor
298 // variant calling the base variant in a class with virtual bases.
// In that RD == Base case the sub-VTT index is simply 0 (the guarding
// comparison and index assignment fall on elided lines).
300 assert(!CodeGenVTables::needsVTTParameter(CGF.CurGD) &&
301 "doing no-op VTT offset in base dtor/ctor?");
302 assert(!ForVirtualBase && "Can't have same class as virtual base!");
// Otherwise locate Base's subobject within RD and look up its sub-VTT
// index; virtual bases use the vbase offset, direct bases the static one.
305 const ASTRecordLayout &Layout =
306 CGF.getContext().getASTRecordLayout(RD);
307 uint64_t BaseOffset = ForVirtualBase ?
308 Layout.getVBaseClassOffsetInBits(Base) :
309 Layout.getBaseClassOffsetInBits(Base);
312 CGF.CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
313 assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
316 if (CodeGenVTables::needsVTTParameter(CGF.CurGD)) {
317 // A VTT parameter was passed to the constructor, use it.
318 VTT = CGF.LoadCXXVTT();
319 VTT = CGF.Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex);
321 // We're the complete constructor, so get the VTT by name.
322 VTT = CGF.CGM.getVTables().GetAddrOfVTT(RD);
323 VTT = CGF.Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex);
330 /// Call the destructor for a direct base class.
// EH-scope cleanup that, when popped or run on the exception path, invokes
// the base-variant destructor of BaseClass on the base subobject of the
// current 'this'.
331 struct CallBaseDtor : EHScopeStack::Cleanup {
332 const CXXRecordDecl *BaseClass;
// (The BaseIsVirtual member declaration is on a line elided from this
// excerpt.)
334 CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
335 : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}
337 void Emit(CodeGenFunction &CGF, bool IsForEH) {
// The derived class is recovered from the method currently being emitted.
338 const CXXRecordDecl *DerivedClass =
339 cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();
341 const CXXDestructorDecl *D = BaseClass->getDestructor();
// Address the base subobject within the complete object, then call its
// base-variant destructor.
343 CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThis(),
344 DerivedClass, BaseClass,
346 CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual, Addr);
350 /// A visitor which checks whether an initializer uses 'this' in a
351 /// way which requires the vtable to be properly set.
352 struct DynamicThisUseChecker : EvaluatedExprVisitor<DynamicThisUseChecker> {
353 typedef EvaluatedExprVisitor<DynamicThisUseChecker> super;
// UsesThis (declared on an elided line) latches to true and is never
// reset; conservative over-approximation is acceptable here since a false
// positive merely initializes vtable pointers earlier than needed.
357 DynamicThisUseChecker(ASTContext &C) : super(C), UsesThis(false) {}
359 // Black-list all explicit and implicit references to 'this'.
361 // Do we need to worry about external references to 'this' derived
362 // from arbitrary code? If so, then anything which runs arbitrary
363 // external code might potentially access the vtable.
364 void VisitCXXThisExpr(CXXThisExpr *E) { UsesThis = true; }
// Returns true if the given base-initializer expression references 'this'
// (explicitly or implicitly), meaning vtable pointers must be set up
// before evaluating it.
368 static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
369 DynamicThisUseChecker Checker(C);
// Visit requires a non-const Expr*; the visitor does not mutate the AST.
370 Checker.Visit(const_cast<Expr*>(Init));
371 return Checker.UsesThis;
// Emits a single base-class member-initializer from a constructor's
// initializer list: locates the base subobject, evaluates the initializer
// into it as an aggregate, and registers an EH cleanup to destroy the base
// if a later initializer throws.
374 static void EmitBaseInitializer(CodeGenFunction &CGF,
375 const CXXRecordDecl *ClassDecl,
376 CXXCtorInitializer *BaseInit,
377 CXXCtorType CtorType) {
378 assert(BaseInit->isBaseInitializer() &&
379 "Must have base initializer!");
381 llvm::Value *ThisPtr = CGF.LoadCXXThis();
383 const Type *BaseType = BaseInit->getBaseClass();
384 CXXRecordDecl *BaseClassDecl =
385 cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());
387 bool isBaseVirtual = BaseInit->isBaseVirtual();
389 // The base constructor doesn't construct virtual bases.
// (The early return for that case is on an elided line.)
390 if (CtorType == Ctor_Base && isBaseVirtual)
393 // If the initializer for the base (other than the constructor
394 // itself) accesses 'this' in any way, we need to initialize the
// ... vtable pointers first, since the dynamic type is mid-construction.
396 if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
397 CGF.InitializeVTablePointers(ClassDecl);
399 // We can pretend to be a complete class because it only matters for
400 // virtual bases, and we only do virtual bases for complete ctors.
402 CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
// Construct directly into the base subobject's storage.
406 AggValueSlot AggSlot = AggValueSlot::forAddr(V, false, /*Lifetime*/ true);
408 CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);
// With exceptions enabled, a non-trivial base must be torn down if a
// subsequent initializer throws.
410 if (CGF.CGM.getLangOptions().areExceptionsEnabled() &&
411 !BaseClassDecl->hasTrivialDestructor())
412 CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
// Recursively emits initialization of an (possibly multi-dimensional)
// array member. Each recursion level peels one array dimension and emits a
// counted for-loop over it; at the innermost level (Index equals the
// number of array-index variables) the element initializer itself is
// evaluated, offset by the accumulated index.
416 static void EmitAggMemberInitializer(CodeGenFunction &CGF,
418 llvm::Value *ArrayIndexVar,
419 CXXCtorInitializer *MemberInit,
// (An LValue LHS parameter, the element QualType T, and unsigned Index
// appear to be among the elided parameter lines — confirm in full file.)
422 if (Index == MemberInit->getNumArrayIndices()) {
// Base case: all dimensions peeled; emit the element initializer.
423 CodeGenFunction::RunCleanupsScope Cleanups(CGF);
425 llvm::Value *Dest = LHS.getAddress();
427 // If we have an array index variable, load it and use it as an offset.
428 // Then, increment the value.
429 llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar);
430 Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress");
431 llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1);
432 Next = CGF.Builder.CreateAdd(ArrayIndex, Next, "inc");
433 CGF.Builder.CreateStore(Next, ArrayIndexVar);
436 AggValueSlot Slot = AggValueSlot::forAddr(Dest, LHS.isVolatileQualified(),
439 CGF.EmitAggExpr(MemberInit->getInit(), Slot);
// Recursive case: this level must be a constant-size array dimension.
444 const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T);
445 assert(Array && "Array initialization without the array type?");
446 llvm::Value *IndexVar
447 = CGF.GetAddrOfLocalVar(MemberInit->getArrayIndex(Index));
448 assert(IndexVar && "Array index variable not loaded");
450 // Initialize this index variable to zero.
452 = llvm::Constant::getNullValue(
453 CGF.ConvertType(CGF.getContext().getSizeType()))
454 CGF.Builder.CreateStore(Zero, IndexVar);
456 // Start the loop with a block that tests the condition.
457 llvm::BasicBlock *CondBlock = CGF.createBasicBlock("for.cond");
458 llvm::BasicBlock *AfterFor = CGF.createBasicBlock("for.end");
460 CGF.EmitBlock(CondBlock);
462 llvm::BasicBlock *ForBody = CGF.createBasicBlock("for.body");
463 // Generate: if (loop-index < number-of-elements) fall to the loop body,
464 // otherwise, go to the block after the for-loop.
465 uint64_t NumElements = Array->getSize().getZExtValue();
466 llvm::Value *Counter = CGF.Builder.CreateLoad(IndexVar);
467 llvm::Value *NumElementsPtr =
468 llvm::ConstantInt::get(Counter->getType(), NumElements);
469 llvm::Value *IsLess = CGF.Builder.CreateICmpULT(Counter, NumElementsPtr,
472 // If the condition is true, execute the body.
473 CGF.Builder.CreateCondBr(IsLess, ForBody, AfterFor);
475 CGF.EmitBlock(ForBody);
476 llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc");
// Cleanups from each iteration's element initializer must run before the
// next iteration (C++ [class.temporary]p4 semantics for array elements).
479 CodeGenFunction::RunCleanupsScope Cleanups(CGF);
481 // Inside the loop body recurse to emit the inner loop or, eventually, the
// constructor call for one element.
483 EmitAggMemberInitializer(CGF, LHS, ArrayIndexVar, MemberInit,
484 Array->getElementType(), Index + 1);
487 CGF.EmitBlock(ContinueBlock);
489 // Emit the increment of the loop counter.
490 llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
491 Counter = CGF.Builder.CreateLoad(IndexVar);
492 NextVal = CGF.Builder.CreateAdd(Counter, NextVal, "inc");
493 CGF.Builder.CreateStore(NextVal, IndexVar);
495 // Finally, branch back up to the condition for the next iteration.
496 CGF.EmitBranch(CondBlock);
498 // Emit the fall-through block.
499 CGF.EmitBlock(AfterFor, true);
// EH-scope cleanup that destroys a single non-static data member (Field)
// of the current 'this' by calling the member's complete-object
// destructor. Pushed after a member is constructed so it is torn down if a
// later initializer throws.
503 struct CallMemberDtor : EHScopeStack::Cleanup {
505 CXXDestructorDecl *Dtor;
507 CallMemberDtor(FieldDecl *Field, CXXDestructorDecl *Dtor)
508 : Field(Field), Dtor(Dtor) {}
510 void Emit(CodeGenFunction &CGF, bool IsForEH) {
511 // FIXME: Is this OK for C++0x delegating constructors?
512 llvm::Value *ThisPtr = CGF.LoadCXXThis();
513 LValue LHS = CGF.EmitLValueForField(ThisPtr, Field, 0);
515 CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
// Emits one non-static data member initializer from a constructor's
// initializer list. Dispatches on the member's type: reference binding,
// null-initialization of uninitialized arrays, scalar store, complex
// store, or aggregate/array initialization (with a memcpy fast path for
// trivially-copyable arrays in implicit copy constructors). Finally pushes
// an EH cleanup for members with non-trivial destructors.
521 static void EmitMemberInitializer(CodeGenFunction &CGF,
522 const CXXRecordDecl *ClassDecl,
523 CXXCtorInitializer *MemberInit,
524 const CXXConstructorDecl *Constructor,
525 FunctionArgList &Args) {
526 assert(MemberInit->isAnyMemberInitializer() &&
527 "Must have member initializer!");
529 // non-static data member initializers.
530 FieldDecl *Field = MemberInit->getAnyMember();
531 QualType FieldType = CGF.getContext().getCanonicalType(Field->getType());
533 llvm::Value *ThisPtr = CGF.LoadCXXThis();
536 // If we are initializing an anonymous union field, drill down to the field.
537 if (MemberInit->isIndirectMemberInitializer()) {
538 LHS = CGF.EmitLValueForAnonRecordField(ThisPtr,
539 MemberInit->getIndirectMember(), 0);
540 FieldType = MemberInit->getIndirectMember()->getAnonField()->getType();
// Ordinary (direct) member: form an lvalue suitable for initialization.
542 LHS = CGF.EmitLValueForFieldInitialization(ThisPtr, Field, 0);
545 // FIXME: If there's no initializer and the CXXCtorInitializer
546 // was implicitly generated, we shouldn't be zeroing memory.
548 if (FieldType->isReferenceType()) {
// References: bind and store the referent's address.
549 RHS = CGF.EmitReferenceBindingToExpr(MemberInit->getInit(), Field);
550 CGF.EmitStoreThroughLValue(RHS, LHS, FieldType);
551 } else if (FieldType->isArrayType() && !MemberInit->getInit()) {
// Array with no initializer expression: zero-fill (see FIXME above).
552 CGF.EmitNullInitialization(LHS.getAddress(), Field->getType());
553 } else if (!CGF.hasAggregateLLVMType(Field->getType())) {
// Scalar member: evaluate and store.
554 RHS = RValue::get(CGF.EmitScalarExpr(MemberInit->getInit()));
555 CGF.EmitStoreThroughLValue(RHS, LHS, FieldType);
556 } else if (MemberInit->getInit()->getType()->isAnyComplexType()) {
557 CGF.EmitComplexExprIntoAddr(MemberInit->getInit(), LHS.getAddress(),
558 LHS.isVolatileQualified());
// Aggregate case (possibly an array of aggregates).
560 llvm::Value *ArrayIndexVar = 0;
561 const ConstantArrayType *Array
562 = CGF.getContext().getAsConstantArrayType(FieldType);
// Implicit copy constructors copying an array member: iterate elementwise
// with a synthesized index, unless the trivial-copy fast path below fires.
563 if (Array && Constructor->isImplicit() &&
564 Constructor->isCopyConstructor()) {
565 const llvm::Type *SizeTy
566 = CGF.ConvertType(CGF.getContext().getSizeType());
568 // The LHS is a pointer to the first object we'll be constructing, as
// a flattened pointer to the innermost element type.
570 QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
571 const llvm::Type *BasePtr = CGF.ConvertType(BaseElementTy);
572 BasePtr = llvm::PointerType::getUnqual(BasePtr);
573 llvm::Value *BaseAddrPtr = CGF.Builder.CreateBitCast(LHS.getAddress(),
575 LHS = CGF.MakeAddrLValue(BaseAddrPtr, BaseElementTy);
577 // Create an array index that will be used to walk over all of the
578 // objects we're constructing.
579 ArrayIndexVar = CGF.CreateTempAlloca(SizeTy, "object.index");
580 llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
581 CGF.Builder.CreateStore(Zero, ArrayIndexVar);
583 // If we are copying an array of scalars or classes with trivial copy
584 // constructors, perform a single aggregate copy.
585 const RecordType *Record = BaseElementTy->getAs<RecordType>();
// (The full guard condition for this fast path spans elided lines.)
587 cast<CXXRecordDecl>(Record->getDecl())->hasTrivialCopyConstructor()) {
588 // Find the source pointer. We knows it's the last argument because
589 // we know we're in a copy constructor.
590 unsigned SrcArgIndex = Args.size() - 1;
592 = CGF.Builder.CreateLoad(
593 CGF.GetAddrOfLocalVar(Args[SrcArgIndex].first));
594 LValue Src = CGF.EmitLValueForFieldInitialization(SrcPtr, Field, 0);
596 // Copy the aggregate.
597 CGF.EmitAggregateCopy(LHS.getAddress(), Src.getAddress(), FieldType,
598 LHS.isVolatileQualified());
602 // Emit the block variables for the array indices, if any.
603 for (unsigned I = 0, N = MemberInit->getNumArrayIndices(); I != N; ++I)
604 CGF.EmitAutoVarDecl(*MemberInit->getArrayIndex(I));
// General aggregate path: per-element loop nest (no-op loops for non-
// array members, where NumArrayIndices is zero).
607 EmitAggMemberInitializer(CGF, LHS, ArrayIndexVar, MemberInit, FieldType, 0);
// Without exceptions there is nothing to unwind, so no cleanup needed.
609 if (!CGF.CGM.getLangOptions().areExceptionsEnabled())
612 // FIXME: If we have an array of classes w/ non-trivial destructors,
613 // we need to destroy in reverse order of construction along the exception
// path.
615 const RecordType *RT = FieldType->getAs<RecordType>();
619 CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
620 if (!RD->hasTrivialDestructor())
621 CGF.EHStack.pushCleanup<CallMemberDtor>(EHCleanup, Field,
622 RD->getDestructor());
626 /// Checks whether the given constructor is a valid subject for the
627 /// complete-to-base constructor delegation optimization, i.e.
628 /// emitting the complete constructor as a simple call to the base
// constructor variant. Returns false when virtual bases or varargs make
// the delegation unsafe/impossible.
630 static bool IsConstructorDelegationValid(const CXXConstructorDecl *Ctor) {
632 // Currently we disable the optimization for classes with virtual
633 // bases because (1) the addresses of parameter variables need to be
634 // consistent across all initializers but (2) the delegate function
635 // call necessarily creates a second copy of the parameter variable.
637 // The limiting example (purely theoretical AFAIK):
638 // struct A { A(int &c) { c++; } };
639 // struct B : virtual A {
640 // B(int count) : A(count) { printf("%d\n", count); }
642 // ...although even this example could in principle be emitted as a
643 // delegation since the address of the parameter doesn't escape.
644 if (Ctor->getParent()->getNumVBases()) {
645 // TODO: white-list trivial vbase initializers. This case wouldn't
646 // be subject to the restrictions below.
648 // TODO: white-list cases where:
649 // - there are no non-reference parameters to the constructor
650 // - the initializers don't access any non-reference parameters
651 // - the initializers don't take the address of non-reference
// parameters
654 // If we ever add any of the above cases, remember that:
655 // - function-try-blocks will always blacklist this optimization
656 // - we need to perform the constructor prologue and cleanup in
657 // EmitConstructorBody.
// (The 'return false;' for the vbase case falls on an elided line.)
662 // We also disable the optimization for variadic functions because
663 // it's impossible to "re-pass" varargs.
664 if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic())
670 /// EmitConstructorBody - Emits the body of the current constructor.
// Tries complete->base delegation first; otherwise emits the ctor
// prologue (base/member initializers), the body (entering a function-try-
// block around the prologue if present), then pops the initializer
// cleanups.
671 void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
672 const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
673 CXXCtorType CtorType = CurGD.getCtorType();
675 // Before we go any further, try the complete->base constructor
676 // delegation optimization.
677 if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor)) {
678 if (CGDebugInfo *DI = getDebugInfo())
679 DI->EmitStopPoint(Builder);
680 EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args);
// (An early return after delegating appears to be on an elided line.)
684 Stmt *Body = Ctor->getBody();
686 // Enter the function-try-block before the constructor prologue if
// applicable, so the handler covers initializer exceptions too.
688 bool IsTryBody = (Body && isa<CXXTryStmt>(Body));
690 EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);
// Remember the cleanup depth so initializer cleanups can be popped as a
// group after the body.
692 EHScopeStack::stable_iterator CleanupDepth = EHStack.stable_begin();
694 // Emit the constructor prologue, i.e. the base and member
// initializers.
696 EmitCtorPrologue(Ctor, CtorType, Args);
698 // Emit the body of the statement.
700 EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
704 // Emit any cleanup blocks associated with the member or base
705 // initializers, which includes (along the exceptional path) the
706 // destructors for those members and bases that were fully
// constructed before the exception was thrown.
708 PopCleanupBlocks(CleanupDepth);
711 ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
714 /// EmitCtorPrologue - This routine generates necessary code to initialize
715 /// base classes and non-static data members belonging to this constructor.
// Bases are initialized as encountered, then vtable pointers are set, and
// only then are member initializers emitted — members may rely on the
// vtable being valid.
716 void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
717 CXXCtorType CtorType,
718 FunctionArgList &Args) {
719 const CXXRecordDecl *ClassDecl = CD->getParent();
// Member initializers are deferred until after all bases + vptrs.
721 llvm::SmallVector<CXXCtorInitializer *, 8> MemberInitializers;
723 for (CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
726 CXXCtorInitializer *Member = (*B);
728 if (Member->isBaseInitializer())
729 EmitBaseInitializer(*this, ClassDecl, Member, CtorType);
731 MemberInitializers.push_back(Member);
734 InitializeVTablePointers(ClassDecl);
736 for (unsigned I = 0, E = MemberInitializers.size(); I != E; ++I)
737 EmitMemberInitializer(*this, ClassDecl, MemberInitializers[I], CD, Args);
740 /// EmitDestructorBody - Emits the body of the current destructor.
// Deleting destructors delegate to the complete variant then run the
// operator-delete cleanup. Complete/base variants push epilogue cleanups
// (EnterDtorCleanups), emit the body (or delegate complete->base when no
// function-try-block interferes), and force the epilogue cleanups.
741 void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
742 const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
743 CXXDtorType DtorType = CurGD.getDtorType();
745 // The call to operator delete in a deleting destructor happens
746 // outside of the function-try-block, which means it's always
747 // possible to delegate the destructor body to the complete
748 // destructor. Do so.
749 if (DtorType == Dtor_Deleting) {
750 EnterDtorCleanups(Dtor, Dtor_Deleting);
751 EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
// (The return terminating the deleting case is on an elided line.)
757 Stmt *Body = Dtor->getBody();
759 // If the body is a function-try-block, enter the try before
// the cleanups so the handler covers them.
761 bool isTryBody = (Body && isa<CXXTryStmt>(Body));
763 EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);
765 // Enter the epilogue cleanups.
766 RunCleanupsScope DtorEpilogue(*this);
768 // If this is the complete variant, just invoke the base variant;
769 // the epilogue will destruct the virtual bases. But we can't do
770 // this optimization if the body is a function-try-block, because
771 // we'd introduce *two* handler blocks.
// (A switch over DtorType begins on an elided line.)
773 case Dtor_Deleting: llvm_unreachable("already handled deleting case");
776 // Enter the cleanup scopes for virtual bases.
777 EnterDtorCleanups(Dtor, Dtor_Complete);
780 EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
784 // Fallthrough: act like we're in the base variant.
787 // Enter the cleanup scopes for fields and non-virtual bases.
788 EnterDtorCleanups(Dtor, Dtor_Base);
790 // Initialize the vtable pointers before entering the body.
// (Destructors must reset vptrs so virtual calls in the body dispatch
// to this class's overriders, per [class.cdtor].)
791 InitializeVTablePointers(Dtor->getParent());
794 EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
798 assert(Dtor->isImplicit() && "bodyless dtor not implicit");
799 // nothing to do besides what's in the epilogue
801 // -fapple-kext must inline any call to this dtor into
802 // the caller's body.
803 if (getContext().getLangOptions().AppleKext)
804 CurFn->addFnAttr(llvm::Attribute::AlwaysInline);
808 // Jump out through the epilogue cleanups.
809 DtorEpilogue.ForceCleanup();
811 // Exit the try if applicable.
813 ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
817 /// Call the operator delete associated with the current destructor.
// Cleanup pushed for deleting destructors: frees 'this' via the operator
// delete that Sema attached to the destructor.
818 struct CallDtorDelete : EHScopeStack::Cleanup {
821 void Emit(CodeGenFunction &CGF, bool IsForEH) {
822 const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
823 const CXXRecordDecl *ClassDecl = Dtor->getParent();
824 CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(),
825 CGF.getContext().getTagDeclType(ClassDecl));
// Destructor-epilogue cleanup for an array-typed field: destroys every
// element of the member array (of class type with non-trivial dtor) via
// the aggregate destructor-call loop.
829 struct CallArrayFieldDtor : EHScopeStack::Cleanup {
830 const FieldDecl *Field;
831 CallArrayFieldDtor(const FieldDecl *Field) : Field(Field) {}
833 void Emit(CodeGenFunction &CGF, bool IsForEH) {
834 QualType FieldType = Field->getType();
835 const ConstantArrayType *Array =
836 CGF.getContext().getAsConstantArrayType(FieldType);
// Strip all array dimensions to get the element class type.
839 CGF.getContext().getBaseElementType(Array->getElementType());
840 const CXXRecordDecl *FieldClassDecl = BaseType->getAsCXXRecordDecl();
842 llvm::Value *ThisPtr = CGF.LoadCXXThis();
843 LValue LHS = CGF.EmitLValueForField(ThisPtr, Field,
844 // FIXME: Qualifiers?
845 /*CVRQualifiers=*/0);
// Flatten the array address to an element pointer for the loop.
847 const llvm::Type *BasePtr = CGF.ConvertType(BaseType)->getPointerTo();
848 llvm::Value *BaseAddrPtr =
849 CGF.Builder.CreateBitCast(LHS.getAddress(), BasePtr);
850 CGF.EmitCXXAggrDestructorCall(FieldClassDecl->getDestructor(),
// Destructor-epilogue cleanup for a single (non-array) class-typed field:
// calls the field's complete-object destructor on the member subobject.
855 struct CallFieldDtor : EHScopeStack::Cleanup {
856 const FieldDecl *Field;
857 CallFieldDtor(const FieldDecl *Field) : Field(Field) {}
859 void Emit(CodeGenFunction &CGF, bool IsForEH) {
860 const CXXRecordDecl *FieldClassDecl =
861 Field->getType()->getAsCXXRecordDecl();
863 llvm::Value *ThisPtr = CGF.LoadCXXThis();
864 LValue LHS = CGF.EmitLValueForField(ThisPtr, Field,
865 // FIXME: Qualifiers?
866 /*CVRQualifiers=*/0);
868 CGF.EmitCXXDestructorCall(FieldClassDecl->getDestructor(),
869 Dtor_Complete, /*ForVirtualBase=*/false,
875 /// EmitDtorEpilogue - Emit all code that comes at the end of class's
876 /// destructor. This is to call destructors on members and base classes
877 /// in reverse order of their construction.
// Cleanups are PUSHED in construction (forward) order; the EH stack pops
// them in reverse, yielding reverse-of-construction destruction.
// Deleting: push operator delete. Complete: push virtual-base dtors.
// Base: push non-virtual-base dtors then field dtors.
878 void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
879 CXXDtorType DtorType) {
880 assert(!DD->isTrivial() &&
881 "Should not emit dtor epilogue for trivial dtor!");
883 // The deleting-destructor phase just needs to call the appropriate
884 // operator delete that Sema picked up.
885 if (DtorType == Dtor_Deleting) {
886 assert(DD->getOperatorDelete() &&
887 "operator delete missing - EmitDtorEpilogue");
888 EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
892 const CXXRecordDecl *ClassDecl = DD->getParent();
894 // The complete-destructor phase just destructs all the virtual bases.
895 if (DtorType == Dtor_Complete) {
897 // We push them in the forward order so that they'll be popped in
898 // the reverse order.
899 for (CXXRecordDecl::base_class_const_iterator I =
900 ClassDecl->vbases_begin(), E = ClassDecl->vbases_end();
902 const CXXBaseSpecifier &Base = *I;
903 CXXRecordDecl *BaseClassDecl
904 = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());
906 // Ignore trivial destructors.
907 if (BaseClassDecl->hasTrivialDestructor())
910 EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
912 /*BaseIsVirtual*/ true);
918 assert(DtorType == Dtor_Base);
920 // Destroy non-virtual bases.
921 for (CXXRecordDecl::base_class_const_iterator I =
922 ClassDecl->bases_begin(), E = ClassDecl->bases_end(); I != E; ++I) {
923 const CXXBaseSpecifier &Base = *I;
925 // Ignore virtual bases.
// (Virtual bases belong to the complete-destructor phase above.)
926 if (Base.isVirtual())
929 CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();
931 // Ignore trivial destructors.
932 if (BaseClassDecl->hasTrivialDestructor())
935 EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
937 /*BaseIsVirtual*/ false);
940 // Destroy direct fields.
941 llvm::SmallVector<const FieldDecl *, 16> FieldDecls;
942 for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
943 E = ClassDecl->field_end(); I != E; ++I) {
944 const FieldDecl *Field = *I;
946 QualType FieldType = getContext().getCanonicalType(Field->getType());
947 const ConstantArrayType *Array =
948 getContext().getAsConstantArrayType(FieldType);
// For array members, the element class type determines triviality.
950 FieldType = getContext().getBaseElementType(Array->getElementType());
952 const RecordType *RT = FieldType->getAs<RecordType>();
956 CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
957 if (FieldClassDecl->hasTrivialDestructor())
// Array fields use the element-looping cleanup; scalars the direct call.
961 EHStack.pushCleanup<CallArrayFieldDtor>(NormalAndEHCleanup, Field);
963 EHStack.pushCleanup<CallFieldDtor>(NormalAndEHCleanup, Field);
967 /// EmitCXXAggrConstructorCall - This routine essentially creates a (nested)
968 /// for-loop to call the default constructor on individual members of the
// array.
970 /// 'D' is the default constructor for elements of the array, 'ArrayTy' is the
971 /// array type and 'ArrayPtr' points to the beginning fo the array.
972 /// It is assumed that all relevant checks have been made by the caller.
974 /// \param ZeroInitialization True if each element should be zero-initialized
975 /// before it is constructed.
// Convenience overload: flattens the (possibly nested) constant array
// type into a total element count and forwards to the runtime-count
// overload below.
977 CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
978 const ConstantArrayType *ArrayTy,
979 llvm::Value *ArrayPtr,
980 CallExpr::const_arg_iterator ArgBeg,
981 CallExpr::const_arg_iterator ArgEnd,
982 bool ZeroInitialization) {
984 const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());
// getConstantArrayElementCount multiplies out all array dimensions.
985 llvm::Value * NumElements =
986 llvm::ConstantInt::get(SizeTy,
987 getContext().getConstantArrayElementCount(ArrayTy));
989 EmitCXXAggrConstructorCall(D, NumElements, ArrayPtr, ArgBeg, ArgEnd,
// Emits IR equivalent to:
//   for (size_t i = 0; i < NumElements; ++i)
//     construct ArrayPtr[i];
// optionally zero-initializing each element's storage first. Each iteration
// runs inside its own cleanup scope so that temporaries created by default
// arguments are destroyed before the next element is constructed.
994 CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
995 llvm::Value *NumElements,
996 llvm::Value *ArrayPtr,
997 CallExpr::const_arg_iterator ArgBeg,
998 CallExpr::const_arg_iterator ArgEnd,
999 bool ZeroInitialization) {
1000 const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());
1002 // Create a temporary for the loop index and initialize it with 0.
1003 llvm::Value *IndexPtr = CreateTempAlloca(SizeTy, "loop.index");
1004 llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
1005 Builder.CreateStore(Zero, IndexPtr);
1007 // Start the loop with a block that tests the condition.
1008 llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
1009 llvm::BasicBlock *AfterFor = createBasicBlock("for.end");
1011 EmitBlock(CondBlock);
1013 llvm::BasicBlock *ForBody = createBasicBlock("for.body");
1015 // Generate: if (loop-index < number-of-elements) fall to the loop body,
1016 // otherwise, go to the block after the for-loop.
1017 llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
// Unsigned compare: the index is a size_t and can never be negative.
1018 llvm::Value *IsLess = Builder.CreateICmpULT(Counter, NumElements, "isless");
1019 // If the condition is true, execute the body.
1020 Builder.CreateCondBr(IsLess, ForBody, AfterFor);
1024 llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");
1025 // Inside the loop body, emit the constructor call on the array element.
1026 Counter = Builder.CreateLoad(IndexPtr);
// Address of the current element: ArrayPtr[Counter].
1027 llvm::Value *Address = Builder.CreateInBoundsGEP(ArrayPtr, Counter,
1030 // Zero initialize the storage, if requested.
1031 if (ZeroInitialization)
1032 EmitNullInitialization(Address,
1033 getContext().getTypeDeclType(D->getParent()))
1035 // C++ [class.temporary]p4:
1036 // There are two contexts in which temporaries are destroyed at a different
1037 // point than the end of the full-expression. The first context is when a
1038 // default constructor is called to initialize an element of an array.
1039 // If the constructor has one or more default arguments, the destruction of
1040 // every temporary created in a default argument expression is sequenced
1041 // before the construction of the next array element, if any.
1043 // Keep track of the current number of live temporaries.
// Per-iteration cleanup scope implementing the [class.temporary]p4 rule above.
1045 RunCleanupsScope Scope(*this);
// Always the complete-object constructor: array elements are never
// base-class subobjects.
1047 EmitCXXConstructorCall(D, Ctor_Complete, /*ForVirtualBase=*/false, Address,
1051 EmitBlock(ContinueBlock);
1053 // Emit the increment of the loop counter.
1054 llvm::Value *NextVal = llvm::ConstantInt::get(SizeTy, 1);
1055 Counter = Builder.CreateLoad(IndexPtr);
1056 NextVal = Builder.CreateAdd(Counter, NextVal, "inc");
1057 Builder.CreateStore(NextVal, IndexPtr);
1059 // Finally, branch back up to the condition for the next iteration.
1060 EmitBranch(CondBlock);
1062 // Emit the fall-through block.
1063 EmitBlock(AfterFor, true);
1066 /// EmitCXXAggrDestructorCall - calls the default destructor on array
1067 /// elements in reverse order of construction.
// Constant-array overload: asserts the array has a compile-time constant
// bound (VLA destruction is not supported here), emits the count as a size_t
// constant and delegates to the UpperCount-based overload.
1069 CodeGenFunction::EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
1070 const ArrayType *Array,
1071 llvm::Value *This) {
1072 const ConstantArrayType *CA = dyn_cast<ConstantArrayType>(Array);
1073 assert(CA && "Do we support VLA for destruction ?");
1074 uint64_t ElementCount = getContext().getConstantArrayElementCount(CA);
1076 const llvm::Type *SizeLTy = ConvertType(getContext().getSizeType());
1077 llvm::Value* ElementCountPtr = llvm::ConstantInt::get(SizeLTy, ElementCount);
1078 EmitCXXAggrDestructorCall(D, ElementCountPtr, This);
1081 /// EmitCXXAggrDestructorCall - calls the default destructor on array
1082 /// elements in reverse order of construction.
// Emits IR equivalent to:
//   for (size_t i = UpperCount; i != 0; --i)
//     destroy This[i - 1];
// i.e. elements are destroyed in reverse order of construction. The loop
// counter starts at the element count and the loop exits when it reaches 0.
1084 CodeGenFunction::EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
1085 llvm::Value *UpperCount,
1086 llvm::Value *This) {
1087 const llvm::Type *SizeLTy = ConvertType(getContext().getSizeType());
1088 llvm::Value *One = llvm::ConstantInt::get(SizeLTy, 1);
1090 // Create a temporary for the loop index and initialize it with the count of
1092 llvm::Value *IndexPtr = CreateTempAlloca(SizeLTy, "loop.index");
1094 // Store the number of elements in the index pointer.
1095 Builder.CreateStore(UpperCount, IndexPtr);
1097 // Start the loop with a block that tests the condition.
1098 llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
1099 llvm::BasicBlock *AfterFor = createBasicBlock("for.end");
1101 EmitBlock(CondBlock);
1103 llvm::BasicBlock *ForBody = createBasicBlock("for.body");
1105 // Generate: if (loop-index != 0) fall to the loop body,
1106 // otherwise, go to the block after the for-loop.
1107 llvm::Value* zeroConstant =
1108 llvm::Constant::getNullValue(SizeLTy);
1109 llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
1110 llvm::Value *IsNE = Builder.CreateICmpNE(Counter, zeroConstant,
1112 // If the condition is true, execute the body.
1113 Builder.CreateCondBr(IsNE, ForBody, AfterFor);
1117 llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");
1118 // Inside the loop body, emit the destructor call on the array element.
// The counter is one past the element to destroy, so subtract one before
// indexing: destroys This[Counter - 1].
1119 Counter = Builder.CreateLoad(IndexPtr);
1120 Counter = Builder.CreateSub(Counter, One);
1121 llvm::Value *Address = Builder.CreateInBoundsGEP(This, Counter, "arrayidx");
// Complete-object destructor: array elements are complete objects.
1122 EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false, Address);
1124 EmitBlock(ContinueBlock);
1126 // Emit the decrement of the loop counter.
1127 Counter = Builder.CreateLoad(IndexPtr);
1128 Counter = Builder.CreateSub(Counter, One, "dec");
1129 Builder.CreateStore(Counter, IndexPtr);
1131 // Finally, branch back up to the condition for the next iteration.
1132 EmitBranch(CondBlock);
1134 // Emit the fall-through block.
1135 EmitBlock(AfterFor, true);
// Emit a call to constructor D of kind Type on the object at This.
// Fast paths for trivial constructors: a trivial default constructor emits
// nothing at all, and a trivial copy constructor becomes an aggregate copy.
// Otherwise the constructor is called like an ordinary member function,
// passing a VTT parameter when the ABI requires one.
1139 CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
1140 CXXCtorType Type, bool ForVirtualBase,
1142 CallExpr::const_arg_iterator ArgBeg,
1143 CallExpr::const_arg_iterator ArgEnd) {
// Under -flimit-debug-info, seeing a constructor call is the trigger for
// emitting the full debug type for the constructed class.
1145 CGDebugInfo *DI = getDebugInfo();
1146 if (DI && CGM.getCodeGenOpts().LimitDebugInfo) {
1147 // If debug info for this class has been emitted then this is the right time
1149 const CXXRecordDecl *Parent = D->getParent();
1150 DI->getOrCreateRecordType(CGM.getContext().getTypeDeclType(Parent),
1151 Parent->getLocation());
1154 if (D->isTrivial()) {
1155 if (ArgBeg == ArgEnd) {
1156 // Trivial default constructor, no codegen required.
1157 assert(D->isDefaultConstructor() &&
1158 "trivial 0-arg ctor not a default ctor");
// A trivial one-argument constructor must be the copy constructor; lower it
// to a memberwise aggregate copy from the source lvalue.
1162 assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
1163 assert(D->isCopyConstructor() && "trivial 1-arg ctor not a copy ctor");
1165 const Expr *E = (*ArgBeg);
1166 QualType Ty = E->getType();
1167 llvm::Value *Src = EmitLValue(E).getAddress();
1168 EmitAggregateCopy(This, Src, Ty);
// Non-trivial case: compute the VTT argument (null when not needed) and call
// the selected constructor variant as a member function.
1172 llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(D, Type), ForVirtualBase);
1173 llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D, Type);
1175 EmitCXXMemberCall(D, Callee, ReturnValueSlot(), This, VTT, ArgBeg, ArgEnd);
// Emit a call to copy constructor D copying *Src into *This, building the
// argument list by hand rather than from a CallExpr. Trivial copy
// constructors are lowered directly to an aggregate copy. For the general
// case the argument list is: this pointer, then Src cast to the declared
// first parameter type, then any remaining arguments from [ArgBeg, ArgEnd).
1179 CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
1180 llvm::Value *This, llvm::Value *Src,
1181 CallExpr::const_arg_iterator ArgBeg,
1182 CallExpr::const_arg_iterator ArgEnd) {
1183 if (D->isTrivial()) {
1184 assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
1185 assert(D->isCopyConstructor() && "trivial 1-arg ctor not a copy ctor");
1186 EmitAggregateCopy(This, Src, (*ArgBeg)->getType());
// Always calls the complete-object variant of the constructor.
1189 llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D,
1190 clang::Ctor_Complete);
1191 assert(D->isInstance() &&
1192 "Trying to emit a member call expr on a static method!");
1194 const FunctionProtoType *FPT = D->getType()->getAs<FunctionProtoType>();
1198 // Push the this ptr.
1199 Args.push_back(std::make_pair(RValue::get(This),
1200 D->getThisType(getContext())));
1203 // Push the src ptr.
// Src is bitcast to the declared type of the constructor's first parameter
// so the call's argument type matches the prototype.
1204 QualType QT = *(FPT->arg_type_begin());
1205 const llvm::Type *t = CGM.getTypes().ConvertType(QT);
1206 Src = Builder.CreateBitCast(Src, t);
1207 Args.push_back(std::make_pair(RValue::get(Src), QT));
1209 // Skip over first argument (Src).
// Emit the remaining arguments using the prototype's parameter types.
1211 CallExpr::const_arg_iterator Arg = ArgBeg;
1212 for (FunctionProtoType::arg_type_iterator I = FPT->arg_type_begin()+1,
1213 E = FPT->arg_type_end(); I != E; ++I, ++Arg) {
1214 assert(Arg != ArgEnd && "Running over edge of argument list!");
1215 QualType ArgType = *I;
1216 Args.push_back(std::make_pair(EmitCallArg(*Arg, ArgType),
1219 // Either we've emitted all the call args, or we have a call to a
1220 // variadic function.
1221 assert((Arg == ArgEnd || FPT->isVariadic()) &&
1222 "Extra arguments in non-variadic function!");
1223 // If we still have any arguments, emit them using the type of the argument.
// Variadic tail: no parameter type in the prototype, so use each argument
// expression's own type.
1224 for (; Arg != ArgEnd; ++Arg) {
1225 QualType ArgType = Arg->getType();
1226 Args.push_back(std::make_pair(EmitCallArg(*Arg, ArgType),
1230 QualType ResultType = FPT->getResultType();
1231 EmitCall(CGM.getTypes().getFunctionInfo(ResultType, Args,
1233 Callee, ReturnValueSlot(), Args, D);
// Forward the current function's incoming parameters to another constructor
// variant of the same class (e.g. a complete-object constructor delegating to
// the base-object one). The delegate call receives: the current 'this',
// a VTT pointer when the target variant needs one, and then every remaining
// explicit parameter re-emitted unchanged.
1237 CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
1238 CXXCtorType CtorType,
1239 const FunctionArgList &Args) {
1240 CallArgList DelegateArgs;
1242 FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
1243 assert(I != E && "no parameters to constructor");
// First argument: the current object pointer.
1246 DelegateArgs.push_back(std::make_pair(RValue::get(LoadCXXThis()),
// VTT parameter, if the callee variant requires one (void**).
1251 if (llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(Ctor, CtorType),
1252 /*ForVirtualBase=*/false)) {
1253 QualType VoidPP = getContext().getPointerType(getContext().VoidPtrTy);
1254 DelegateArgs.push_back(std::make_pair(RValue::get(VTT), VoidPP));
// If the *current* function also takes a VTT parameter, skip over it in the
// incoming argument list so it is not forwarded twice.
1256 if (CodeGenVTables::needsVTTParameter(CurGD)) {
1257 assert(I != E && "cannot skip vtt parameter, already done with args");
1258 assert(I->second == VoidPP && "skipping parameter not of vtt type");
1263 // Explicit arguments.
1264 for (; I != E; ++I) {
1265 const VarDecl *Param = I->first;
1266 QualType ArgType = Param->getType(); // because we're passing it to itself
1267 RValue Arg = EmitDelegateCallArg(Param);
1269 DelegateArgs.push_back(std::make_pair(Arg, ArgType));
1272 EmitCall(CGM.getTypes().getFunctionInfo(Ctor, CtorType),
1273 CGM.GetAddrOfCXXConstructor(Ctor, CtorType),
1274 ReturnValueSlot(), DelegateArgs, Ctor);
// Emit a call to destructor DD of kind Type on the object at This, passing a
// VTT parameter when required. Under -fapple-kext the callee comes from the
// kext virtual-destructor machinery; otherwise it is the destructor's
// ordinary address. The call itself takes no explicit arguments.
1277 void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
1279 bool ForVirtualBase,
1280 llvm::Value *This) {
1281 llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(DD, Type),
1283 llvm::Value *Callee = 0;
1284 if (getContext().getLangOptions().AppleKext)
1285 Callee = BuildAppleKextVirtualDestructorCall(DD, Type,
// NOTE(review): the fallback below appears to run when the kext path did not
// produce a callee — intervening lines are not visible here; confirm.
1289 Callee = CGM.GetAddrOfCXXDestructor(DD, Type);
// Null ArgBeg/ArgEnd: destructors take no explicit arguments.
1291 EmitCXXMemberCall(DD, Callee, ReturnValueSlot(), This, VTT, 0, 0);
// EH-scope cleanup that runs the complete-object destructor on a local
// object's address when the cleanup fires (normal exit or unwind).
1295 struct CallLocalDtor : EHScopeStack::Cleanup {
1296 const CXXDestructorDecl *Dtor;
// Captures the destructor and the address of the object to destroy.
1299 CallLocalDtor(const CXXDestructorDecl *D, llvm::Value *Addr)
1300 : Dtor(D), Addr(Addr) {}
1302 void Emit(CodeGenFunction &CGF, bool IsForEH) {
1303 CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
1304 /*ForVirtualBase=*/false, Addr);
// Schedule destructor D to run on Addr at scope exit, for both normal
// control flow and exception unwinding.
1309 void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
1310 llvm::Value *Addr) {
1311 EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
// Type-based convenience overload: push a destructor cleanup for an object
// of type T at Addr. No-op for non-class types and for classes with a
// trivial destructor.
1314 void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) {
1315 CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
1316 if (!ClassDecl) return;
1317 if (ClassDecl->hasTrivialDestructor()) return;
1319 const CXXDestructorDecl *D = ClassDecl->getDestructor();
1320 PushDestructorCleanup(D, Addr);
// Load, at runtime, the offset of virtual base BaseClassDecl within an object
// of (dynamic) class ClassDecl. The offset is stored in the object's vtable:
// read the vptr from This, index to the vbase-offset slot (a negative offset
// from the address point, computed statically), and load the ptrdiff_t value.
1324 CodeGenFunction::GetVirtualBaseClassOffset(llvm::Value *This,
1325 const CXXRecordDecl *ClassDecl,
1326 const CXXRecordDecl *BaseClassDecl) {
1327 llvm::Value *VTablePtr = GetVTablePtr(This, Int8PtrTy);
// Static part: where in the vtable the vbase offset lives.
1328 int64_t VBaseOffsetOffset =
1329 CGM.getVTables().getVirtualBaseOffsetOffset(ClassDecl, BaseClassDecl);
// Byte-wise GEP from the vptr (an i8*), then reinterpret the slot as
// a pointer to ptrdiff_t before loading.
1331 llvm::Value *VBaseOffsetPtr =
1332 Builder.CreateConstGEP1_64(VTablePtr, VBaseOffsetOffset, "vbase.offset.ptr");
1333 const llvm::Type *PtrDiffTy =
1334 ConvertType(getContext().getPointerDiffType());
1336 VBaseOffsetPtr = Builder.CreateBitCast(VBaseOffsetPtr,
1337 PtrDiffTy->getPointerTo());
1339 llvm::Value *VBaseOffset = Builder.CreateLoad(VBaseOffsetPtr, "vbase.offset");
// Store the correct vtable address point into the vptr of one base subobject
// of the object under construction. When the current function takes a VTT
// (base-subobject constructors/destructors of classes with virtual bases),
// the address point is loaded from the VTT; otherwise it is a constant GEP
// into the class's vtable. The vptr field address is 'this' adjusted by the
// subobject's non-virtual (and possibly runtime virtual) offset.
1345 CodeGenFunction::InitializeVTablePointer(BaseSubobject Base,
1346 const CXXRecordDecl *NearestVBase,
1347 uint64_t OffsetFromNearestVBase,
1348 llvm::Constant *VTable,
1349 const CXXRecordDecl *VTableClass) {
1350 const CXXRecordDecl *RD = Base.getBase();
1352 // Compute the address point.
1353 llvm::Value *VTableAddressPoint;
1355 // Check if we need to use a vtable from the VTT.
1356 if (CodeGenVTables::needsVTTParameter(CurGD) &&
1357 (RD->getNumVBases() || NearestVBase)) {
1358 // Get the secondary vpointer index.
1359 uint64_t VirtualPointerIndex =
1360 CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base);
// Index into the VTT parameter and load the address point it holds.
1363 llvm::Value *VTT = LoadCXXVTT();
1364 if (VirtualPointerIndex)
1365 VTT = Builder.CreateConstInBoundsGEP1_64(VTT, VirtualPointerIndex);
1367 // And load the address point from the VTT.
1368 VTableAddressPoint = Builder.CreateLoad(VTT);
// Non-VTT case: constant address point inside the vtable global.
1370 uint64_t AddressPoint = CGM.getVTables().getAddressPoint(Base, VTableClass);
1371 VTableAddressPoint =
1372 Builder.CreateConstInBoundsGEP2_64(VTable, 0, AddressPoint);
1375 // Compute where to store the address point.
1376 llvm::Value *VirtualOffset = 0;
1377 uint64_t NonVirtualOffset = 0;
1379 if (CodeGenVTables::needsVTTParameter(CurGD) && NearestVBase) {
1380 // We need to use the virtual base offset offset because the virtual base
1381 // might have a different offset in the most derived class.
1382 VirtualOffset = GetVirtualBaseClassOffset(LoadCXXThis(), VTableClass,
// Offsets below are converted from bits to bytes.
// FIXME: avoid hard-coded / 8 (char width).
1384 NonVirtualOffset = OffsetFromNearestVBase / 8;
1386 // We can just use the base offset in the complete class.
1387 NonVirtualOffset = Base.getBaseOffset() / 8;
1390 // Apply the offsets.
1391 llvm::Value *VTableField = LoadCXXThis();
1393 if (NonVirtualOffset || VirtualOffset)
1394 VTableField = ApplyNonVirtualAndVirtualOffset(*this, VTableField,
1398 // Finally, store the address point.
1399 const llvm::Type *AddressPointPtrTy =
1400 VTableAddressPoint->getType()->getPointerTo();
1401 VTableField = Builder.CreateBitCast(VTableField, AddressPointPtrTy);
1402 Builder.CreateStore(VTableAddressPoint, VTableField);
// Recursively initialize the vptrs of Base and all of its dynamic base
// subobjects. A non-virtual primary base shares its vptr with the derived
// class, so it is skipped (already initialized); each virtual base is
// visited at most once via VBases.
1406 CodeGenFunction::InitializeVTablePointers(BaseSubobject Base,
1407 const CXXRecordDecl *NearestVBase,
1408 uint64_t OffsetFromNearestVBase,
1409 bool BaseIsNonVirtualPrimaryBase,
1410 llvm::Constant *VTable,
1411 const CXXRecordDecl *VTableClass,
1412 VisitedVirtualBasesSetTy& VBases) {
1413 // If this base is a non-virtual primary base the address point has already
1415 if (!BaseIsNonVirtualPrimaryBase) {
1416 // Initialize the vtable pointer for this base.
1417 InitializeVTablePointer(Base, NearestVBase, OffsetFromNearestVBase,
1418 VTable, VTableClass);
1421 const CXXRecordDecl *RD = Base.getBase();
// Recurse into each direct base that has a vtable.
1424 for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
1425 E = RD->bases_end(); I != E; ++I) {
1426 CXXRecordDecl *BaseDecl
1427 = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
1429 // Ignore classes without a vtable.
1430 if (!BaseDecl->isDynamicClass())
1433 uint64_t BaseOffset;
1434 uint64_t BaseOffsetFromNearestVBase;
1435 bool BaseDeclIsNonVirtualPrimaryBase;
1437 if (I->isVirtual()) {
1438 // Check if we've visited this virtual base before.
1439 if (!VBases.insert(BaseDecl))
// Virtual base: its offset comes from the most-derived (VTableClass) layout.
1442 const ASTRecordLayout &Layout =
1443 getContext().getASTRecordLayout(VTableClass);
1445 BaseOffset = Layout.getVBaseClassOffsetInBits(BaseDecl);
1446 BaseOffsetFromNearestVBase = 0;
1447 BaseDeclIsNonVirtualPrimaryBase = false;
// Non-virtual base: accumulate its offset within the current class RD.
1449 const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
1452 Base.getBaseOffset() + Layout.getBaseClassOffsetInBits(BaseDecl);
1453 BaseOffsetFromNearestVBase =
1454 OffsetFromNearestVBase + Layout.getBaseClassOffsetInBits(BaseDecl);
1455 BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
// A virtual base becomes the new NearestVBase for its own subtree.
1458 InitializeVTablePointers(BaseSubobject(BaseDecl, BaseOffset),
1459 I->isVirtual() ? BaseDecl : NearestVBase,
1460 BaseOffsetFromNearestVBase,
1461 BaseDeclIsNonVirtualPrimaryBase,
1462 VTable, VTableClass, VBases);
// Entry point: initialize all vptrs of an object of dynamic class RD that is
// under construction/destruction, starting from the complete object (offset
// 0, no enclosing virtual base) and walking the base hierarchy.
1466 void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
1467 // Ignore classes without a vtable.
1468 if (!RD->isDynamicClass())
1472 llvm::Constant *VTable = CGM.getVTables().GetAddrOfVTable(RD);
1474 // Initialize the vtable pointers for this class and all of its bases.
1475 VisitedVirtualBasesSetTy VBases;
1476 InitializeVTablePointers(BaseSubobject(RD, 0), /*NearestVBase=*/0,
1477 /*OffsetFromNearestVBase=*/0,
1478 /*BaseIsNonVirtualPrimaryBase=*/false,
1479 VTable, RD, VBases);
1482 llvm::Value *CodeGenFunction::GetVTablePtr(llvm::Value *This,
1483 const llvm::Type *Ty) {
1484 llvm::Value *VTablePtrSrc = Builder.CreateBitCast(This, Ty->getPointerTo());
1485 return Builder.CreateLoad(VTablePtrSrc, "vtable");