1 //===--- CGClass.cpp - Emit LLVM Code for C++ classes ---------------------===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This contains code dealing with C++ code generation of classes
12 //===----------------------------------------------------------------------===//
14 #include "CGDebugInfo.h"
15 #include "CodeGenFunction.h"
16 #include "clang/AST/CXXInheritance.h"
17 #include "clang/AST/RecordLayout.h"
18 #include "clang/AST/StmtCXX.h"
20 using namespace clang;
21 using namespace CodeGen;
// Computes the offset of a base subobject reached from DerivedClass by
// following the given inheritance path [Start, End), summing each direct
// base's offset from its enclosing class's ASTRecordLayout.
// NOTE(review): this chunk is elided — the return type, the accumulator
// (`Offset`) declaration, the loop tail that advances RD to BaseDecl, and
// the final return are not visible here.
24 ComputeNonVirtualBaseClassOffset(ASTContext &Context,
25 const CXXRecordDecl *DerivedClass,
26 CastExpr::path_const_iterator Start,
27 CastExpr::path_const_iterator End) {
30 const CXXRecordDecl *RD = DerivedClass;
32 for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
33 const CXXBaseSpecifier *Base = *I;
   // Virtual bases must be peeled off by the caller; this walk handles
   // only the statically-known (non-virtual) part of the path.
34 assert(!Base->isVirtual() && "Should not see virtual bases here!");
37 const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
39 const CXXRecordDecl *BaseDecl =
40 cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());
43 Offset += Layout.getBaseClassOffset(BaseDecl);
   // Offsets from the record layout are in bits here, hence the /8 below
   // (presumably — TODO confirm against ASTRecordLayout's unit convention).
48 // FIXME: We should not use / 8 here.
// Returns the non-virtual base-class offset for the given path as an LLVM
// constant of ptrdiff_t type, suitable for pointer adjustment in IR.
// NOTE(review): elided here — the return type, the Offset variable binding
// the ComputeNonVirtualBaseClassOffset result, the trailing call arguments,
// and an early-return for a zero offset (if any) are not visible.
53 CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
54 CastExpr::path_const_iterator PathBegin,
55 CastExpr::path_const_iterator PathEnd) {
56 assert(PathBegin != PathEnd && "Base path should not be empty!");
59 ComputeNonVirtualBaseClassOffset(getContext(), ClassDecl,
64 const llvm::Type *PtrDiffTy =
65 Types.ConvertType(getContext().getPointerDiffType())
67 return llvm::ConstantInt::get(PtrDiffTy, Offset);
70 /// Gets the address of a direct base class within a complete object.
71 /// This should only be used for (1) non-virtual bases or (2) virtual bases
72 /// when the type is known to be complete (e.g. in complete destructors).
74 /// The object pointed to by 'This' is assumed to be non-null.
///
/// NOTE(review): elided here — the return type, a `BaseIsVirtual` (or
/// similar) parameter after `Base`, the Offset declaration, the branch
/// selecting between the vbase/base offset lookups, and the final return
/// of V are not visible in this chunk.
76 CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(llvm::Value *This,
77 const CXXRecordDecl *Derived,
78 const CXXRecordDecl *Base,
80 // 'this' must be a pointer (in some address space) to Derived.
81 assert(This->getType()->isPointerTy() &&
82 cast<llvm::PointerType>(This->getType())->getElementType()
83 == ConvertType(Derived));
85 // Compute the offset of the virtual base.
87 const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
   // Virtual-base path: offset within the *complete* object.
89 Offset = Layout.getVBaseClassOffset(Base);
   // Non-virtual path: offset of the direct base within Derived.
91 Offset = Layout.getBaseClassOffset(Base);
93 // Shift and cast down to the base type.
94 // TODO: for complete types, this should be possible with a GEP.
95 llvm::Value *V = This;
97 const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(getLLVMContext());
   // Do byte arithmetic through i8*; Offset is in bits (note the /8).
98 V = Builder.CreateBitCast(V, Int8PtrTy);
99 V = Builder.CreateConstInBoundsGEP1_64(V, Offset / 8);
101 V = Builder.CreateBitCast(V, ConvertType(Base)->getPointerTo());
// Adjusts ThisPtr by a compile-time non-virtual offset plus an optional
// runtime virtual-base offset, doing the arithmetic through i8*.
// NOTE(review): elided here — the return type, the `if` guarding the
// NonVirtualOffset materialization, the `else if`/`else` keywords of the
// BaseOffset selection ladder, and the final return of ThisPtr are not
// visible. The three assignments to BaseOffset below are the arms of
// that ladder: both offsets / only virtual / only non-virtual.
107 ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, llvm::Value *ThisPtr,
108 uint64_t NonVirtual, llvm::Value *Virtual) {
109 const llvm::Type *PtrDiffTy =
110 CGF.ConvertType(CGF.getContext().getPointerDiffType());
112 llvm::Value *NonVirtualOffset = 0;
114 NonVirtualOffset = llvm::ConstantInt::get(PtrDiffTy, NonVirtual);
116 llvm::Value *BaseOffset;
118 if (NonVirtualOffset)
119 BaseOffset = CGF.Builder.CreateAdd(Virtual, NonVirtualOffset);
121 BaseOffset = Virtual;
123 BaseOffset = NonVirtualOffset;
125 // Apply the base offset.
126 const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
127 ThisPtr = CGF.Builder.CreateBitCast(ThisPtr, Int8PtrTy);
128 ThisPtr = CGF.Builder.CreateGEP(ThisPtr, BaseOffset, "add.ptr");
// Converts a pointer to Derived into a pointer to the base class named by
// the cast path, handling a leading virtual base, a null check (emitting
// cast.null/cast.notnull/cast.end blocks joined by a PHI), and the
// non-virtual offset adjustment.
// NOTE(review): elided here — the return type, the VBase assignment inside
// the isVirtual() branch (line 147 is its RHS), the arguments completing
// the ComputeNonVirtualBaseClassOffset call, the `if (VBase)` guard before
// GetVirtualBaseClassOffset, the trailing argument of the
// ApplyNonVirtualAndVirtualOffset call, the null-path EmitBlock calls
// around the two CreateBr's, the PHI's second incoming block, and the
// final return are not visible.
134 CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
135 const CXXRecordDecl *Derived,
136 CastExpr::path_const_iterator PathBegin,
137 CastExpr::path_const_iterator PathEnd,
138 bool NullCheckValue) {
139 assert(PathBegin != PathEnd && "Base path should not be empty!");
141 CastExpr::path_const_iterator Start = PathBegin;
142 const CXXRecordDecl *VBase = 0;
144 // Get the virtual base.
145 if ((*Start)->isVirtual()) {
147 cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl());
151 uint64_t NonVirtualOffset =
152 ComputeNonVirtualBaseClassOffset(getContext(), VBase ? VBase : Derived,
155 // Get the base pointer type.
156 const llvm::Type *BasePtrTy =
157 ConvertType((PathEnd[-1])->getType())->getPointerTo();
    // Fast path: no adjustment needed at all, just retype the pointer.
159 if (!NonVirtualOffset && !VBase) {
161 return Builder.CreateBitCast(Value, BasePtrTy);
164 llvm::BasicBlock *CastNull = 0;
165 llvm::BasicBlock *CastNotNull = 0;
166 llvm::BasicBlock *CastEnd = 0;
168 if (NullCheckValue) {
169 CastNull = createBasicBlock("cast.null");
170 CastNotNull = createBasicBlock("cast.notnull");
171 CastEnd = createBasicBlock("cast.end");
173 llvm::Value *IsNull =
174 Builder.CreateICmpEQ(Value,
175 llvm::Constant::getNullValue(Value->getType()));
176 Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
177 EmitBlock(CastNotNull);
    // The virtual-base displacement is only known at runtime (read from
    // the vtable), hence an llvm::Value rather than a constant.
180 llvm::Value *VirtualOffset = 0;
183 VirtualOffset = GetVirtualBaseClassOffset(Value, Derived, VBase);
185 // Apply the offsets.
186 Value = ApplyNonVirtualAndVirtualOffset(*this, Value, NonVirtualOffset,
190 Value = Builder.CreateBitCast(Value, BasePtrTy);
192 if (NullCheckValue) {
193 Builder.CreateBr(CastEnd);
195 Builder.CreateBr(CastEnd);
    // Merge the null and not-null paths: null stays null after the cast.
198 llvm::PHINode *PHI = Builder.CreatePHI(Value->getType());
199 PHI->reserveOperandSpace(2);
200 PHI->addIncoming(Value, CastNotNull);
201 PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
// Converts a pointer to a base class back into a pointer to Derived by
// *subtracting* the non-virtual path offset (the inverse of
// GetAddressOfBaseClass). Virtual bases cannot appear on a derived-cast
// path, so only the constant offset is handled; the same optional
// null-check block structure is used.
// NOTE(review): elided here — the return type, the DerivedTy declaration
// (line 218 is its initializer RHS), the null-path EmitBlock calls, the
// PHI's second incoming block, and the final return are not visible.
210 CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value,
211 const CXXRecordDecl *Derived,
212 CastExpr::path_const_iterator PathBegin,
213 CastExpr::path_const_iterator PathEnd,
214 bool NullCheckValue) {
215 assert(PathBegin != PathEnd && "Base path should not be empty!");
218 getContext().getCanonicalType(getContext().getTagDeclType(Derived));
219 const llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo();
221 llvm::Value *NonVirtualOffset =
222 CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);
224 if (!NonVirtualOffset) {
225 // No offset, we can just cast back.
226 return Builder.CreateBitCast(Value, DerivedPtrTy);
229 llvm::BasicBlock *CastNull = 0;
230 llvm::BasicBlock *CastNotNull = 0;
231 llvm::BasicBlock *CastEnd = 0;
233 if (NullCheckValue) {
234 CastNull = createBasicBlock("cast.null");
235 CastNotNull = createBasicBlock("cast.notnull");
236 CastEnd = createBasicBlock("cast.end");
238 llvm::Value *IsNull =
239 Builder.CreateICmpEQ(Value,
240 llvm::Constant::getNullValue(Value->getType()));
241 Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
242 EmitBlock(CastNotNull);
    // Apply the offset in the integer domain: ptr -> int, subtract, -> ptr.
246 Value = Builder.CreatePtrToInt(Value, NonVirtualOffset->getType());
247 Value = Builder.CreateSub(Value, NonVirtualOffset);
248 Value = Builder.CreateIntToPtr(Value, DerivedPtrTy);
    // Just cast back (presumably the no-offset/else path — elided context).
251 Value = Builder.CreateBitCast(Value, DerivedPtrTy);
253 if (NullCheckValue) {
254 Builder.CreateBr(CastEnd);
256 Builder.CreateBr(CastEnd);
    // Merge null and not-null paths: null maps to null.
259 llvm::PHINode *PHI = Builder.CreatePHI(Value->getType());
260 PHI->reserveOperandSpace(2);
261 PHI->addIncoming(Value, CastNotNull);
262 PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
270 /// GetVTTParameter - Return the VTT parameter that should be passed to a
271 /// base constructor/destructor with virtual bases.
///
/// NOTE(review): elided here — the early `return 0` (or similar) when no
/// VTT is needed, the `if (RD == Base)` test introducing the no-op case,
/// the `VTT` declaration, the `else` between the two VTT-loading paths,
/// and the final `return VTT;` are not visible in this chunk.
272 static llvm::Value *GetVTTParameter(CodeGenFunction &CGF, GlobalDecl GD,
273 bool ForVirtualBase) {
274 if (!CodeGenVTables::needsVTTParameter(GD)) {
275 // This constructor/destructor does not need a VTT parameter.
    // RD: the class whose ctor/dtor we're currently emitting.
    // Base: the class whose ctor/dtor is being called.
279 const CXXRecordDecl *RD = cast<CXXMethodDecl>(CGF.CurFuncDecl)->getParent();
280 const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();
284 uint64_t SubVTTIndex;
286 // If the record matches the base, this is the complete ctor/dtor
287 // variant calling the base variant in a class with virtual bases.
289 assert(!CodeGenVTables::needsVTTParameter(CGF.CurGD) &&
290 "doing no-op VTT offset in base dtor/ctor?");
291 assert(!ForVirtualBase && "Can't have same class as virtual base!");
294 const ASTRecordLayout &Layout =
295 CGF.getContext().getASTRecordLayout(RD);
296 uint64_t BaseOffset = ForVirtualBase ?
297 Layout.getVBaseClassOffset(Base) : Layout.getBaseClassOffset(Base);
    // Locate the sub-VTT for this base subobject within RD's VTT.
300 CGF.CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
301 assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
304 if (CodeGenVTables::needsVTTParameter(CGF.CurGD)) {
305 // A VTT parameter was passed to the constructor, use it.
306 VTT = CGF.LoadCXXVTT();
307 VTT = CGF.Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex);
309 // We're the complete constructor, so get the VTT by name.
310 VTT = CGF.CGM.getVTables().getVTT(RD);
311 VTT = CGF.Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex);
318 /// Call the destructor for a direct base class.
///
/// EH-scope cleanup: when popped (normally or on the exception path), it
/// destroys one direct base subobject of the class currently being emitted.
/// NOTE(review): elided here — the `bool BaseIsVirtual` member declaration,
/// the `Addr` binding for the GetAddressOfDirectBaseInCompleteClass result,
/// and the closing braces are not visible.
319 struct CallBaseDtor : EHScopeStack::Cleanup {
320 const CXXRecordDecl *BaseClass;
322 CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
323 : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}
325 void Emit(CodeGenFunction &CGF, bool IsForEH) {
    // The derived class is whatever ctor/dtor we're currently inside.
326 const CXXRecordDecl *DerivedClass =
327 cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();
329 const CXXDestructorDecl *D = BaseClass->getDestructor();
331 CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThis(),
332 DerivedClass, BaseClass,
    // Base variant: the base dtor must not destroy virtual bases itself.
334 CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual, Addr);
// Emits a single base-class member-initializer from a constructor's init
// list: computes the base subobject address, runs the initializer
// expression into it, and (with exceptions enabled) registers an EH
// cleanup to destroy the base if a later initializer throws.
// NOTE(review): elided here — the `return` inside the Ctor_Base+virtual
// early-out, the `V` binding for the base address, the trailing arguments
// of GetAddressOfDirectBaseInCompleteClass and pushCleanup, and the
// closing brace are not visible.
339 static void EmitBaseInitializer(CodeGenFunction &CGF,
340 const CXXRecordDecl *ClassDecl,
341 CXXBaseOrMemberInitializer *BaseInit,
342 CXXCtorType CtorType) {
343 assert(BaseInit->isBaseInitializer() &&
344 "Must have base initializer!");
346 llvm::Value *ThisPtr = CGF.LoadCXXThis();
348 const Type *BaseType = BaseInit->getBaseClass();
349 CXXRecordDecl *BaseClassDecl =
350 cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());
352 bool isBaseVirtual = BaseInit->isBaseVirtual();
354 // The base constructor doesn't construct virtual bases.
355 if (CtorType == Ctor_Base && isBaseVirtual)
358 // We can pretend to be a complete class because it only matters for
359 // virtual bases, and we only do virtual bases for complete ctors.
361 CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
365 CGF.EmitAggExpr(BaseInit->getInit(), V, false, false, true);
    // Partial-construction cleanup: destroy this base on the EH path.
367 if (CGF.Exceptions && !BaseClassDecl->hasTrivialDestructor())
368 CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
// Recursively emits the initialization of a (possibly multi-dimensional)
// array member from a ctor init list. Each recursion level peels one array
// dimension and emits a counted for-loop (for.cond / for.body / for.inc /
// for.end); the base case (Index == number of array-index variables)
// performs the actual element initialization via EmitAggExpr, offset by
// the accumulated ArrayIndexVar.
// NOTE(review): elided here — the LValue/type parameters in the signature
// (LHS and T are used below but their declarations are not visible), the
// `if (ArrayIndexVar)` guard and its `return` in the base case, the `Zero`
// declaration, and several closing braces are not visible.
372 static void EmitAggMemberInitializer(CodeGenFunction &CGF,
374 llvm::Value *ArrayIndexVar,
375 CXXBaseOrMemberInitializer *MemberInit,
378 if (Index == MemberInit->getNumArrayIndices()) {
    // Base case: all dimensions peeled; initialize one element.
379 CodeGenFunction::RunCleanupsScope Cleanups(CGF);
381 llvm::Value *Dest = LHS.getAddress();
383 // If we have an array index variable, load it and use it as an offset.
384 // Then, increment the value.
385 llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar);
386 Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress");
387 llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1);
388 Next = CGF.Builder.CreateAdd(ArrayIndex, Next, "inc");
389 CGF.Builder.CreateStore(Next, ArrayIndexVar);
392 CGF.EmitAggExpr(MemberInit->getInit(), Dest,
393 LHS.isVolatileQualified(),
394 /*IgnoreResult*/ false,
395 /*IsInitializer*/ true);
    // Recursive case: T must be a constant array type at this depth.
400 const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T);
401 assert(Array && "Array initialization without the array type?");
402 llvm::Value *IndexVar
403 = CGF.GetAddrOfLocalVar(MemberInit->getArrayIndex(Index));
404 assert(IndexVar && "Array index variable not loaded");
406 // Initialize this index variable to zero.
408 = llvm::Constant::getNullValue(
409 CGF.ConvertType(CGF.getContext().getSizeType()));
410 CGF.Builder.CreateStore(Zero, IndexVar);
412 // Start the loop with a block that tests the condition.
413 llvm::BasicBlock *CondBlock = CGF.createBasicBlock("for.cond");
414 llvm::BasicBlock *AfterFor = CGF.createBasicBlock("for.end");
416 CGF.EmitBlock(CondBlock);
418 llvm::BasicBlock *ForBody = CGF.createBasicBlock("for.body");
419 // Generate: if (loop-index < number-of-elements) fall to the loop body,
420 // otherwise, go to the block after the for-loop.
421 uint64_t NumElements = Array->getSize().getZExtValue();
422 llvm::Value *Counter = CGF.Builder.CreateLoad(IndexVar);
423 llvm::Value *NumElementsPtr =
424 llvm::ConstantInt::get(Counter->getType(), NumElements);
425 llvm::Value *IsLess = CGF.Builder.CreateICmpULT(Counter, NumElementsPtr,
428 // If the condition is true, execute the body.
429 CGF.Builder.CreateCondBr(IsLess, ForBody, AfterFor);
431 CGF.EmitBlock(ForBody);
432 llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc");
    // Per-iteration cleanup scope so temporaries die each trip.
435 CodeGenFunction::RunCleanupsScope Cleanups(CGF);
437 // Inside the loop body recurse to emit the inner loop or, eventually, the
439 EmitAggMemberInitializer(CGF, LHS, ArrayIndexVar, MemberInit,
440 Array->getElementType(), Index + 1);
443 CGF.EmitBlock(ContinueBlock);
445 // Emit the increment of the loop counter.
446 llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
447 Counter = CGF.Builder.CreateLoad(IndexVar);
448 NextVal = CGF.Builder.CreateAdd(Counter, NextVal, "inc");
449 CGF.Builder.CreateStore(NextVal, IndexVar);
451 // Finally, branch back up to the condition for the next iteration.
452 CGF.EmitBranch(CondBlock);
454 // Emit the fall-through block.
455 CGF.EmitBlock(AfterFor, true);
// EH-scope cleanup that destroys one non-static data member of the class
// being constructed, used to unwind partially-constructed objects.
// NOTE(review): elided here — the `FieldDecl *Field;` member declaration
// (line 460), the trailing argument of EmitCXXDestructorCall (the member
// address from LHS), and the closing braces are not visible.
459 struct CallMemberDtor : EHScopeStack::Cleanup {
461 CXXDestructorDecl *Dtor;
463 CallMemberDtor(FieldDecl *Field, CXXDestructorDecl *Dtor)
464 : Field(Field), Dtor(Dtor) {}
466 void Emit(CodeGenFunction &CGF, bool IsForEH) {
467 // FIXME: Is this OK for C++0x delegating constructors?
468 llvm::Value *ThisPtr = CGF.LoadCXXThis();
469 LValue LHS = CGF.EmitLValueForField(ThisPtr, Field, 0);
471 CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
// Emits a single non-static data member initializer from a constructor's
// init list. Dispatches on the field type: reference binding, array
// null-initialization, scalar store, complex store, or aggregate
// initialization (with a special fast/looping path for array members in
// implicit copy constructors). Finally registers an EH cleanup for the
// member if it has a non-trivial destructor.
// NOTE(review): elided here — the LHS/RHS declarations, the `else` of the
// anonymous-union branch, the `else` opening the aggregate branch, the
// `return` after the trivial-copy memcpy path, closing braces, and the
// `if (RT)`-style guards around the cleanup push are not visible.
477 static void EmitMemberInitializer(CodeGenFunction &CGF,
478 const CXXRecordDecl *ClassDecl,
479 CXXBaseOrMemberInitializer *MemberInit,
480 const CXXConstructorDecl *Constructor,
481 FunctionArgList &Args) {
482 assert(MemberInit->isMemberInitializer() &&
483 "Must have member initializer!");
485 // non-static data member initializers.
486 FieldDecl *Field = MemberInit->getMember();
487 QualType FieldType = CGF.getContext().getCanonicalType(Field->getType());
489 llvm::Value *ThisPtr = CGF.LoadCXXThis();
492 // If we are initializing an anonymous union field, drill down to the field.
493 if (MemberInit->getAnonUnionMember()) {
494 Field = MemberInit->getAnonUnionMember();
495 LHS = CGF.EmitLValueForAnonRecordField(ThisPtr, Field, 0);
496 FieldType = Field->getType();
498 LHS = CGF.EmitLValueForFieldInitialization(ThisPtr, Field, 0);
501 // FIXME: If there's no initializer and the CXXBaseOrMemberInitializer
502 // was implicitly generated, we shouldn't be zeroing memory.
504 if (FieldType->isReferenceType()) {
505 RHS = CGF.EmitReferenceBindingToExpr(MemberInit->getInit(), Field);
506 CGF.EmitStoreThroughLValue(RHS, LHS, FieldType);
507 } else if (FieldType->isArrayType() && !MemberInit->getInit()) {
508 CGF.EmitNullInitialization(LHS.getAddress(), Field->getType());
509 } else if (!CGF.hasAggregateLLVMType(Field->getType())) {
510 RHS = RValue::get(CGF.EmitScalarExpr(MemberInit->getInit()));
511 CGF.EmitStoreThroughLValue(RHS, LHS, FieldType);
512 } else if (MemberInit->getInit()->getType()->isAnyComplexType()) {
513 CGF.EmitComplexExprIntoAddr(MemberInit->getInit(), LHS.getAddress(),
514 LHS.isVolatileQualified());
    // Aggregate path (elided `else` above).
516 llvm::Value *ArrayIndexVar = 0;
517 const ConstantArrayType *Array
518 = CGF.getContext().getAsConstantArrayType(FieldType);
519 if (Array && Constructor->isImplicit() &&
520 Constructor->isCopyConstructor()) {
521 const llvm::Type *SizeTy
522 = CGF.ConvertType(CGF.getContext().getSizeType());
524 // The LHS is a pointer to the first object we'll be constructing, as
526 QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
527 const llvm::Type *BasePtr = CGF.ConvertType(BaseElementTy);
528 BasePtr = llvm::PointerType::getUnqual(BasePtr);
529 llvm::Value *BaseAddrPtr = CGF.Builder.CreateBitCast(LHS.getAddress(),
531 LHS = CGF.MakeAddrLValue(BaseAddrPtr, BaseElementTy);
533 // Create an array index that will be used to walk over all of the
534 // objects we're constructing.
535 ArrayIndexVar = CGF.CreateTempAlloca(SizeTy, "object.index");
536 llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
537 CGF.Builder.CreateStore(Zero, ArrayIndexVar);
539 // If we are copying an array of scalars or classes with trivial copy
540 // constructors, perform a single aggregate copy.
541 const RecordType *Record = BaseElementTy->getAs<RecordType>();
543 cast<CXXRecordDecl>(Record->getDecl())->hasTrivialCopyConstructor()) {
544 // Find the source pointer. We knows it's the last argument because
545 // we know we're in a copy constructor.
546 unsigned SrcArgIndex = Args.size() - 1;
548 = CGF.Builder.CreateLoad(
549 CGF.GetAddrOfLocalVar(Args[SrcArgIndex].first));
550 LValue Src = CGF.EmitLValueForFieldInitialization(SrcPtr, Field, 0);
552 // Copy the aggregate.
553 CGF.EmitAggregateCopy(LHS.getAddress(), Src.getAddress(), FieldType,
554 LHS.isVolatileQualified());
558 // Emit the block variables for the array indices, if any.
559 for (unsigned I = 0, N = MemberInit->getNumArrayIndices(); I != N; ++I)
560 CGF.EmitLocalBlockVarDecl(*MemberInit->getArrayIndex(I));
563 EmitAggMemberInitializer(CGF, LHS, ArrayIndexVar, MemberInit, FieldType, 0);
568 // FIXME: If we have an array of classes w/ non-trivial destructors,
569 // we need to destroy in reverse order of construction along the exception
    // Partial-construction cleanup for this member on the EH path.
571 const RecordType *RT = FieldType->getAs<RecordType>();
575 CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
576 if (!RD->hasTrivialDestructor())
577 CGF.EHStack.pushCleanup<CallMemberDtor>(EHCleanup, Field,
578 RD->getDestructor());
582 /// Checks whether the given constructor is a valid subject for the
583 /// complete-to-base constructor delegation optimization, i.e.
584 /// emitting the complete constructor as a simple call to the base
/// constructor variant.
///
/// NOTE(review): elided here — the `return false;` statements inside the
/// two disqualifying branches and the final `return true;` are not visible.
586 static bool IsConstructorDelegationValid(const CXXConstructorDecl *Ctor) {
588 // Currently we disable the optimization for classes with virtual
589 // bases because (1) the addresses of parameter variables need to be
590 // consistent across all initializers but (2) the delegate function
591 // call necessarily creates a second copy of the parameter variable.
593 // The limiting example (purely theoretical AFAIK):
594 // struct A { A(int &c) { c++; } };
595 // struct B : virtual A {
596 // B(int count) : A(count) { printf("%d\n", count); }
598 // ...although even this example could in principle be emitted as a
599 // delegation since the address of the parameter doesn't escape.
600 if (Ctor->getParent()->getNumVBases()) {
601 // TODO: white-list trivial vbase initializers. This case wouldn't
602 // be subject to the restrictions below.
604 // TODO: white-list cases where:
605 // - there are no non-reference parameters to the constructor
606 // - the initializers don't access any non-reference parameters
607 // - the initializers don't take the address of non-reference
608 //   parameters (elided continuation)
610 // If we ever add any of the above cases, remember that:
611 // - function-try-blocks will always blacklist this optimization
612 // - we need to perform the constructor prologue and cleanup in
613 // EmitConstructorBody.
618 // We also disable the optimization for variadic functions because
619 // it's impossible to "re-pass" varargs.
620 if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic())
626 /// EmitConstructorBody - Emits the body of the current constructor.
///
/// Order of emission: (1) try the complete->base delegation optimization;
/// (2) optionally enter a function-try-block; (3) emit the ctor prologue
/// (base/member initializers); (4) emit the body; (5) pop the cleanups
/// pushed by the prologue; (6) exit the function-try-block.
/// NOTE(review): elided here — the `return` after the delegate call, the
/// `if (IsTryBody)` guards around Enter/ExitCXXTryStmt and around the
/// try-block vs. plain-body EmitStmt choice, and the closing brace are
/// not visible.
627 void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
628 const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
629 CXXCtorType CtorType = CurGD.getCtorType();
631 // Before we go any further, try the complete->base constructor
632 // delegation optimization.
633 if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor)) {
634 if (CGDebugInfo *DI = getDebugInfo())
635 DI->EmitStopPoint(Builder);
636 EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args);
640 Stmt *Body = Ctor->getBody();
642 // Enter the function-try-block before the constructor prologue if
644 bool IsTryBody = (Body && isa<CXXTryStmt>(Body));
646 EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);
648 EHScopeStack::stable_iterator CleanupDepth = EHStack.stable_begin();
650 // Emit the constructor prologue, i.e. the base and member
652 EmitCtorPrologue(Ctor, CtorType, Args);
654 // Emit the body of the statement.
656 EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
660 // Emit any cleanup blocks associated with the member or base
661 // initializers, which includes (along the exceptional path) the
662 // destructors for those members and bases that were fully
664 PopCleanupBlocks(CleanupDepth);
667 ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
670 /// EmitCtorPrologue - This routine generates necessary code to initialize
671 /// base classes and non-static data members belonging to this constructor.
///
/// Bases are initialized in declaration order as they are encountered;
/// member initializers are collected and emitted only after the vtable
/// pointers are installed, since member initializers may call virtual
/// functions on the object under construction.
/// NOTE(review): elided here — the loop's `E = CD->init_end()` bound /
/// increment, the `else` before the MemberInitializers push, and the
/// closing braces are not visible.
672 void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
673 CXXCtorType CtorType,
674 FunctionArgList &Args) {
675 const CXXRecordDecl *ClassDecl = CD->getParent();
677 llvm::SmallVector<CXXBaseOrMemberInitializer *, 8> MemberInitializers;
679 for (CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
682 CXXBaseOrMemberInitializer *Member = (*B);
684 if (Member->isBaseInitializer())
685 EmitBaseInitializer(*this, ClassDecl, Member, CtorType);
687 MemberInitializers.push_back(Member);
690 InitializeVTablePointers(ClassDecl);
692 for (unsigned I = 0, E = MemberInitializers.size(); I != E; ++I)
693 EmitMemberInitializer(*this, ClassDecl, MemberInitializers[I], CD, Args);
696 /// EmitDestructorBody - Emits the body of the current destructor.
///
/// Deleting destructors delegate to the complete variant inside an
/// operator-delete cleanup. Otherwise: optionally enter a function-try-
/// block, push the epilogue cleanups for the variant (virtual bases for
/// Dtor_Complete, fields + non-virtual bases for Dtor_Base), emit the
/// body, then force the epilogue cleanups and exit the try.
/// NOTE(review): elided here — the `return` after the deleting-dtor call,
/// the `switch (DtorType)` header and its `case Dtor_Complete:` /
/// `case Dtor_Base:` labels, the `if (!isTryBody)` guard around the
/// complete->base delegation, the plain-body EmitStmt / implicit-dtor
/// branch structure, and several closing braces are not visible.
697 void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
698 const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
699 CXXDtorType DtorType = CurGD.getDtorType();
701 // The call to operator delete in a deleting destructor happens
702 // outside of the function-try-block, which means it's always
703 // possible to delegate the destructor body to the complete
704 // destructor. Do so.
705 if (DtorType == Dtor_Deleting) {
706 EnterDtorCleanups(Dtor, Dtor_Deleting);
707 EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
713 Stmt *Body = Dtor->getBody();
715 // If the body is a function-try-block, enter the try before
717 bool isTryBody = (Body && isa<CXXTryStmt>(Body));
719 EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);
721 // Enter the epilogue cleanups.
722 RunCleanupsScope DtorEpilogue(*this);
724 // If this is the complete variant, just invoke the base variant;
725 // the epilogue will destruct the virtual bases. But we can't do
726 // this optimization if the body is a function-try-block, because
727 // we'd introduce *two* handler blocks.
729 case Dtor_Deleting: llvm_unreachable("already handled deleting case");
732 // Enter the cleanup scopes for virtual bases.
733 EnterDtorCleanups(Dtor, Dtor_Complete);
736 EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
740 // Fallthrough: act like we're in the base variant.
743 // Enter the cleanup scopes for fields and non-virtual bases.
744 EnterDtorCleanups(Dtor, Dtor_Base);
746 // Initialize the vtable pointers before entering the body.
747 InitializeVTablePointers(Dtor->getParent());
750 EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
754 assert(Dtor->isImplicit() && "bodyless dtor not implicit");
755 // nothing to do besides what's in the epilogue
760 // Jump out through the epilogue cleanups.
761 DtorEpilogue.ForceCleanup();
763 // Exit the try if applicable.
765 ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
769 /// Call the operator delete associated with the current destructor.
///
/// Cleanup pushed for the deleting-destructor variant: frees the complete
/// object via the operator delete that Sema resolved for this class.
/// NOTE(review): the constructor and closing braces are elided here.
770 struct CallDtorDelete : EHScopeStack::Cleanup {
773 void Emit(CodeGenFunction &CGF, bool IsForEH) {
774 const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
775 const CXXRecordDecl *ClassDecl = Dtor->getParent();
776 CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(),
777 CGF.getContext().getTagDeclType(ClassDecl));
// Cleanup that destroys an array-typed field: locates the field within
// *this, casts down to a pointer to the base element type, and emits a
// counted destructor loop via EmitCXXAggrDestructorCall.
// NOTE(review): elided here — the `BaseType` declaration (line 791 is its
// initializer RHS), the trailing arguments of EmitCXXAggrDestructorCall
// (Array + BaseAddrPtr), and the closing braces are not visible.
781 struct CallArrayFieldDtor : EHScopeStack::Cleanup {
782 const FieldDecl *Field;
783 CallArrayFieldDtor(const FieldDecl *Field) : Field(Field) {}
785 void Emit(CodeGenFunction &CGF, bool IsForEH) {
786 QualType FieldType = Field->getType();
787 const ConstantArrayType *Array =
788 CGF.getContext().getAsConstantArrayType(FieldType);
    // Strip nested array dimensions down to the element record type.
791 CGF.getContext().getBaseElementType(Array->getElementType());
792 const CXXRecordDecl *FieldClassDecl = BaseType->getAsCXXRecordDecl();
794 llvm::Value *ThisPtr = CGF.LoadCXXThis();
795 LValue LHS = CGF.EmitLValueForField(ThisPtr, Field,
796 // FIXME: Qualifiers?
797 /*CVRQualifiers=*/0);
799 const llvm::Type *BasePtr = CGF.ConvertType(BaseType)->getPointerTo();
800 llvm::Value *BaseAddrPtr =
801 CGF.Builder.CreateBitCast(LHS.getAddress(), BasePtr);
802 CGF.EmitCXXAggrDestructorCall(FieldClassDecl->getDestructor(),
// Cleanup that destroys a single (non-array) class-typed field of *this
// with a complete-object destructor call.
// NOTE(review): the trailing address argument of EmitCXXDestructorCall
// (LHS.getAddress(), presumably) and the closing braces are elided here.
807 struct CallFieldDtor : EHScopeStack::Cleanup {
808 const FieldDecl *Field;
809 CallFieldDtor(const FieldDecl *Field) : Field(Field) {}
811 void Emit(CodeGenFunction &CGF, bool IsForEH) {
812 const CXXRecordDecl *FieldClassDecl =
813 Field->getType()->getAsCXXRecordDecl();
815 llvm::Value *ThisPtr = CGF.LoadCXXThis();
816 LValue LHS = CGF.EmitLValueForField(ThisPtr, Field,
817 // FIXME: Qualifiers?
818 /*CVRQualifiers=*/0);
820 CGF.EmitCXXDestructorCall(FieldClassDecl->getDestructor(),
821 Dtor_Complete, /*ForVirtualBase=*/false,
827 /// EmitDtorEpilogue - Emit all code that comes at the end of class's
828 /// destructor. This is to call destructors on members and base classes
829 /// in reverse order of their construction.
///
/// This pushes cleanups rather than emitting code directly: cleanups pop
/// in reverse push order, so pushing in construction order yields
/// destruction in reverse-construction order. The variant selects what is
/// destroyed: Dtor_Deleting -> operator delete only; Dtor_Complete ->
/// virtual bases only; Dtor_Base -> non-virtual bases then fields.
/// NOTE(review): elided here — the `return` after each variant's pushes,
/// loop increments/`continue`s, the `if (Array)` vs. non-array choice
/// between CallArrayFieldDtor and CallFieldDtor, the cleanup-push
/// base-class arguments, and several closing braces are not visible.
830 void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
831 CXXDtorType DtorType) {
832 assert(!DD->isTrivial() &&
833 "Should not emit dtor epilogue for trivial dtor!");
835 // The deleting-destructor phase just needs to call the appropriate
836 // operator delete that Sema picked up.
837 if (DtorType == Dtor_Deleting) {
838 assert(DD->getOperatorDelete() &&
839 "operator delete missing - EmitDtorEpilogue");
840 EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
844 const CXXRecordDecl *ClassDecl = DD->getParent();
846 // The complete-destructor phase just destructs all the virtual bases.
847 if (DtorType == Dtor_Complete) {
849 // We push them in the forward order so that they'll be popped in
850 // the reverse order.
851 for (CXXRecordDecl::base_class_const_iterator I =
852 ClassDecl->vbases_begin(), E = ClassDecl->vbases_end();
854 const CXXBaseSpecifier &Base = *I;
855 CXXRecordDecl *BaseClassDecl
856 = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());
858 // Ignore trivial destructors.
859 if (BaseClassDecl->hasTrivialDestructor())
862 EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
864 /*BaseIsVirtual*/ true);
870 assert(DtorType == Dtor_Base);
872 // Destroy non-virtual bases.
873 for (CXXRecordDecl::base_class_const_iterator I =
874 ClassDecl->bases_begin(), E = ClassDecl->bases_end(); I != E; ++I) {
875 const CXXBaseSpecifier &Base = *I;
877 // Ignore virtual bases.
878 if (Base.isVirtual())
881 CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();
883 // Ignore trivial destructors.
884 if (BaseClassDecl->hasTrivialDestructor())
887 EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
889 /*BaseIsVirtual*/ false);
892 // Destroy direct fields.
893 llvm::SmallVector<const FieldDecl *, 16> FieldDecls;
894 for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
895 E = ClassDecl->field_end(); I != E; ++I) {
896 const FieldDecl *Field = *I;
898 QualType FieldType = getContext().getCanonicalType(Field->getType());
899 const ConstantArrayType *Array =
900 getContext().getAsConstantArrayType(FieldType);
902 FieldType = getContext().getBaseElementType(Array->getElementType());
904 const RecordType *RT = FieldType->getAs<RecordType>();
908 CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
909 if (FieldClassDecl->hasTrivialDestructor())
    // Array fields get the looping cleanup; scalar fields the simple one.
913 EHStack.pushCleanup<CallArrayFieldDtor>(NormalAndEHCleanup, Field);
915 EHStack.pushCleanup<CallFieldDtor>(NormalAndEHCleanup, Field);
919 /// EmitCXXAggrConstructorCall - This routine essentially creates a (nested)
920 /// for-loop to call the default constructor on individual members of the
/// array.
922 /// 'D' is the default constructor for elements of the array, 'ArrayTy' is the
923 /// array type and 'ArrayPtr' points to the beginning fo the array.
924 /// It is assumed that all relevant checks have been made by the caller.
926 /// \param ZeroInitialization True if each element should be zero-initialized
927 /// before it is constructed.
///
/// Convenience overload: flattens the constant array's total element count
/// into a size_t constant and forwards to the llvm::Value* overload below.
/// NOTE(review): the return type (`void`, presumably) and the trailing
/// ZeroInitialization argument/closing brace are elided here.
929 CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
930 const ConstantArrayType *ArrayTy,
931 llvm::Value *ArrayPtr,
932 CallExpr::const_arg_iterator ArgBeg,
933 CallExpr::const_arg_iterator ArgEnd,
934 bool ZeroInitialization) {
936 const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());
937 llvm::Value * NumElements =
938 llvm::ConstantInt::get(SizeTy,
939 getContext().getConstantArrayElementCount(ArrayTy));
941 EmitCXXAggrConstructorCall(D, NumElements, ArrayPtr, ArgBeg, ArgEnd,
// Emits a counted forward loop (loop.index from 0 while < NumElements)
// that runs the given constructor on each element of the array at
// ArrayPtr, optionally zero-initializing each element first. Temporaries
// created by default arguments are destroyed per-iteration via the
// RunCleanupsScope, as [class.temporary]p4 requires (see comment below).
// NOTE(review): elided here — the return type, the GEP's name argument,
// the EmitBlock(ForBody) between the CondBr and the loop body, the
// trailing arguments of EmitCXXConstructorCall (ArgBeg/ArgEnd), and the
// closing brace are not visible.
946 CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
947 llvm::Value *NumElements,
948 llvm::Value *ArrayPtr,
949 CallExpr::const_arg_iterator ArgBeg,
950 CallExpr::const_arg_iterator ArgEnd,
951 bool ZeroInitialization) {
952 const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());
954 // Create a temporary for the loop index and initialize it with 0.
955 llvm::Value *IndexPtr = CreateTempAlloca(SizeTy, "loop.index");
956 llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
957 Builder.CreateStore(Zero, IndexPtr);
959 // Start the loop with a block that tests the condition.
960 llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
961 llvm::BasicBlock *AfterFor = createBasicBlock("for.end");
963 EmitBlock(CondBlock);
965 llvm::BasicBlock *ForBody = createBasicBlock("for.body");
967 // Generate: if (loop-index < number-of-elements fall to the loop body,
968 // otherwise, go to the block after the for-loop.
969 llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
970 llvm::Value *IsLess = Builder.CreateICmpULT(Counter, NumElements, "isless");
971 // If the condition is true, execute the body.
972 Builder.CreateCondBr(IsLess, ForBody, AfterFor);
976 llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");
977 // Inside the loop body, emit the constructor call on the array element.
978 Counter = Builder.CreateLoad(IndexPtr);
979 llvm::Value *Address = Builder.CreateInBoundsGEP(ArrayPtr, Counter,
982 // Zero initialize the storage, if requested.
983 if (ZeroInitialization)
984 EmitNullInitialization(Address,
985 getContext().getTypeDeclType(D->getParent()));
987 // C++ [class.temporary]p4:
988 // There are two contexts in which temporaries are destroyed at a different
989 // point than the end of the full-expression. The first context is when a
990 // default constructor is called to initialize an element of an array.
991 // If the constructor has one or more default arguments, the destruction of
992 // every temporary created in a default argument expression is sequenced
993 // before the construction of the next array element, if any.
995 // Keep track of the current number of live temporaries.
997 RunCleanupsScope Scope(*this);
999 EmitCXXConstructorCall(D, Ctor_Complete, /*ForVirtualBase=*/false, Address,
1003 EmitBlock(ContinueBlock);
1005 // Emit the increment of the loop counter.
1006 llvm::Value *NextVal = llvm::ConstantInt::get(SizeTy, 1);
1007 Counter = Builder.CreateLoad(IndexPtr);
1008 NextVal = Builder.CreateAdd(Counter, NextVal, "inc");
1009 Builder.CreateStore(NextVal, IndexPtr);
1011 // Finally, branch back up to the condition for the next iteration.
1012 EmitBranch(CondBlock);
1014 // Emit the fall-through block.
1015 EmitBlock(AfterFor, true);
1018 /// EmitCXXAggrDestructorCall - calls the default destructor on array
1019 /// elements in reverse order of construction.
1021 CodeGenFunction::EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
1022 const ArrayType *Array,
1023 llvm::Value *This) {
1024 const ConstantArrayType *CA = dyn_cast<ConstantArrayType>(Array);
1025 assert(CA && "Do we support VLA for destruction ?");
1026 uint64_t ElementCount = getContext().getConstantArrayElementCount(CA);
1028 const llvm::Type *SizeLTy = ConvertType(getContext().getSizeType());
1029 llvm::Value* ElementCountPtr = llvm::ConstantInt::get(SizeLTy, ElementCount);
1030 EmitCXXAggrDestructorCall(D, ElementCountPtr, This);
1033 /// EmitCXXAggrDestructorCall - calls the default destructor on array
1034 /// elements in reverse order of construction.
1036 CodeGenFunction::EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
1037 llvm::Value *UpperCount,
1038 llvm::Value *This) {
1039 const llvm::Type *SizeLTy = ConvertType(getContext().getSizeType());
1040 llvm::Value *One = llvm::ConstantInt::get(SizeLTy, 1);
1042 // Create a temporary for the loop index and initialize it with count of
1044 llvm::Value *IndexPtr = CreateTempAlloca(SizeLTy, "loop.index");
1046 // Store the number of elements in the index pointer.
1047 Builder.CreateStore(UpperCount, IndexPtr);
1049 // Start the loop with a block that tests the condition.
1050 llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
1051 llvm::BasicBlock *AfterFor = createBasicBlock("for.end");
1053 EmitBlock(CondBlock);
1055 llvm::BasicBlock *ForBody = createBasicBlock("for.body");
1057 // Generate: if (loop-index != 0 fall to the loop body,
1058 // otherwise, go to the block after the for-loop.
1059 llvm::Value* zeroConstant =
1060 llvm::Constant::getNullValue(SizeLTy);
1061 llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
1062 llvm::Value *IsNE = Builder.CreateICmpNE(Counter, zeroConstant,
1064 // If the condition is true, execute the body.
1065 Builder.CreateCondBr(IsNE, ForBody, AfterFor);
1069 llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");
1070 // Inside the loop body, emit the constructor call on the array element.
1071 Counter = Builder.CreateLoad(IndexPtr);
1072 Counter = Builder.CreateSub(Counter, One);
1073 llvm::Value *Address = Builder.CreateInBoundsGEP(This, Counter, "arrayidx");
1074 EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false, Address);
1076 EmitBlock(ContinueBlock);
1078 // Emit the decrement of the loop counter.
1079 Counter = Builder.CreateLoad(IndexPtr);
1080 Counter = Builder.CreateSub(Counter, One, "dec");
1081 Builder.CreateStore(Counter, IndexPtr);
1083 // Finally, branch back up to the condition for the next iteration.
1084 EmitBranch(CondBlock);
1086 // Emit the fall-through block.
1087 EmitBlock(AfterFor, true);
1091 CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
1092 CXXCtorType Type, bool ForVirtualBase,
1094 CallExpr::const_arg_iterator ArgBeg,
1095 CallExpr::const_arg_iterator ArgEnd) {
1096 if (D->isTrivial()) {
1097 if (ArgBeg == ArgEnd) {
1098 // Trivial default constructor, no codegen required.
1099 assert(D->isDefaultConstructor() &&
1100 "trivial 0-arg ctor not a default ctor");
1104 assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
1105 assert(D->isCopyConstructor() && "trivial 1-arg ctor not a copy ctor");
1107 const Expr *E = (*ArgBeg);
1108 QualType Ty = E->getType();
1109 llvm::Value *Src = EmitLValue(E).getAddress();
1110 EmitAggregateCopy(This, Src, Ty);
1114 llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(D, Type), ForVirtualBase);
1115 llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D, Type);
1117 EmitCXXMemberCall(D, Callee, ReturnValueSlot(), This, VTT, ArgBeg, ArgEnd);
1121 CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
1122 CXXCtorType CtorType,
1123 const FunctionArgList &Args) {
1124 CallArgList DelegateArgs;
1126 FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
1127 assert(I != E && "no parameters to constructor");
1130 DelegateArgs.push_back(std::make_pair(RValue::get(LoadCXXThis()),
1135 if (llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(Ctor, CtorType),
1136 /*ForVirtualBase=*/false)) {
1137 QualType VoidPP = getContext().getPointerType(getContext().VoidPtrTy);
1138 DelegateArgs.push_back(std::make_pair(RValue::get(VTT), VoidPP));
1140 if (CodeGenVTables::needsVTTParameter(CurGD)) {
1141 assert(I != E && "cannot skip vtt parameter, already done with args");
1142 assert(I->second == VoidPP && "skipping parameter not of vtt type");
1147 // Explicit arguments.
1148 for (; I != E; ++I) {
1149 const VarDecl *Param = I->first;
1150 QualType ArgType = Param->getType(); // because we're passing it to itself
1151 RValue Arg = EmitDelegateCallArg(Param);
1153 DelegateArgs.push_back(std::make_pair(Arg, ArgType));
1156 EmitCall(CGM.getTypes().getFunctionInfo(Ctor, CtorType),
1157 CGM.GetAddrOfCXXConstructor(Ctor, CtorType),
1158 ReturnValueSlot(), DelegateArgs, Ctor);
1161 void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
1163 bool ForVirtualBase,
1164 llvm::Value *This) {
1165 llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(DD, Type),
1167 llvm::Value *Callee = CGM.GetAddrOfCXXDestructor(DD, Type);
1169 EmitCXXMemberCall(DD, Callee, ReturnValueSlot(), This, VTT, 0, 0);
1173 struct CallLocalDtor : EHScopeStack::Cleanup {
1174 const CXXDestructorDecl *Dtor;
1177 CallLocalDtor(const CXXDestructorDecl *D, llvm::Value *Addr)
1178 : Dtor(D), Addr(Addr) {}
1180 void Emit(CodeGenFunction &CGF, bool IsForEH) {
1181 CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
1182 /*ForVirtualBase=*/false, Addr);
1187 void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
1188 llvm::Value *Addr) {
1189 EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
1192 void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) {
1193 CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
1194 if (!ClassDecl) return;
1195 if (ClassDecl->hasTrivialDestructor()) return;
1197 const CXXDestructorDecl *D = ClassDecl->getDestructor();
1198 PushDestructorCleanup(D, Addr);
1202 CodeGenFunction::GetVirtualBaseClassOffset(llvm::Value *This,
1203 const CXXRecordDecl *ClassDecl,
1204 const CXXRecordDecl *BaseClassDecl) {
1205 const llvm::Type *Int8PtrTy =
1206 llvm::Type::getInt8Ty(VMContext)->getPointerTo();
1208 llvm::Value *VTablePtr = Builder.CreateBitCast(This,
1209 Int8PtrTy->getPointerTo());
1210 VTablePtr = Builder.CreateLoad(VTablePtr, "vtable");
1212 int64_t VBaseOffsetOffset =
1213 CGM.getVTables().getVirtualBaseOffsetOffset(ClassDecl, BaseClassDecl);
1215 llvm::Value *VBaseOffsetPtr =
1216 Builder.CreateConstGEP1_64(VTablePtr, VBaseOffsetOffset, "vbase.offset.ptr");
1217 const llvm::Type *PtrDiffTy =
1218 ConvertType(getContext().getPointerDiffType());
1220 VBaseOffsetPtr = Builder.CreateBitCast(VBaseOffsetPtr,
1221 PtrDiffTy->getPointerTo());
1223 llvm::Value *VBaseOffset = Builder.CreateLoad(VBaseOffsetPtr, "vbase.offset");
1229 CodeGenFunction::InitializeVTablePointer(BaseSubobject Base,
1230 const CXXRecordDecl *NearestVBase,
1231 uint64_t OffsetFromNearestVBase,
1232 llvm::Constant *VTable,
1233 const CXXRecordDecl *VTableClass) {
1234 const CXXRecordDecl *RD = Base.getBase();
1236 // Compute the address point.
1237 llvm::Value *VTableAddressPoint;
1239 // Check if we need to use a vtable from the VTT.
1240 if (CodeGenVTables::needsVTTParameter(CurGD) &&
1241 (RD->getNumVBases() || NearestVBase)) {
1242 // Get the secondary vpointer index.
1243 uint64_t VirtualPointerIndex =
1244 CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base);
1247 llvm::Value *VTT = LoadCXXVTT();
1248 if (VirtualPointerIndex)
1249 VTT = Builder.CreateConstInBoundsGEP1_64(VTT, VirtualPointerIndex);
1251 // And load the address point from the VTT.
1252 VTableAddressPoint = Builder.CreateLoad(VTT);
1254 uint64_t AddressPoint = CGM.getVTables().getAddressPoint(Base, VTableClass);
1255 VTableAddressPoint =
1256 Builder.CreateConstInBoundsGEP2_64(VTable, 0, AddressPoint);
1259 // Compute where to store the address point.
1260 llvm::Value *VirtualOffset = 0;
1261 uint64_t NonVirtualOffset = 0;
1263 if (CodeGenVTables::needsVTTParameter(CurGD) && NearestVBase) {
1264 // We need to use the virtual base offset offset because the virtual base
1265 // might have a different offset in the most derived class.
1266 VirtualOffset = GetVirtualBaseClassOffset(LoadCXXThis(), VTableClass,
1268 NonVirtualOffset = OffsetFromNearestVBase / 8;
1270 // We can just use the base offset in the complete class.
1271 NonVirtualOffset = Base.getBaseOffset() / 8;
1274 // Apply the offsets.
1275 llvm::Value *VTableField = LoadCXXThis();
1277 if (NonVirtualOffset || VirtualOffset)
1278 VTableField = ApplyNonVirtualAndVirtualOffset(*this, VTableField,
1282 // Finally, store the address point.
1283 const llvm::Type *AddressPointPtrTy =
1284 VTableAddressPoint->getType()->getPointerTo();
1285 VTableField = Builder.CreateBitCast(VTableField, AddressPointPtrTy);
1286 Builder.CreateStore(VTableAddressPoint, VTableField);
1290 CodeGenFunction::InitializeVTablePointers(BaseSubobject Base,
1291 const CXXRecordDecl *NearestVBase,
1292 uint64_t OffsetFromNearestVBase,
1293 bool BaseIsNonVirtualPrimaryBase,
1294 llvm::Constant *VTable,
1295 const CXXRecordDecl *VTableClass,
1296 VisitedVirtualBasesSetTy& VBases) {
1297 // If this base is a non-virtual primary base the address point has already
1299 if (!BaseIsNonVirtualPrimaryBase) {
1300 // Initialize the vtable pointer for this base.
1301 InitializeVTablePointer(Base, NearestVBase, OffsetFromNearestVBase,
1302 VTable, VTableClass);
1305 const CXXRecordDecl *RD = Base.getBase();
1308 for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
1309 E = RD->bases_end(); I != E; ++I) {
1310 CXXRecordDecl *BaseDecl
1311 = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
1313 // Ignore classes without a vtable.
1314 if (!BaseDecl->isDynamicClass())
1317 uint64_t BaseOffset;
1318 uint64_t BaseOffsetFromNearestVBase;
1319 bool BaseDeclIsNonVirtualPrimaryBase;
1321 if (I->isVirtual()) {
1322 // Check if we've visited this virtual base before.
1323 if (!VBases.insert(BaseDecl))
1326 const ASTRecordLayout &Layout =
1327 getContext().getASTRecordLayout(VTableClass);
1329 BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
1330 BaseOffsetFromNearestVBase = 0;
1331 BaseDeclIsNonVirtualPrimaryBase = false;
1333 const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
1335 BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
1336 BaseOffsetFromNearestVBase =
1337 OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
1338 BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
1341 InitializeVTablePointers(BaseSubobject(BaseDecl, BaseOffset),
1342 I->isVirtual() ? BaseDecl : NearestVBase,
1343 BaseOffsetFromNearestVBase,
1344 BaseDeclIsNonVirtualPrimaryBase,
1345 VTable, VTableClass, VBases);
1349 void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
1350 // Ignore classes without a vtable.
1351 if (!RD->isDynamicClass())
1355 llvm::Constant *VTable = CGM.getVTables().GetAddrOfVTable(RD);
1357 // Initialize the vtable pointers for this class and all of its bases.
1358 VisitedVirtualBasesSetTy VBases;
1359 InitializeVTablePointers(BaseSubobject(RD, 0), /*NearestVBase=*/0,
1360 /*OffsetFromNearestVBase=*/0,
1361 /*BaseIsNonVirtualPrimaryBase=*/false,
1362 VTable, RD, VBases);