1 //===--- CGDecl.cpp - Emit LLVM Code for declarations ---------------------===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This contains code to emit Decl nodes as LLVM code.
12 //===----------------------------------------------------------------------===//
14 #include "CGDebugInfo.h"
15 #include "CodeGenFunction.h"
16 #include "CodeGenModule.h"
17 #include "clang/AST/ASTContext.h"
18 #include "clang/AST/Decl.h"
19 #include "clang/AST/DeclObjC.h"
20 #include "clang/Basic/SourceManager.h"
21 #include "clang/Basic/TargetInfo.h"
22 #include "clang/CodeGen/CodeGenOptions.h"
23 #include "llvm/GlobalVariable.h"
24 #include "llvm/Intrinsics.h"
25 #include "llvm/Target/TargetData.h"
26 #include "llvm/Type.h"
27 using namespace clang;
28 using namespace CodeGen;
// EmitDecl - Emit LLVM IR for a declaration appearing in statement
// context, dispatching on the declaration's kind.
// NOTE(review): this listing is fragmentary -- several original lines
// (case labels, 'return' statements, closing braces) are missing
// between the numbered lines below; comments describe only what is
// visible here.
31 void CodeGenFunction::EmitDecl(const Decl &D) {
32 switch (D.getKind()) {
// Unhandled kinds are reported as unsupported rather than asserted on.
34 CGM.ErrorUnsupported(&D, "decl");
// Parameters are emitted from the function prologue (EmitParmDecl),
// never from a DeclStmt -- reaching here indicates a frontend bug.
37 assert(0 && "Parmdecls should not be in declstmts!");
38 case Decl::Function: // void X();
39 case Decl::Record: // struct/union/class X;
40 case Decl::Enum: // enum X;
41 case Decl::EnumConstant: // enum ? { X = ? }
42 case Decl::CXXRecord: // struct/union/class X; [C++]
43 case Decl::Using: // using X; [C++]
44 case Decl::UsingShadow:
45 case Decl::UsingDirective: // using namespace X; [C++]
46 case Decl::StaticAssert: // static_assert(X, ""); [C++0x]
47 // None of these decls require codegen support.
// Variable declarations: must be block-scope here; file-scope vars
// are handled by CodeGenModule, not by function codegen.
51 const VarDecl &VD = cast<VarDecl>(D);
52 assert(VD.isBlockVarDecl() &&
53 "Should not see file-scope variables inside a function!");
54 return EmitBlockVarDecl(VD);
57 case Decl::Typedef: { // typedef int X;
58 const TypedefDecl &TD = cast<TypedefDecl>(D);
59 QualType Ty = TD.getUnderlyingType();
// A typedef of a variably-modified type must evaluate its VLA size
// expression at the point of declaration.
61 if (Ty->isVariablyModifiedType())
67 /// EmitBlockVarDecl - This method handles emission of any variable declaration
68 /// inside a function, including static vars etc.
69 void CodeGenFunction::EmitBlockVarDecl(const VarDecl &D) {
// GCC-style asm labels on locals are not supported by this path;
// report instead of silently miscompiling.
70 if (D.hasAttr<AsmLabelAttr>())
71 CGM.ErrorUnsupported(&D, "__asm__");
// Dispatch on storage class: autos/registers become stack objects,
// statics become globals, (private-)extern decls are deferred.
// NOTE(review): the case labels preceding lines 76 and 79 of the
// original (presumably None/Auto and Static) are missing from this
// listing -- confirm against the full file.
73 switch (D.getStorageClass()) {
76 case VarDecl::Register:
77 return EmitLocalBlockVarDecl(D);
79 return EmitStaticBlockVarDecl(D);
81 case VarDecl::PrivateExtern:
82 // Don't emit it now, allow it to be emitted lazily on its first use.
86 assert(0 && "Unknown storage class");
// GetStaticDeclName - Compute the module-level name for the global
// backing a function-local static variable. In C++ the mangler
// already produces a unique name; otherwise the name is built as
// <enclosing-function-name><Separator><variable-name>.
89 static std::string GetStaticDeclName(CodeGenFunction &CGF, const VarDecl &D,
90 const char *Separator) {
91 CodeGenModule &CGM = CGF.CGM;
92 if (CGF.getContext().getLangOptions().CPlusPlus)
93 return CGM.getMangledName(&D);
95 std::string ContextName;
96 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CGF.CurFuncDecl))
97 ContextName = CGM.getMangledName(FD);
98 else if (isa<ObjCMethodDecl>(CGF.CurFuncDecl))
99 ContextName = CGF.CurFn->getName();
// NOTE(review): the 'else' branch introducing this assert appears to
// be missing from this listing (original line 100).
101 // FIXME: What about in a block??
102 assert(0 && "Unknown context for block var decl");
104 return ContextName + Separator + D.getNameAsString();
// CreateStaticBlockVarDecl - Create the LLVM global variable backing
// a function-local static. The global starts out null-initialized;
// the real initializer is attached later via
// AddInitializerToGlobalBlockVarDecl.
107 llvm::GlobalVariable *
108 CodeGenFunction::CreateStaticBlockVarDecl(const VarDecl &D,
109 const char *Separator,
110 llvm::GlobalValue::LinkageTypes Linkage) {
111 QualType Ty = D.getType();
112 assert(Ty->isConstantSizeType() && "VLAs can't be static");
114 std::string Name = GetStaticDeclName(*this, D, Separator);
116 const llvm::Type *LTy = CGM.getTypes().ConvertTypeForMem(Ty);
// Constness, thread-locality and address space are all carried over
// from the declaration; alignment is set from Sema's computed value.
117 llvm::GlobalVariable *GV =
118 new llvm::GlobalVariable(CGM.getModule(), LTy,
119 Ty.isConstant(getContext()), Linkage,
120 CGM.EmitNullConstant(D.getType()), Name, 0,
121 D.isThreadSpecified(), Ty.getAddressSpace());
122 GV->setAlignment(getContext().getDeclAlignInBytes(&D));
// NOTE(review): the 'return GV;' line and closing brace are missing
// from this listing.
126 /// AddInitializerToGlobalBlockVarDecl - Add the initializer for 'D' to the
127 /// global variable that has already been created for it. If the initializer
128 /// has a different type than GV does, this may free GV and return a different
129 /// one. Otherwise it just returns GV.
130 llvm::GlobalVariable *
131 CodeGenFunction::AddInitializerToGlobalBlockVarDecl(const VarDecl &D,
132 llvm::GlobalVariable *GV) {
// First try to fold the initializer to an LLVM constant.
133 llvm::Constant *Init = CGM.EmitConstantExpr(D.getInit(), D.getType(), this);
135 // If constant emission failed, then this should be a C++ static
// variable with dynamic (run-once) initialization; in C a
// non-constant initializer for a static is simply unsupported.
138 if (!getContext().getLangOptions().CPlusPlus)
139 CGM.ErrorUnsupported(D.getInit(), "constant l-value expression");
// C++ path: emit the guarded dynamic initialization code.
141 EmitStaticCXXBlockVarDeclInit(D, GV);
145 // The initializer may differ in type from the global. Rewrite
146 // the global to match the initializer. (We have to do this
147 // because some types, like unions, can't be completely represented
148 // in the LLVM type system.)
149 if (GV->getType() != Init->getType()) {
150 llvm::GlobalVariable *OldGV = GV;
152 GV = new llvm::GlobalVariable(CGM.getModule(), Init->getType(),
154 OldGV->getLinkage(), Init, "",
155 0, D.isThreadSpecified(),
156 D.getType().getAddressSpace());
158 // Steal the name of the old global
161 // Replace all uses of the old global with the new global
// The bitcast keeps existing uses type-correct while pointing them
// at the retyped global.
162 llvm::Constant *NewPtrForOldDecl =
163 llvm::ConstantExpr::getBitCast(GV, OldGV->getType());
164 OldGV->replaceAllUsesWith(NewPtrForOldDecl);
166 // Erase the old global, since it is no longer used.
167 OldGV->eraseFromParent();
170 GV->setInitializer(Init);
// EmitStaticBlockVarDecl - Emit a function-local static variable:
// create its backing global, attach its initializer, handle the
// annotate/section/used attributes, and record the (possibly
// bitcast) pointer in LocalDeclMap for subsequent references.
174 void CodeGenFunction::EmitStaticBlockVarDecl(const VarDecl &D) {
175 llvm::Value *&DMEntry = LocalDeclMap[&D];
176 assert(DMEntry == 0 && "Decl already exists in localdeclmap!");
178 llvm::GlobalVariable *GV =
179 CreateStaticBlockVarDecl(D, ".", llvm::GlobalValue::InternalLinkage);
181 // Store into LocalDeclMap before generating initializer to handle
182 // circular references.
185 // Make sure to evaluate VLA bounds now so that we have them for later.
187 // FIXME: Can this happen?
188 if (D.getType()->isVariablyModifiedType())
189 EmitVLASize(D.getType());
191 // If this value has an initializer, emit it.
// NOTE(review): the guard (presumably 'if (D.getInit())') before this
// call is missing from this listing.
193 GV = AddInitializerToGlobalBlockVarDecl(D, GV);
195 // FIXME: Merge attribute handling.
196 if (const AnnotateAttr *AA = D.getAttr<AnnotateAttr>()) {
197 SourceManager &SM = CGM.getContext().getSourceManager();
198 llvm::Constant *Ann =
199 CGM.EmitAnnotateAttr(GV, AA,
200 SM.getInstantiationLineNumber(D.getLocation()));
201 CGM.AddAnnotation(Ann);
204 if (const SectionAttr *SA = D.getAttr<SectionAttr>())
205 GV->setSection(SA->getName());
207 if (D.hasAttr<UsedAttr>())
208 CGM.AddUsedGlobal(GV);
210 // We may have to cast the constant because of the initializer
213 // FIXME: It is really dangerous to store this in the map; if anyone
214 // RAUW's the GV uses of this constant will be invalid.
215 const llvm::Type *LTy = CGM.getTypes().ConvertTypeForMem(D.getType());
216 const llvm::Type *LPtrTy =
217 llvm::PointerType::get(LTy, D.getType().getAddressSpace());
218 DMEntry = llvm::ConstantExpr::getBitCast(GV, LPtrTy);
220 // Emit global variable debug descriptor for static vars.
// NOTE(review): the null-check on DI before these calls appears to be
// missing from this listing (original line 222).
221 CGDebugInfo *DI = getDebugInfo();
223 DI->setLocation(D.getLocation());
224 DI->EmitGlobalVariable(static_cast<llvm::GlobalVariable *>(GV), &D);
// getByRefValueLLVMField - Return the struct field index, recorded by
// BuildByRefType in ByRefValueInfo, at which the __block variable's
// actual storage lives inside its byref structure.
228 unsigned CodeGenFunction::getByRefValueLLVMField(const ValueDecl *VD) const {
229 assert(ByRefValueInfo.count(VD) && "Did not find value!");
231 return ByRefValueInfo.find(VD)->second.second;
234 /// BuildByRefType - This routine changes a __block variable declared as T x
// NOTE(review): several lines of this doc comment and of the function
// body (isa/flags/size field pushes, closing braces) are missing from
// this listing; the visible layout matches the blocks byref struct:
// isa, forwarding, flags, size, optional copy/destroy helpers,
// optional padding, then the variable itself as the last field.
239 /// void *__forwarding;
242 /// void *__copy_helper; // only if needed
243 /// void *__destroy_helper; // only if needed
244 /// char padding[X]; // only if needed
248 const llvm::Type *CodeGenFunction::BuildByRefType(const ValueDecl *D) {
// Cached result: Info.first is the byref struct type, Info.second the
// field index of the variable's storage (see getByRefValueLLVMField).
249 std::pair<const llvm::Type *, unsigned> &Info = ByRefValueInfo[D];
253 QualType Ty = D->getType();
255 std::vector<const llvm::Type *> Types;
257 const llvm::PointerType *Int8PtrTy = llvm::Type::getInt8PtrTy(VMContext);
// The struct refers to itself via __forwarding, so build it around an
// opaque placeholder that is refined to the final type below.
259 llvm::PATypeHolder ByRefTypeHolder = llvm::OpaqueType::get(VMContext);
262 Types.push_back(Int8PtrTy);
264 // void *__forwarding;
265 Types.push_back(llvm::PointerType::getUnqual(ByRefTypeHolder));
268 Types.push_back(llvm::Type::getInt32Ty(VMContext));
271 Types.push_back(llvm::Type::getInt32Ty(VMContext));
273 bool HasCopyAndDispose = BlockRequiresCopying(Ty);
274 if (HasCopyAndDispose) {
275 /// void *__copy_helper;
276 Types.push_back(Int8PtrTy);
278 /// void *__destroy_helper;
279 Types.push_back(Int8PtrTy);
// If the variable is more aligned than a pointer, pad the struct so
// the T field lands on its required boundary, then mark it packed.
283 unsigned Align = getContext().getDeclAlignInBytes(D);
284 if (Align > Target.getPointerAlign(0) / 8) {
285 // We have to insert padding.
287 // The struct above has 2 32-bit integers.
288 unsigned CurrentOffsetInBytes = 4 * 2;
290 // And either 2 or 4 pointers.
291 CurrentOffsetInBytes += (HasCopyAndDispose ? 4 : 2) *
292 CGM.getTargetData().getTypeAllocSize(Int8PtrTy);
295 unsigned AlignedOffsetInBytes =
296 llvm::RoundUpToAlignment(CurrentOffsetInBytes, Align);
298 unsigned NumPaddingBytes = AlignedOffsetInBytes - CurrentOffsetInBytes;
299 if (NumPaddingBytes > 0) {
300 const llvm::Type *Ty = llvm::Type::getInt8Ty(VMContext);
301 // FIXME: We need a sema error for alignment larger than the minimum of
302 // the maximal stack alignmint and the alignment of malloc on the system.
303 if (NumPaddingBytes > 1)
304 Ty = llvm::ArrayType::get(Ty, NumPaddingBytes);
308 // We want a packed struct.
// Finally the variable's own storage, always the last field.
314 Types.push_back(ConvertType(Ty));
316 const llvm::Type *T = llvm::StructType::get(VMContext, Types, Packed);
318 cast<llvm::OpaqueType>(ByRefTypeHolder.get())->refineAbstractTypeTo(T);
319 CGM.getModule().addTypeName("struct.__block_byref_" + D->getNameAsString(),
320 ByRefTypeHolder.get());
322 Info.first = ByRefTypeHolder.get();
324 Info.second = Types.size() - 1;
329 /// EmitLocalBlockVarDecl - Emit code and set up an entry in LocalDeclMap for a
330 /// variable declaration with auto, register, or no storage class specifier.
331 /// These turn into simple stack objects, or GlobalValues depending on target.
// NOTE(review): this function is heavily sampled in this listing --
// many original lines (else branches, closing braces, declarations
// such as 'Align', 'flag', 'flags', 'isa', 'Packed', 'DtorTy',
// 'Args') are missing between the numbered lines. Comments below
// annotate only the visible code.
332 void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D) {
333 QualType Ty = D.getType();
334 bool isByRef = D.hasAttr<BlocksAttr>();
335 bool needsDispose = false;
337 bool IsSimpleConstantInitializer = false;
// --- Allocate storage: fixed-size -> alloca (or global on targets
// without recursion support); variable-size -> stacksave + dynamic
// alloca for the VLA.
339 llvm::Value *DeclPtr;
340 if (Ty->isConstantSizeType()) {
341 if (!Target.useGlobalsForAutomaticVariables()) {
343 // If this value is an array or struct, is POD, and if the initializer is
344 // a staticly determinable constant, try to optimize it.
345 if (D.getInit() && !isByRef &&
346 (Ty->isArrayType() || Ty->isRecordType()) &&
348 D.getInit()->isConstantInitializer(getContext())) {
349 // If this variable is marked 'const', emit the value as a global.
350 if (CGM.getCodeGenOpts().MergeAllConstants &&
351 Ty.isConstant(getContext())) {
352 EmitStaticBlockVarDecl(D);
// Remember the constant-initializer fact so the memset/memcpy fast
// path below can be used instead of element-wise initialization.
356 IsSimpleConstantInitializer = true;
359 // A normal fixed sized variable becomes an alloca in the entry block.
360 const llvm::Type *LTy = ConvertTypeForMem(Ty);
// __block variables get the byref struct layout instead of plain T.
362 LTy = BuildByRefType(&D);
363 llvm::AllocaInst *Alloc = CreateTempAlloca(LTy);
364 Alloc->setName(D.getNameAsString());
366 Align = getContext().getDeclAlignInBytes(&D);
// Byref structs must be at least pointer-aligned for the runtime.
368 Align = std::max(Align, unsigned(Target.getPointerAlign(0) / 8));
369 Alloc->setAlignment(Align);
372 // Targets that don't support recursion emit locals as globals.
374 D.getStorageClass() == VarDecl::Register ? ".reg." : ".auto.";
375 DeclPtr = CreateStaticBlockVarDecl(D, Class,
380 // FIXME: Can this happen?
381 if (Ty->isVariablyModifiedType())
// Save the stack pointer once per function before the first VLA so
// it can be restored when the scope is exited.
386 if (!DidCallStackSave) {
388 const llvm::Type *LTy = llvm::Type::getInt8PtrTy(VMContext);
389 llvm::Value *Stack = CreateTempAlloca(LTy, "saved_stack");
391 llvm::Value *F = CGM.getIntrinsic(llvm::Intrinsic::stacksave);
392 llvm::Value *V = Builder.CreateCall(F);
394 Builder.CreateStore(V, Stack);
396 DidCallStackSave = true;
399 // Push a cleanup block and restore the stack there.
400 DelayedCleanupBlock scope(*this);
402 V = Builder.CreateLoad(Stack, "tmp");
403 llvm::Value *F = CGM.getIntrinsic(llvm::Intrinsic::stackrestore);
404 Builder.CreateCall(F, V);
408 // Get the element type.
409 const llvm::Type *LElemTy = ConvertTypeForMem(Ty);
410 const llvm::Type *LElemPtrTy =
411 llvm::PointerType::get(LElemTy, D.getType().getAddressSpace());
413 llvm::Value *VLASize = EmitVLASize(Ty);
415 // Downcast the VLA size expression
416 VLASize = Builder.CreateIntCast(VLASize, llvm::Type::getInt32Ty(VMContext),
419 // Allocate memory for the array.
420 llvm::AllocaInst *VLA =
421 Builder.CreateAlloca(llvm::Type::getInt8Ty(VMContext), VLASize, "vla");
422 VLA->setAlignment(getContext().getDeclAlignInBytes(&D));
424 DeclPtr = Builder.CreateBitCast(VLA, LElemPtrTy, "tmp");
// --- Record the storage in LocalDeclMap so later DeclRefExprs find it.
427 llvm::Value *&DMEntry = LocalDeclMap[&D];
428 assert(DMEntry == 0 && "Decl already exists in localdeclmap!");
431 // Emit debug info for local var declaration.
432 if (CGDebugInfo *DI = getDebugInfo()) {
433 assert(HaveInsertPoint() && "Unexpected unreachable point!");
435 DI->setLocation(D.getLocation());
436 if (Target.useGlobalsForAutomaticVariables()) {
437 DI->EmitGlobalVariable(static_cast<llvm::GlobalVariable *>(DeclPtr), &D);
439 DI->EmitDeclareOfAutoVariable(&D, DeclPtr, Builder);
442 // If this local has an initializer, emit it now.
443 const Expr *Init = D.getInit();
445 // If we are at an unreachable point, we don't need to emit the initializer
446 // unless it contains a label.
447 if (!HaveInsertPoint()) {
448 if (!ContainsLabel(Init))
// For __block variables the store target is the T field inside the
// byref struct, not the struct itself.
455 llvm::Value *Loc = DeclPtr;
457 Loc = Builder.CreateStructGEP(DeclPtr, getByRefValueLLVMField(&D),
458 D.getNameAsString());
461 getContext().getCanonicalType(D.getType()).isVolatileQualified();
463 // If the initializer was a simple constant initializer, we can optimize it
// All-zero constants become a memset; other constants become an
// internal global plus a memcpy into the alloca.
465 if (IsSimpleConstantInitializer) {
466 llvm::Constant *Init = CGM.EmitConstantExpr(D.getInit(),D.getType(),this);
467 assert(Init != 0 && "Wasn't a simple constant init?");
469 llvm::Value *AlignVal =
470 llvm::ConstantInt::get(llvm::Type::getInt32Ty(VMContext), Align);
471 const llvm::Type *IntPtr =
472 llvm::IntegerType::get(VMContext, LLVMPointerWidth);
473 llvm::Value *SizeVal =
474 llvm::ConstantInt::get(IntPtr, getContext().getTypeSizeInBytes(Ty));
476 const llvm::Type *BP = llvm::Type::getInt8PtrTy(VMContext);
477 if (Loc->getType() != BP)
478 Loc = Builder.CreateBitCast(Loc, BP, "tmp");
480 // If the initializer is all zeros, codegen with memset.
481 if (isa<llvm::ConstantAggregateZero>(Init)) {
483 llvm::ConstantInt::get(llvm::Type::getInt8Ty(VMContext), 0);
484 Builder.CreateCall4(CGM.getMemSetFn(), Loc, Zero, SizeVal, AlignVal);
486 // Otherwise, create a temporary global with the initializer then
487 // memcpy from the global to the alloca.
488 std::string Name = GetStaticDeclName(*this, D, ".");
489 llvm::GlobalVariable *GV =
490 new llvm::GlobalVariable(CGM.getModule(), Init->getType(), true,
491 llvm::GlobalValue::InternalLinkage,
492 Init, Name, 0, false, 0);
493 GV->setAlignment(Align);
495 llvm::Value *SrcPtr = GV;
496 if (SrcPtr->getType() != BP)
497 SrcPtr = Builder.CreateBitCast(SrcPtr, BP, "tmp");
499 Builder.CreateCall4(CGM.getMemCpyFn(), Loc, SrcPtr, SizeVal, AlignVal);
// Non-constant initializers: dispatch on reference / scalar /
// complex / aggregate emission paths.
501 } else if (Ty->isReferenceType()) {
502 RValue RV = EmitReferenceBindingToExpr(Init, Ty, /*IsInitializer=*/true);
503 EmitStoreOfScalar(RV.getScalarVal(), Loc, false, Ty);
504 } else if (!hasAggregateLLVMType(Init->getType())) {
505 llvm::Value *V = EmitScalarExpr(Init);
506 EmitStoreOfScalar(V, Loc, isVolatile, D.getType());
507 } else if (Init->getType()->isAnyComplexType()) {
508 EmitComplexExprIntoAddr(Init, Loc, isVolatile);
510 EmitAggExpr(Init, Loc, isVolatile);
// --- __block variables: fill in the byref struct header fields
// (isa, forwarding, flags, size, and copy/destroy helpers when the
// type requires copy/dispose support).
515 const llvm::PointerType *PtrToInt8Ty = llvm::Type::getInt8PtrTy(VMContext);
518 llvm::Value *isa_field = Builder.CreateStructGEP(DeclPtr, 0);
519 llvm::Value *forwarding_field = Builder.CreateStructGEP(DeclPtr, 1);
520 llvm::Value *flags_field = Builder.CreateStructGEP(DeclPtr, 2);
521 llvm::Value *size_field = Builder.CreateStructGEP(DeclPtr, 3);
528 if (Ty->isBlockPointerType()) {
529 flag |= BLOCK_FIELD_IS_BLOCK;
530 flags |= BLOCK_HAS_COPY_DISPOSE;
531 } else if (BlockRequiresCopying(Ty)) {
532 flag |= BLOCK_FIELD_IS_OBJECT;
533 flags |= BLOCK_HAS_COPY_DISPOSE;
536 // FIXME: Someone double check this.
537 if (Ty.isObjCGCWeak())
538 flag |= BLOCK_FIELD_IS_WEAK;
541 if (flag&BLOCK_FIELD_IS_WEAK)
543 V = llvm::ConstantInt::get(llvm::Type::getInt32Ty(VMContext), isa);
544 V = Builder.CreateIntToPtr(V, PtrToInt8Ty, "isa");
545 Builder.CreateStore(V, isa_field);
// forwarding initially points at the struct itself; the runtime
// redirects it when the variable is moved to the heap.
547 Builder.CreateStore(DeclPtr, forwarding_field);
549 V = llvm::ConstantInt::get(llvm::Type::getInt32Ty(VMContext), flags);
550 Builder.CreateStore(V, flags_field);
// Store the byref struct's own size in the size field.
552 const llvm::Type *V1;
553 V1 = cast<llvm::PointerType>(DeclPtr->getType())->getElementType();
554 V = llvm::ConstantInt::get(llvm::Type::getInt32Ty(VMContext),
555 (CGM.getTargetData().getTypeStoreSizeInBits(V1)
557 Builder.CreateStore(V, size_field);
559 if (flags & BLOCK_HAS_COPY_DISPOSE) {
560 BlockHasCopyDispose = true;
561 llvm::Value *copy_helper = Builder.CreateStructGEP(DeclPtr, 4);
562 Builder.CreateStore(BuildbyrefCopyHelper(DeclPtr->getType(), flag, Align),
565 llvm::Value *destroy_helper = Builder.CreateStructGEP(DeclPtr, 5);
566 Builder.CreateStore(BuildbyrefDestroyHelper(DeclPtr->getType(), flag,
572 // Handle CXX destruction of variables.
// Strip array types to find the element record, then register
// delayed (normal-path) and EH cleanups calling its destructor.
574 while (const ArrayType *Array = getContext().getAsArrayType(DtorTy))
575 DtorTy = getContext().getBaseElementType(Array);
576 if (const RecordType *RT = DtorTy->getAs<RecordType>())
577 if (CXXRecordDecl *ClassDecl = dyn_cast<CXXRecordDecl>(RT->getDecl())) {
578 if (!ClassDecl->hasTrivialDestructor()) {
579 const CXXDestructorDecl *D = ClassDecl->getDestructor(getContext());
580 assert(D && "EmitLocalBlockVarDecl - destructor is nul");
582 if (const ConstantArrayType *Array =
583 getContext().getAsConstantArrayType(Ty)) {
585 DelayedCleanupBlock Scope(*this);
586 QualType BaseElementTy = getContext().getBaseElementType(Array);
587 const llvm::Type *BasePtr = ConvertType(BaseElementTy);
588 BasePtr = llvm::PointerType::getUnqual(BasePtr);
589 llvm::Value *BaseAddrPtr =
590 Builder.CreateBitCast(DeclPtr, BasePtr);
591 EmitCXXAggrDestructorCall(D, Array, BaseAddrPtr);
593 // Make sure to jump to the exit block.
594 EmitBranch(Scope.getCleanupExitBlock());
597 EHCleanupBlock Cleanup(*this);
598 QualType BaseElementTy = getContext().getBaseElementType(Array);
599 const llvm::Type *BasePtr = ConvertType(BaseElementTy);
600 BasePtr = llvm::PointerType::getUnqual(BasePtr);
601 llvm::Value *BaseAddrPtr =
602 Builder.CreateBitCast(DeclPtr, BasePtr);
603 EmitCXXAggrDestructorCall(D, Array, BaseAddrPtr);
// Non-array case: a single complete-object destructor call, again on
// both the normal and the exceptional path.
607 DelayedCleanupBlock Scope(*this);
608 EmitCXXDestructorCall(D, Dtor_Complete, DeclPtr);
610 // Make sure to jump to the exit block.
611 EmitBranch(Scope.getCleanupExitBlock());
614 EHCleanupBlock Cleanup(*this);
615 EmitCXXDestructorCall(D, Dtor_Complete, DeclPtr);
621 // Handle the cleanup attribute
// __attribute__((cleanup(f))): call f(&var) when the variable goes
// out of scope, on both the normal and the exceptional path.
622 if (const CleanupAttr *CA = D.getAttr<CleanupAttr>()) {
623 const FunctionDecl *FD = CA->getFunctionDecl();
625 llvm::Constant* F = CGM.GetAddrOfFunction(FD);
626 assert(F && "Could not find function!");
628 const CGFunctionInfo &Info = CGM.getTypes().getFunctionInfo(FD);
630 // In some cases, the type of the function argument will be different from
631 // the type of the pointer. An example of this is
632 // void f(void* arg);
633 // __attribute__((cleanup(f))) void *g;
635 // To fix this we insert a bitcast here.
636 QualType ArgTy = Info.arg_begin()->type;
638 DelayedCleanupBlock scope(*this);
641 Args.push_back(std::make_pair(RValue::get(Builder.CreateBitCast(DeclPtr,
642 ConvertType(ArgTy))),
643 getContext().getPointerType(D.getType())));
644 EmitCall(Info, F, Args);
647 EHCleanupBlock Cleanup(*this);
650 Args.push_back(std::make_pair(RValue::get(Builder.CreateBitCast(DeclPtr,
651 ConvertType(ArgTy))),
652 getContext().getPointerType(D.getType())));
653 EmitCall(Info, F, Args);
// __block cleanup: release the byref storage through the (possibly
// heap-redirected) forwarding pointer, except under GC-only mode
// where the collector owns it.
657 if (needsDispose && CGM.getLangOptions().getGCMode() != LangOptions::GCOnly) {
659 DelayedCleanupBlock scope(*this);
660 llvm::Value *V = Builder.CreateStructGEP(DeclPtr, 1, "forwarding");
661 V = Builder.CreateLoad(V);
662 BuildBlockRelease(V);
664 // FIXME: Turn this on and audit the codegen
665 if (0 && Exceptions) {
666 EHCleanupBlock Cleanup(*this);
667 llvm::Value *V = Builder.CreateStructGEP(DeclPtr, 1, "forwarding");
668 V = Builder.CreateLoad(V);
669 BuildBlockRelease(V);
674 /// Emit an alloca (or GlobalValue depending on target)
675 /// for the specified parameter and set up LocalDeclMap.
// NOTE(review): a few lines (else branches, closing braces, and the
// LocalDeclMap store) are missing from this listing; annotations
// cover only the visible code.
676 void CodeGenFunction::EmitParmDecl(const VarDecl &D, llvm::Value *Arg) {
677 // FIXME: Why isn't ImplicitParamDecl a ParmVarDecl?
678 assert((isa<ParmVarDecl>(D) || isa<ImplicitParamDecl>(D)) &&
679 "Invalid argument to EmitParmDecl");
680 QualType Ty = D.getType();
681 CanQualType CTy = getContext().getCanonicalType(Ty);
683 llvm::Value *DeclPtr;
684 if (!Ty->isConstantSizeType()) {
685 // Variable sized values always are passed by-reference.
688 // A fixed sized single-value variable becomes an alloca in the entry block.
689 const llvm::Type *LTy = ConvertTypeForMem(Ty);
690 if (LTy->isSingleValueType()) {
// Scalars: allocate a named ".addr" slot and spill the incoming
// argument value into it.
692 DeclPtr = CreateTempAlloca(LTy);
693 DeclPtr->setName(D.getNameAsString() + llvm::StringRef(".addr"));
695 // Store the initial value into the alloca.
696 EmitStoreOfScalar(Arg, DeclPtr, CTy.isVolatileQualified(), Ty);
698 // Otherwise, if this is an aggregate, just use the input pointer.
701 Arg->setName(D.getNameAsString());
704 llvm::Value *&DMEntry = LocalDeclMap[&D];
705 assert(DMEntry == 0 && "Decl already exists in localdeclmap!");
708 // Emit debug info for param declaration.
709 if (CGDebugInfo *DI = getDebugInfo()) {
710 DI->setLocation(D.getLocation());
711 DI->EmitDeclareOfArgVariable(&D, DeclPtr, Builder);