//===--- CGCleanup.cpp - Bookkeeping and code emission for cleanups -------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains code dealing with the IR generation for cleanups
// and related information.
//
// A "cleanup" is a piece of code which needs to be executed whenever
// control transfers out of a particular scope.  This can be
// conditionalized to occur only on exceptional control flow, only on
// normal control flow, or both.
//
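// For illustration (an example added here, not code from this file), a
// cleanup typically arises when a local object needs its destructor run
// on scope exit:
//
//   {
//     std::string s;       // pushes a destructor cleanup
//     if (cond) return;    // the cleanup runs on this normal exit
//     mayThrow();          // and on the exceptional exit if this throws
//   }                      // and again on ordinary fallthrough
//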
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "CGCleanup.h"

using namespace clang;
using namespace CodeGen;

bool DominatingValue<RValue>::saved_type::needsSaving(RValue rv) {
  if (rv.isScalar())
    return DominatingLLVMValue::needsSaving(rv.getScalarVal());
  if (rv.isAggregate())
    return DominatingLLVMValue::needsSaving(rv.getAggregateAddr());
  return true;
}

DominatingValue<RValue>::saved_type
DominatingValue<RValue>::saved_type::save(CodeGenFunction &CGF, RValue rv) {
  if (rv.isScalar()) {
    llvm::Value *V = rv.getScalarVal();

    // These automatically dominate and don't need to be saved.
    if (!DominatingLLVMValue::needsSaving(V))
      return saved_type(V, ScalarLiteral);

    // Everything else needs an alloca.
    llvm::Value *addr = CGF.CreateTempAlloca(V->getType(), "saved-rvalue");
    CGF.Builder.CreateStore(V, addr);
    return saved_type(addr, ScalarAddress);
  }

  if (rv.isComplex()) {
    CodeGenFunction::ComplexPairTy V = rv.getComplexVal();
    const llvm::Type *ComplexTy =
      llvm::StructType::get(V.first->getType(), V.second->getType(),
                            (void*) 0);
    llvm::Value *addr = CGF.CreateTempAlloca(ComplexTy, "saved-complex");
    CGF.StoreComplexToAddr(V, addr, /*volatile*/ false);
    return saved_type(addr, ComplexAddress);
  }

  assert(rv.isAggregate());
  llvm::Value *V = rv.getAggregateAddr(); // TODO: volatile?
  if (!DominatingLLVMValue::needsSaving(V))
    return saved_type(V, AggregateLiteral);

  llvm::Value *addr = CGF.CreateTempAlloca(V->getType(), "saved-rvalue");
  CGF.Builder.CreateStore(V, addr);
  return saved_type(addr, AggregateAddress);
}

/// Given a saved r-value produced by save(), above, perform the code
/// necessary to restore it to usability at the current insertion
/// point.
RValue DominatingValue<RValue>::saved_type::restore(CodeGenFunction &CGF) {
  switch (K) {
  case ScalarLiteral:
    return RValue::get(Value);
  case ScalarAddress:
    return RValue::get(CGF.Builder.CreateLoad(Value));
  case AggregateLiteral:
    return RValue::getAggregate(Value);
  case AggregateAddress:
    return RValue::getAggregate(CGF.Builder.CreateLoad(Value));
  case ComplexAddress:
    return RValue::getComplex(CGF.LoadComplexFromAddr(Value, false));
  }

  llvm_unreachable("bad saved r-value kind");
  return RValue();
}

/// Push an entry of the given size onto this protected-scope stack.
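///
/// (Note on the layout, for readers of this code: the buffer grows
/// downward.  StartOfData marks the innermost entry and moves toward
/// StartOfBuffer as scopes are pushed, so once capacity is available a
/// push is just a pointer decrement; on reallocation the used region is
/// copied to the end of the new buffer.)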
char *EHScopeStack::allocate(size_t Size) {
  if (!StartOfBuffer) {
    unsigned Capacity = 1024;
    while (Capacity < Size) Capacity *= 2;
    StartOfBuffer = new char[Capacity];
    StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
  } else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
    unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
    unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);

    unsigned NewCapacity = CurrentCapacity;
    do {
      NewCapacity *= 2;
    } while (NewCapacity < UsedCapacity + Size);

    char *NewStartOfBuffer = new char[NewCapacity];
    char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
    char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
    memcpy(NewStartOfData, StartOfData, UsedCapacity);
    delete [] StartOfBuffer;
    StartOfBuffer = NewStartOfBuffer;
    EndOfBuffer = NewEndOfBuffer;
    StartOfData = NewStartOfData;
  }

  assert(StartOfBuffer + Size <= StartOfData);
  StartOfData -= Size;
  return StartOfData;
}

EHScopeStack::stable_iterator
EHScopeStack::getEnclosingEHCleanup(iterator it) const {
  assert(it != end());
  do {
    if (isa<EHCleanupScope>(*it)) {
      if (cast<EHCleanupScope>(*it).isEHCleanup())
        return stabilize(it);
      return cast<EHCleanupScope>(*it).getEnclosingEHCleanup();
    }
    ++it;
  } while (it != end());
  return stable_end();
}


void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
  assert(((Size % sizeof(void*)) == 0) && "cleanup type is misaligned");
  char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
  bool IsNormalCleanup = Kind & NormalCleanup;
  bool IsEHCleanup = Kind & EHCleanup;
  bool IsActive = !(Kind & InactiveCleanup);
  EHCleanupScope *Scope =
    new (Buffer) EHCleanupScope(IsNormalCleanup,
                                IsEHCleanup,
                                IsActive,
                                Size,
                                BranchFixups.size(),
                                InnermostNormalCleanup,
                                InnermostEHCleanup);
  if (IsNormalCleanup)
    InnermostNormalCleanup = stable_begin();
  if (IsEHCleanup)
    InnermostEHCleanup = stable_begin();

  return Scope->getCleanupBuffer();
}

void EHScopeStack::popCleanup() {
  assert(!empty() && "popping exception stack when empty");

  assert(isa<EHCleanupScope>(*begin()));
  EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
  InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
  InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
  StartOfData += Cleanup.getAllocatedSize();

  if (empty()) NextEHDestIndex = FirstEHDestIndex;

  // Destroy the cleanup.
  Cleanup.~EHCleanupScope();

  // Check whether we can shrink the branch-fixups stack.
  if (!BranchFixups.empty()) {
    // If we no longer have any normal cleanups, all the fixups are
    // complete.
    if (!hasNormalCleanups())
      BranchFixups.clear();

    // Otherwise we can still trim out unnecessary nulls.
    else
      popNullFixups();
  }
}

EHFilterScope *EHScopeStack::pushFilter(unsigned NumFilters) {
  char *Buffer = allocate(EHFilterScope::getSizeForNumFilters(NumFilters));
  CatchDepth++;
  return new (Buffer) EHFilterScope(NumFilters);
}

void EHScopeStack::popFilter() {
  assert(!empty() && "popping exception stack when empty");

  EHFilterScope &Filter = cast<EHFilterScope>(*begin());
  StartOfData += EHFilterScope::getSizeForNumFilters(Filter.getNumFilters());

  if (empty()) NextEHDestIndex = FirstEHDestIndex;

  assert(CatchDepth > 0 && "mismatched filter push/pop");
  CatchDepth--;
}

EHCatchScope *EHScopeStack::pushCatch(unsigned NumHandlers) {
  char *Buffer = allocate(EHCatchScope::getSizeForNumHandlers(NumHandlers));
  CatchDepth++;
  EHCatchScope *Scope = new (Buffer) EHCatchScope(NumHandlers);
  for (unsigned I = 0; I != NumHandlers; ++I)
    Scope->getHandlers()[I].Index = getNextEHDestIndex();
  return Scope;
}

void EHScopeStack::pushTerminate() {
  char *Buffer = allocate(EHTerminateScope::getSize());
  CatchDepth++;
  new (Buffer) EHTerminateScope(getNextEHDestIndex());
}

/// Remove any 'null' fixups on the stack.  However, we can't pop more
/// fixups than the fixup depth on the innermost normal cleanup, or
/// else fixups that we try to add to that cleanup will end up in the
/// wrong place.  We *could* try to shrink fixup depths, but that's
/// actually a lot of work for little benefit.
void EHScopeStack::popNullFixups() {
  // We expect this to only be called when there's still an innermost
  // normal cleanup;  otherwise there really shouldn't be any fixups.
  assert(hasNormalCleanups());

  EHScopeStack::iterator it = find(InnermostNormalCleanup);
  unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
  assert(BranchFixups.size() >= MinSize && "fixup stack out of order");

  while (BranchFixups.size() > MinSize &&
         BranchFixups.back().Destination == 0)
    BranchFixups.pop_back();
}

void CodeGenFunction::initFullExprCleanup() {
  // Create a variable to decide whether the cleanup needs to be run.
  llvm::AllocaInst *active
    = CreateTempAlloca(Builder.getInt1Ty(), "cleanup.cond");

  // Initialize it to false at a site that's guaranteed to be run
  // before each evaluation.
  llvm::BasicBlock *block = OutermostConditional->getStartingBlock();
  new llvm::StoreInst(Builder.getFalse(), active, &block->back());

  // Initialize it to true at the current location.
  Builder.CreateStore(Builder.getTrue(), active);

  // Set that as the active flag in the cleanup.
  EHCleanupScope &cleanup = cast<EHCleanupScope>(*EHStack.begin());
  assert(cleanup.getActiveFlag() == 0 && "cleanup already has active flag?");
  cleanup.setActiveFlag(active);

  if (cleanup.isNormalCleanup()) cleanup.setTestFlagInNormalCleanup();
  if (cleanup.isEHCleanup()) cleanup.setTestFlagInEHCleanup();
}

void EHScopeStack::Cleanup::anchor() {}

/// All the branch fixups on the EH stack have propagated out past the
/// outermost normal cleanup; resolve them all by adding cases to the
/// given switch instruction.
static void ResolveAllBranchFixups(CodeGenFunction &CGF,
                                   llvm::SwitchInst *Switch,
                                   llvm::BasicBlock *CleanupEntry) {
  llvm::SmallPtrSet<llvm::BasicBlock*, 4> CasesAdded;

  for (unsigned I = 0, E = CGF.EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination isn't set.
    BranchFixup &Fixup = CGF.EHStack.getBranchFixup(I);
    if (Fixup.Destination == 0) continue;

    // If there isn't an OptimisticBranchBlock, then InitialBranch is
    // still pointing directly to its destination; forward it to the
    // appropriate cleanup entry.  This is required in the specific
    // case of
    //   { std::string s; goto lbl; }
    //   lbl:
    // i.e. where there's an unresolved fixup inside a single cleanup
    // entry which we're currently popping.
    if (Fixup.OptimisticBranchBlock == 0) {
      new llvm::StoreInst(CGF.Builder.getInt32(Fixup.DestinationIndex),
                          CGF.getNormalCleanupDestSlot(),
                          Fixup.InitialBranch);
      Fixup.InitialBranch->setSuccessor(0, CleanupEntry);
    }

    // Don't add this case to the switch statement twice.
    if (!CasesAdded.insert(Fixup.Destination)) continue;

    Switch->addCase(CGF.Builder.getInt32(Fixup.DestinationIndex),
                    Fixup.Destination);
  }

  CGF.EHStack.clearFixups();
}

/// Transitions the terminator of the given exit-block of a cleanup to
/// be a cleanup switch.
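///
/// Schematically (illustrative IR, not emitted verbatim):
///   br label %target
/// becomes
///   %cleanup.dest = load i32* %cleanup.dest.slot
///   switch i32 %cleanup.dest, label %target [ ... cases added later ... ]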
static llvm::SwitchInst *TransitionToCleanupSwitch(CodeGenFunction &CGF,
                                                   llvm::BasicBlock *Block) {
  // If it's a branch, turn it into a switch whose default
  // destination is its original target.
  llvm::TerminatorInst *Term = Block->getTerminator();
  assert(Term && "can't transition block without terminator");

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional());
    llvm::LoadInst *Load =
      new llvm::LoadInst(CGF.getNormalCleanupDestSlot(), "cleanup.dest", Term);
    llvm::SwitchInst *Switch =
      llvm::SwitchInst::Create(Load, Br->getSuccessor(0), 4, Block);
    Br->eraseFromParent();
    return Switch;
  } else {
    return cast<llvm::SwitchInst>(Term);
  }
}

void CodeGenFunction::ResolveBranchFixups(llvm::BasicBlock *Block) {
  assert(Block && "resolving a null target block");
  if (!EHStack.getNumBranchFixups()) return;

  assert(EHStack.hasNormalCleanups() &&
         "branch fixups exist with no normal cleanups on stack");

  llvm::SmallPtrSet<llvm::BasicBlock*, 4> ModifiedOptimisticBlocks;
  bool ResolvedAny = false;

  for (unsigned I = 0, E = EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination doesn't match.
    BranchFixup &Fixup = EHStack.getBranchFixup(I);
    if (Fixup.Destination != Block) continue;

    Fixup.Destination = 0;
    ResolvedAny = true;

    // If it doesn't have an optimistic branch block, InitialBranch is
    // already pointing to the right place.
    llvm::BasicBlock *BranchBB = Fixup.OptimisticBranchBlock;
    if (!BranchBB)
      continue;

    // Don't process the same optimistic branch block twice.
    if (!ModifiedOptimisticBlocks.insert(BranchBB))
      continue;

    llvm::SwitchInst *Switch = TransitionToCleanupSwitch(*this, BranchBB);

    // Add a case to the switch.
    Switch->addCase(Builder.getInt32(Fixup.DestinationIndex), Block);
  }

  if (ResolvedAny)
    EHStack.popNullFixups();
}

/// Pops cleanup blocks until the given savepoint is reached.
void CodeGenFunction::PopCleanupBlocks(EHScopeStack::stable_iterator Old) {
  assert(Old.isValid());

  while (EHStack.stable_begin() != Old) {
    EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());

    // As long as Old strictly encloses the scope's enclosing normal
    // cleanup, we're going to emit another normal cleanup which
    // fallthrough can propagate through.
    bool FallThroughIsBranchThrough =
      Old.strictlyEncloses(Scope.getEnclosingNormalCleanup());

    PopCleanupBlock(FallThroughIsBranchThrough);
  }
}

static llvm::BasicBlock *CreateNormalEntry(CodeGenFunction &CGF,
                                           EHCleanupScope &Scope) {
  assert(Scope.isNormalCleanup());
  llvm::BasicBlock *Entry = Scope.getNormalBlock();
  if (!Entry) {
    Entry = CGF.createBasicBlock("cleanup");
    Scope.setNormalBlock(Entry);
  }
  return Entry;
}

static llvm::BasicBlock *CreateEHEntry(CodeGenFunction &CGF,
                                       EHCleanupScope &Scope) {
  assert(Scope.isEHCleanup());
  llvm::BasicBlock *Entry = Scope.getEHBlock();
  if (!Entry) {
    Entry = CGF.createBasicBlock("eh.cleanup");
    Scope.setEHBlock(Entry);
  }
  return Entry;
}

/// Attempts to reduce a cleanup's entry block to a fallthrough.  This
/// is basically llvm::MergeBlockIntoPredecessor, except
/// simplified/optimized for the tighter constraints on cleanup blocks.
///
/// Returns the new block, whatever it is.
static llvm::BasicBlock *SimplifyCleanupEntry(CodeGenFunction &CGF,
                                              llvm::BasicBlock *Entry) {
  llvm::BasicBlock *Pred = Entry->getSinglePredecessor();
  if (!Pred) return Entry;

  llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator());
  if (!Br || Br->isConditional()) return Entry;
  assert(Br->getSuccessor(0) == Entry);

  // If we were previously inserting at the end of the cleanup entry
  // block, we'll need to continue inserting at the end of the
  // predecessor.
  bool WasInsertBlock = CGF.Builder.GetInsertBlock() == Entry;
  assert(!WasInsertBlock || CGF.Builder.GetInsertPoint() == Entry->end());

  // Kill the branch.
  Br->eraseFromParent();

  // Replace all uses of the entry with the predecessor, in case there
  // are phis in the cleanup.
  Entry->replaceAllUsesWith(Pred);

  // Merge the blocks.
  Pred->getInstList().splice(Pred->end(), Entry->getInstList());

  // Kill the entry block.
  Entry->eraseFromParent();

  if (WasInsertBlock)
    CGF.Builder.SetInsertPoint(Pred);

  return Pred;
}

static void EmitCleanup(CodeGenFunction &CGF,
                        EHScopeStack::Cleanup *Fn,
                        EHScopeStack::Cleanup::Flags flags,
                        llvm::Value *ActiveFlag) {
  // EH cleanups always occur within a terminate scope.
  if (flags.isForEHCleanup()) CGF.EHStack.pushTerminate();

  // If there's an active flag, load it and skip the cleanup if it's
  // false.
  llvm::BasicBlock *ContBB = 0;
  if (ActiveFlag) {
    ContBB = CGF.createBasicBlock("cleanup.done");
    llvm::BasicBlock *CleanupBB = CGF.createBasicBlock("cleanup.action");
    llvm::Value *IsActive
      = CGF.Builder.CreateLoad(ActiveFlag, "cleanup.is_active");
    CGF.Builder.CreateCondBr(IsActive, CleanupBB, ContBB);
    CGF.EmitBlock(CleanupBB);
  }

  // Ask the cleanup to emit itself.
  Fn->Emit(CGF, flags);
  assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");

  // Emit the continuation block if there was an active flag.
  if (ActiveFlag)
    CGF.EmitBlock(ContBB);

  // Leave the terminate scope.
  if (flags.isForEHCleanup()) CGF.EHStack.popTerminate();
}

static void ForwardPrebranchedFallthrough(llvm::BasicBlock *Exit,
                                          llvm::BasicBlock *From,
                                          llvm::BasicBlock *To) {
  // Exit is the exit block of a cleanup, so it always terminates in
  // an unconditional branch or a switch.
  llvm::TerminatorInst *Term = Exit->getTerminator();

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional() && Br->getSuccessor(0) == From);
    Br->setSuccessor(0, To);
  } else {
    llvm::SwitchInst *Switch = cast<llvm::SwitchInst>(Term);
    for (unsigned I = 0, E = Switch->getNumSuccessors(); I != E; ++I)
      if (Switch->getSuccessor(I) == From)
        Switch->setSuccessor(I, To);
  }
}

/// Pops a cleanup block.  If the block includes a normal cleanup, the
/// current insertion point is threaded through the cleanup, as are
/// any branch fixups on the cleanup.
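///
/// For the normal path, the heavyweight case below proceeds in the
/// phases marked in the body: (I) wire up the incoming fallthrough
/// edge, (II) emit the entry block, (III) build the cleanup epilogue
/// (a direct branch, or a switch over the cleanup destination slot),
/// (IV) pop the scope and emit the cleanup itself, (V) wire up the
/// outgoing fallthrough edge, and (VI) try to simplify the entry
/// block away again.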
void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
  assert(!EHStack.empty() && "cleanup stack is empty!");
  assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
  assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());

  // Remember activation information.
  bool IsActive = Scope.isActive();
  llvm::Value *NormalActiveFlag =
    Scope.shouldTestFlagInNormalCleanup() ? Scope.getActiveFlag() : 0;
  llvm::Value *EHActiveFlag =
    Scope.shouldTestFlagInEHCleanup() ? Scope.getActiveFlag() : 0;

  // Check whether we need an EH cleanup.  This is only true if we've
  // generated a lazy EH cleanup block.
  bool RequiresEHCleanup = Scope.hasEHBranches();

  // Check the three conditions which might require a normal cleanup:

  // - whether there are branch fix-ups through this cleanup
  unsigned FixupDepth = Scope.getFixupDepth();
  bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;

  // - whether there are branch-throughs or branch-afters
  bool HasExistingBranches = Scope.hasBranches();

  // - whether there's a fallthrough
  llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();
  bool HasFallthrough = (FallthroughSource != 0 && IsActive);

  // Branch-through fall-throughs leave the insertion point set to the
  // end of the last cleanup, which points to the current scope.  The
  // rest of IR gen doesn't need to worry about this; it only happens
  // during the execution of PopCleanupBlocks().
  bool HasPrebranchedFallthrough =
    (FallthroughSource && FallthroughSource->getTerminator());

  // If this is a normal cleanup, then having a prebranched
  // fallthrough implies that the fallthrough source unconditionally
  // jumps here.
  assert(!Scope.isNormalCleanup() || !HasPrebranchedFallthrough ||
         (Scope.getNormalBlock() &&
          FallthroughSource->getTerminator()->getSuccessor(0)
            == Scope.getNormalBlock()));

  bool RequiresNormalCleanup = false;
  if (Scope.isNormalCleanup() &&
      (HasFixups || HasExistingBranches || HasFallthrough)) {
    RequiresNormalCleanup = true;
  }

  EHScopeStack::Cleanup::Flags cleanupFlags;
  if (Scope.isNormalCleanup())
    cleanupFlags.setIsNormalCleanupKind();
  if (Scope.isEHCleanup())
    cleanupFlags.setIsEHCleanupKind();

  // Even if we don't need the normal cleanup, we might still have
  // prebranched fallthrough to worry about.
  if (Scope.isNormalCleanup() && !RequiresNormalCleanup &&
      HasPrebranchedFallthrough) {
    assert(!IsActive);

    llvm::BasicBlock *NormalEntry = Scope.getNormalBlock();

    // If we're branching through this cleanup, just forward the
    // prebranched fallthrough to the next cleanup, leaving the insert
    // point in the old block.
    if (FallthroughIsBranchThrough) {
      EHScope &S = *EHStack.find(Scope.getEnclosingNormalCleanup());
      llvm::BasicBlock *EnclosingEntry =
        CreateNormalEntry(*this, cast<EHCleanupScope>(S));

      ForwardPrebranchedFallthrough(FallthroughSource,
                                    NormalEntry, EnclosingEntry);
      assert(NormalEntry->use_empty() &&
             "uses of entry remain after forwarding?");
      delete NormalEntry;

    // Otherwise, we're branching out;  just emit the next block.
    } else {
      EmitBlock(NormalEntry);
      SimplifyCleanupEntry(*this, NormalEntry);
    }
  }

  // If we don't need the cleanup at all, we're done.
  if (!RequiresNormalCleanup && !RequiresEHCleanup) {
    EHStack.popCleanup(); // safe because there are no fixups
    assert(EHStack.getNumBranchFixups() == 0 ||
           EHStack.hasNormalCleanups());
    return;
  }

  // Copy the cleanup emission data out.  Note that SmallVector
  // guarantees maximal alignment for its buffer regardless of its
  // type parameter.
  llvm::SmallVector<char, 8*sizeof(void*)> CleanupBuffer;
  CleanupBuffer.reserve(Scope.getCleanupSize());
  memcpy(CleanupBuffer.data(),
         Scope.getCleanupBuffer(), Scope.getCleanupSize());
  CleanupBuffer.set_size(Scope.getCleanupSize());
  EHScopeStack::Cleanup *Fn =
    reinterpret_cast<EHScopeStack::Cleanup*>(CleanupBuffer.data());

  // We want to emit the EH cleanup after the normal cleanup, but go
  // ahead and do the setup for the EH cleanup while the scope is still
  // alive.
  llvm::BasicBlock *EHEntry = 0;
  llvm::SmallVector<llvm::Instruction*, 2> EHInstsToAppend;
  if (RequiresEHCleanup) {
    EHEntry = CreateEHEntry(*this, Scope);

    // Figure out the branch-through dest if necessary.
    llvm::BasicBlock *EHBranchThroughDest = 0;
    if (Scope.hasEHBranchThroughs()) {
      assert(Scope.getEnclosingEHCleanup() != EHStack.stable_end());
      EHScope &S = *EHStack.find(Scope.getEnclosingEHCleanup());
      EHBranchThroughDest = CreateEHEntry(*this, cast<EHCleanupScope>(S));
    }

    // If we have exactly one branch-after and no branch-throughs, we
    // can dispatch it without a switch.
    if (!Scope.hasEHBranchThroughs() &&
        Scope.getNumEHBranchAfters() == 1) {
      assert(!EHBranchThroughDest);

      // TODO: remove the spurious eh.cleanup.dest stores if this edge
      // never went through any switches.
      llvm::BasicBlock *BranchAfterDest = Scope.getEHBranchAfterBlock(0);
      EHInstsToAppend.push_back(llvm::BranchInst::Create(BranchAfterDest));

    // Otherwise, if we have any branch-afters, we need a switch.
    } else if (Scope.getNumEHBranchAfters()) {
      // The default of the switch belongs to the branch-throughs if
      // they exist.
      llvm::BasicBlock *Default =
        (EHBranchThroughDest ? EHBranchThroughDest : getUnreachableBlock());

      const unsigned SwitchCapacity = Scope.getNumEHBranchAfters();

      llvm::LoadInst *Load =
        new llvm::LoadInst(getEHCleanupDestSlot(), "cleanup.dest");
      llvm::SwitchInst *Switch =
        llvm::SwitchInst::Create(Load, Default, SwitchCapacity);

      EHInstsToAppend.push_back(Load);
      EHInstsToAppend.push_back(Switch);

      for (unsigned I = 0, E = Scope.getNumEHBranchAfters(); I != E; ++I)
        Switch->addCase(Scope.getEHBranchAfterIndex(I),
                        Scope.getEHBranchAfterBlock(I));

    // Otherwise, we have only branch-throughs; jump to the next EH
    // cleanup.
    } else {
      assert(EHBranchThroughDest);
      EHInstsToAppend.push_back(llvm::BranchInst::Create(EHBranchThroughDest));
    }
  }

  if (!RequiresNormalCleanup) {
    EHStack.popCleanup();
  } else {
    // If we have a fallthrough and no other need for the cleanup,
    // emit it directly.
    if (HasFallthrough && !HasPrebranchedFallthrough &&
        !HasFixups && !HasExistingBranches) {

      // Fixups can cause us to optimistically create a normal block,
      // only to later have no real uses for it.  Just delete it in
      // this case.
      // TODO: we can potentially simplify all the uses after this.
      if (Scope.getNormalBlock()) {
        Scope.getNormalBlock()->replaceAllUsesWith(getUnreachableBlock());
        delete Scope.getNormalBlock();
      }

      EHStack.popCleanup();

      EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

    // Otherwise, the best approach is to thread everything through
    // the cleanup block and then try to clean up after ourselves.
    } else {
      // Force the entry block to exist.
      llvm::BasicBlock *NormalEntry = CreateNormalEntry(*this, Scope);

      // I.  Set up the fallthrough edge in.

      // If there's a fallthrough, we need to store the cleanup
      // destination index.  For fall-throughs this is always zero.
      if (HasFallthrough) {
        if (!HasPrebranchedFallthrough)
          Builder.CreateStore(Builder.getInt32(0), getNormalCleanupDestSlot());

      // Otherwise, clear the IP if we don't have fallthrough because
      // the cleanup is inactive.  We don't need to save it because
      // it's still just FallthroughSource.
      } else if (FallthroughSource) {
        assert(!IsActive && "source without fallthrough for active cleanup");
        Builder.ClearInsertionPoint();
      }

      // II.  Emit the entry block.  This implicitly branches to it if
      // we have fallthrough.  All the fixups and existing branches
      // should already be branched to it.
      EmitBlock(NormalEntry);

      // III.  Figure out where we're going and build the cleanup
      // epilogue.

      bool HasEnclosingCleanups =
        (Scope.getEnclosingNormalCleanup() != EHStack.stable_end());

      // Compute the branch-through dest if we need it:
      //   - if there are branch-throughs threaded through the scope
      //   - if fall-through is a branch-through
      //   - if there are fixups that will be optimistically forwarded
      //     to the enclosing cleanup
      llvm::BasicBlock *BranchThroughDest = 0;
      if (Scope.hasBranchThroughs() ||
          (FallthroughSource && FallthroughIsBranchThrough) ||
          (HasFixups && HasEnclosingCleanups)) {
        assert(HasEnclosingCleanups);
        EHScope &S = *EHStack.find(Scope.getEnclosingNormalCleanup());
        BranchThroughDest = CreateNormalEntry(*this, cast<EHCleanupScope>(S));
      }

      llvm::BasicBlock *FallthroughDest = 0;
      llvm::SmallVector<llvm::Instruction*, 2> InstsToAppend;

      // If there's exactly one branch-after and no other threads,
      // we can route it without a switch.
      if (!Scope.hasBranchThroughs() && !HasFixups && !HasFallthrough &&
          Scope.getNumBranchAfters() == 1) {
        assert(!BranchThroughDest || !IsActive);

        // TODO: clean up the possibly dead stores to the cleanup dest slot.
        llvm::BasicBlock *BranchAfter = Scope.getBranchAfterBlock(0);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchAfter));

      // Build a switch-out if we need it:
      //   - if there are branch-afters threaded through the scope
      //   - if fall-through is a branch-after
      //   - if there are fixups that have nowhere left to go and
      //     so must be immediately resolved
      } else if (Scope.getNumBranchAfters() ||
                 (HasFallthrough && !FallthroughIsBranchThrough) ||
                 (HasFixups && !HasEnclosingCleanups)) {

        llvm::BasicBlock *Default =
          (BranchThroughDest ? BranchThroughDest : getUnreachableBlock());

        // TODO: base this on the number of branch-afters and fixups
        const unsigned SwitchCapacity = 10;

        llvm::LoadInst *Load =
          new llvm::LoadInst(getNormalCleanupDestSlot(), "cleanup.dest");
        llvm::SwitchInst *Switch =
          llvm::SwitchInst::Create(Load, Default, SwitchCapacity);

        InstsToAppend.push_back(Load);
        InstsToAppend.push_back(Switch);

        // Branch-after fallthrough.
        if (FallthroughSource && !FallthroughIsBranchThrough) {
          FallthroughDest = createBasicBlock("cleanup.cont");
          if (HasFallthrough)
            Switch->addCase(Builder.getInt32(0), FallthroughDest);
        }

        for (unsigned I = 0, E = Scope.getNumBranchAfters(); I != E; ++I) {
          Switch->addCase(Scope.getBranchAfterIndex(I),
                          Scope.getBranchAfterBlock(I));
        }

        // If there aren't any enclosing cleanups, we can resolve all
        // the fixups now.
        if (HasFixups && !HasEnclosingCleanups)
          ResolveAllBranchFixups(*this, Switch, NormalEntry);
      } else {
        // We should always have a branch-through destination in this case.
        assert(BranchThroughDest);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchThroughDest));
      }

      // IV.  Pop the cleanup and emit it.
      EHStack.popCleanup();
      assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);

      EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

      // Append the prepared cleanup epilogue from above.
      llvm::BasicBlock *NormalExit = Builder.GetInsertBlock();
      for (unsigned I = 0, E = InstsToAppend.size(); I != E; ++I)
        NormalExit->getInstList().push_back(InstsToAppend[I]);

      // Optimistically hope that any fixups will continue falling through.
      for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
           I < E; ++I) {
        BranchFixup &Fixup = EHStack.getBranchFixup(I);
        if (!Fixup.Destination) continue;
        if (!Fixup.OptimisticBranchBlock) {
          new llvm::StoreInst(Builder.getInt32(Fixup.DestinationIndex),
                              getNormalCleanupDestSlot(),
                              Fixup.InitialBranch);
          Fixup.InitialBranch->setSuccessor(0, NormalEntry);
        }
        Fixup.OptimisticBranchBlock = NormalExit;
      }

      // V.  Set up the fallthrough edge out.

      // Case 1: a fallthrough source exists but shouldn't branch to
      // the cleanup because the cleanup is inactive.
      if (!HasFallthrough && FallthroughSource) {
        assert(!IsActive);

        // If we have a prebranched fallthrough, that needs to be
        // forwarded to the right block.
        if (HasPrebranchedFallthrough) {
          llvm::BasicBlock *Next;
          if (FallthroughIsBranchThrough) {
            Next = BranchThroughDest;
            assert(!FallthroughDest);
          } else {
            Next = FallthroughDest;
          }

          ForwardPrebranchedFallthrough(FallthroughSource, NormalEntry, Next);
        }
        Builder.SetInsertPoint(FallthroughSource);

      // Case 2: a fallthrough source exists and should branch to the
      // cleanup, but we're not supposed to branch through to the next
      // cleanup.
      } else if (HasFallthrough && FallthroughDest) {
        assert(!FallthroughIsBranchThrough);
        EmitBlock(FallthroughDest);

      // Case 3: a fallthrough source exists and should branch to the
      // cleanup and then through to the next.
      } else if (HasFallthrough) {
        // Everything is already set up for this.

      // Case 4: no fallthrough source exists.
      } else {
        Builder.ClearInsertionPoint();
      }

      // VI.  Assorted cleaning.

      // Check whether we can merge NormalEntry into a single predecessor.
      // This might invalidate (non-IR) pointers to NormalEntry.
      llvm::BasicBlock *NewNormalEntry =
        SimplifyCleanupEntry(*this, NormalEntry);

      // If it did invalidate those pointers, and NormalEntry was the same
      // as NormalExit, go back and patch up the fixups.
      if (NewNormalEntry != NormalEntry && NormalEntry == NormalExit)
        for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
               I < E; ++I)
          EHStack.getBranchFixup(I).OptimisticBranchBlock = NewNormalEntry;
    }
  }

  assert(EHStack.hasNormalCleanups() || EHStack.getNumBranchFixups() == 0);

  // Emit the EH cleanup if required.
  if (RequiresEHCleanup) {
    CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();

    EmitBlock(EHEntry);

    cleanupFlags.setIsForEHCleanup();
    EmitCleanup(*this, Fn, cleanupFlags, EHActiveFlag);

    // Append the prepared cleanup epilogue from above.
    llvm::BasicBlock *EHExit = Builder.GetInsertBlock();
    for (unsigned I = 0, E = EHInstsToAppend.size(); I != E; ++I)
      EHExit->getInstList().push_back(EHInstsToAppend[I]);

    Builder.restoreIP(SavedIP);

    SimplifyCleanupEntry(*this, EHEntry);
  }
}

/// isObviouslyBranchWithoutCleanups - Return true if a branch to the
/// specified destination obviously has no cleanups to run.  'false' is always
/// a conservatively correct answer for this method.
bool CodeGenFunction::isObviouslyBranchWithoutCleanups(JumpDest Dest) const {
  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
         && "stale jump destination");

  // Calculate the innermost active normal cleanup.
  EHScopeStack::stable_iterator TopCleanup =
    EHStack.getInnermostActiveNormalCleanup();

  // If we're not in an active normal cleanup scope, or if the
  // destination scope is within the innermost active normal cleanup
  // scope, we don't need to worry about fixups.
  if (TopCleanup == EHStack.stable_end() ||
      TopCleanup.encloses(Dest.getScopeDepth())) // works for invalid
    return true;

  // Otherwise, we might need some cleanups.
  return false;
}


/// Terminate the current block by emitting a branch which might leave
/// the current cleanup-protected scope.  The target scope may not yet
/// be known, in which case this will require a fixup.
///
/// As a side-effect, this method clears the insertion point.
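///
/// For example (an illustration, not code from this file), the goto here
/// must be threaded through the cleanup for 's' before control reaches
/// lbl:
///   { std::string s; goto lbl; }
///   lbl: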
void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
         && "stale jump destination");

  if (!HaveInsertPoint())
    return;

  // Create the branch.
  llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock());

  // Calculate the innermost active normal cleanup.
  EHScopeStack::stable_iterator
    TopCleanup = EHStack.getInnermostActiveNormalCleanup();

  // If we're not in an active normal cleanup scope, or if the
  // destination scope is within the innermost active normal cleanup
  // scope, we don't need to worry about fixups.
  if (TopCleanup == EHStack.stable_end() ||
      TopCleanup.encloses(Dest.getScopeDepth())) { // works for invalid
    Builder.ClearInsertionPoint();
    return;
  }

  // If we can't resolve the destination cleanup scope, just add this
  // to the current cleanup scope as a branch fixup.
  if (!Dest.getScopeDepth().isValid()) {
    BranchFixup &Fixup = EHStack.addBranchFixup();
    Fixup.Destination = Dest.getBlock();
    Fixup.DestinationIndex = Dest.getDestIndex();
    Fixup.InitialBranch = BI;
    Fixup.OptimisticBranchBlock = 0;

    Builder.ClearInsertionPoint();
    return;
  }

  // Otherwise, thread through all the normal cleanups in scope.

  // Store the index at the start.
  llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex());
  new llvm::StoreInst(Index, getNormalCleanupDestSlot(), BI);

  // Adjust BI to point to the first cleanup block.
  {
    EHCleanupScope &Scope =
      cast<EHCleanupScope>(*EHStack.find(TopCleanup));
    BI->setSuccessor(0, CreateNormalEntry(*this, Scope));
  }

  // Add this destination to all the scopes involved.
  EHScopeStack::stable_iterator I = TopCleanup;
  EHScopeStack::stable_iterator E = Dest.getScopeDepth();
  if (E.strictlyEncloses(I)) {
    while (true) {
      EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
      assert(Scope.isNormalCleanup());
      I = Scope.getEnclosingNormalCleanup();

      // If this is the last cleanup we're propagating through, tell it
      // that there's a resolved jump moving through it.
      if (!E.strictlyEncloses(I)) {
        Scope.addBranchAfter(Index, Dest.getBlock());
        break;
      }

      // Otherwise, tell the scope that there's a jump propagating
      // through it.  If this isn't new information, all the rest of
      // the work has been done before.
      if (!Scope.addBranchThrough(Dest.getBlock()))
        break;
    }
  }

  Builder.ClearInsertionPoint();
}

void CodeGenFunction::EmitBranchThroughEHCleanup(UnwindDest Dest) {
  // We should never get invalid scope depths for an UnwindDest; that
  // implies that the destination wasn't set up correctly.
  assert(Dest.getScopeDepth().isValid() && "invalid scope depth on EH dest?");

  if (!HaveInsertPoint())
    return;

  // Create the branch.
  llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock());

  // Calculate the innermost active cleanup.
  EHScopeStack::stable_iterator
    InnermostCleanup = EHStack.getInnermostActiveEHCleanup();

  // If the destination is in the same EH cleanup scope as us, we
  // don't need to thread through anything.
  if (InnermostCleanup.encloses(Dest.getScopeDepth())) {
    Builder.ClearInsertionPoint();
    return;
  }
  assert(InnermostCleanup != EHStack.stable_end());

  // Store the index at the start.
  llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex());
  new llvm::StoreInst(Index, getEHCleanupDestSlot(), BI);

  // Adjust BI to point to the first cleanup block.
  {
    EHCleanupScope &Scope =
      cast<EHCleanupScope>(*EHStack.find(InnermostCleanup));
    BI->setSuccessor(0, CreateEHEntry(*this, Scope));
  }

  // Add this destination to all the scopes involved.
  for (EHScopeStack::stable_iterator
         I = InnermostCleanup, E = Dest.getScopeDepth(); ; ) {
    assert(E.strictlyEncloses(I));
    EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
    assert(Scope.isEHCleanup());
    I = Scope.getEnclosingEHCleanup();

    // If this is the last cleanup we're propagating through, add this
    // as a branch-after.
    if (I == E) {
      Scope.addEHBranchAfter(Index, Dest.getBlock());
      break;
    }

    // Otherwise, add it as a branch-through.  If this isn't new
    // information, all the rest of the work has been done before.
    if (!Scope.addEHBranchThrough(Dest.getBlock()))
      break;
  }

  Builder.ClearInsertionPoint();
}

static bool IsUsedAsNormalCleanup(EHScopeStack &EHStack,
                                  EHScopeStack::stable_iterator C) {
  // If we needed a normal block for any reason, that counts.
  if (cast<EHCleanupScope>(*EHStack.find(C)).getNormalBlock())
    return true;

  // Check whether any enclosed cleanups were needed.
  for (EHScopeStack::stable_iterator
         I = EHStack.getInnermostNormalCleanup();
         I != C; ) {
    assert(C.strictlyEncloses(I));
    EHCleanupScope &S = cast<EHCleanupScope>(*EHStack.find(I));
    if (S.getNormalBlock()) return true;
    I = S.getEnclosingNormalCleanup();
  }

  return false;
}

static bool IsUsedAsEHCleanup(EHScopeStack &EHStack,
                              EHScopeStack::stable_iterator C) {
  // If we needed an EH block for any reason, that counts.
  if (cast<EHCleanupScope>(*EHStack.find(C)).getEHBlock())
    return true;

  // Check whether any enclosed cleanups were needed.
  for (EHScopeStack::stable_iterator
         I = EHStack.getInnermostEHCleanup(); I != C; ) {
    assert(C.strictlyEncloses(I));
    EHCleanupScope &S = cast<EHCleanupScope>(*EHStack.find(I));
    if (S.getEHBlock()) return true;
    I = S.getEnclosingEHCleanup();
  }

  return false;
}

enum ForActivation_t {
  ForActivation,
  ForDeactivation
};

/// The given cleanup block is changing activation state.  Configure a
/// cleanup variable if necessary.
///
/// It would be good if we had some way of determining if there were
/// extra uses *after* the change-over point.
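///
/// When a flag is needed, it is initialized to the cleanup's state
/// prior to this point (true when deactivating, false when activating)
/// and then stored with the new state here, so only uses dominated by
/// the change-over point observe the new state.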
static void SetupCleanupBlockActivation(CodeGenFunction &CGF,
                                        EHScopeStack::stable_iterator C,
                                        ForActivation_t Kind) {
  EHCleanupScope &Scope = cast<EHCleanupScope>(*CGF.EHStack.find(C));

  // We always need the flag if we're activating the cleanup, because
  // we have to assume that the current location doesn't necessarily
  // dominate all future uses of the cleanup.
  bool NeedFlag = (Kind == ForActivation);

  // Calculate whether the cleanup was used:

  //   - as a normal cleanup
  if (Scope.isNormalCleanup() && IsUsedAsNormalCleanup(CGF.EHStack, C)) {
    Scope.setTestFlagInNormalCleanup();
    NeedFlag = true;
  }

  //  - as an EH cleanup
  if (Scope.isEHCleanup() && IsUsedAsEHCleanup(CGF.EHStack, C)) {
    Scope.setTestFlagInEHCleanup();
    NeedFlag = true;
  }

  // If it hasn't yet been used as either, we're done.
  if (!NeedFlag) return;

  llvm::AllocaInst *Var = Scope.getActiveFlag();
  if (!Var) {
    Var = CGF.CreateTempAlloca(CGF.Builder.getInt1Ty(), "cleanup.isactive");
    Scope.setActiveFlag(Var);

    // Initialize to true or false depending on whether it was
    // active up to this point.
    CGF.InitTempAlloca(Var, CGF.Builder.getInt1(Kind == ForDeactivation));
  }

  CGF.Builder.CreateStore(CGF.Builder.getInt1(Kind == ForActivation), Var);
}

/// Activate a cleanup that was created in an inactive state.
void CodeGenFunction::ActivateCleanupBlock(EHScopeStack::stable_iterator C) {
  assert(C != EHStack.stable_end() && "activating bottom of stack?");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
  assert(!Scope.isActive() && "double activation");

  SetupCleanupBlockActivation(*this, C, ForActivation);

  Scope.setActive(true);
}

/// Deactivate a cleanup that was created in an active state.
void CodeGenFunction::DeactivateCleanupBlock(EHScopeStack::stable_iterator C) {
  assert(C != EHStack.stable_end() && "deactivating bottom of stack?");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
  assert(Scope.isActive() && "double deactivation");

  // If it's the top of the stack, just pop it.
  if (C == EHStack.stable_begin()) {
    // If it's a normal cleanup, we need to pretend that the
    // fallthrough is unreachable.
    CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
    PopCleanupBlock();
    Builder.restoreIP(SavedIP);
    return;
  }

  // Otherwise, follow the general case.
  SetupCleanupBlockActivation(*this, C, ForDeactivation);

  Scope.setActive(false);
}

llvm::Value *CodeGenFunction::getNormalCleanupDestSlot() {
  if (!NormalCleanupDest)
    NormalCleanupDest =
      CreateTempAlloca(Builder.getInt32Ty(), "cleanup.dest.slot");
  return NormalCleanupDest;
}

llvm::Value *CodeGenFunction::getEHCleanupDestSlot() {
  if (!EHCleanupDest)
    EHCleanupDest =
      CreateTempAlloca(Builder.getInt32Ty(), "eh.cleanup.dest.slot");
  return EHCleanupDest;
}