//===--- CGCleanup.cpp - Bookkeeping and code emission for cleanups -------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains code dealing with the IR generation for cleanups
// and related information.
//
// A "cleanup" is a piece of code which needs to be executed whenever
// control transfers out of a particular scope.  This can be
// conditionalized to occur only on exceptional control flow, only on
// normal control flow, or both.
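//
// For example, in
//
//   { std::string s = f(); g(); }
//
// the destructor call for 's' is a cleanup: it must run on the normal
// path when control falls out of the block, and on the exceptional path
// if g() throws.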
//
//===----------------------------------------------------------------------===//

#include "CGCleanup.h"
#include "CodeGenFunction.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;
using namespace CodeGen;

bool DominatingValue<RValue>::saved_type::needsSaving(RValue rv) {
  if (rv.isScalar())
    return DominatingLLVMValue::needsSaving(rv.getScalarVal());
  if (rv.isAggregate())
    return DominatingLLVMValue::needsSaving(rv.getAggregatePointer());
  return true;
}

DominatingValue<RValue>::saved_type
DominatingValue<RValue>::saved_type::save(CodeGenFunction &CGF, RValue rv) {
  if (rv.isScalar()) {
    llvm::Value *V = rv.getScalarVal();

    // These automatically dominate and don't need to be saved.
    if (!DominatingLLVMValue::needsSaving(V))
      return saved_type(V, ScalarLiteral);

    // Everything else needs an alloca.
    Address addr =
        CGF.CreateDefaultAlignTempAlloca(V->getType(), "saved-rvalue");
    CGF.Builder.CreateStore(V, addr);
    return saved_type(addr.getPointer(), ScalarAddress);
  }

  if (rv.isComplex()) {
    CodeGenFunction::ComplexPairTy V = rv.getComplexVal();
    llvm::Type *ComplexTy =
        llvm::StructType::get(V.first->getType(), V.second->getType());
    Address addr = CGF.CreateDefaultAlignTempAlloca(ComplexTy, "saved-complex");
    CGF.Builder.CreateStore(V.first, CGF.Builder.CreateStructGEP(addr, 0));
    CGF.Builder.CreateStore(V.second, CGF.Builder.CreateStructGEP(addr, 1));
    return saved_type(addr.getPointer(), ComplexAddress);
  }

  assert(rv.isAggregate());
  Address V = rv.getAggregateAddress(); // TODO: volatile?
  if (!DominatingLLVMValue::needsSaving(V.getPointer()))
    return saved_type(V.getPointer(), AggregateLiteral,
                      V.getAlignment().getQuantity());

  Address addr =
      CGF.CreateTempAlloca(V.getType(), CGF.getPointerAlign(), "saved-rvalue");
  CGF.Builder.CreateStore(V.getPointer(), addr);
  return saved_type(addr.getPointer(), AggregateAddress,
                    V.getAlignment().getQuantity());
}

/// Given a saved r-value produced by save(), perform the code necessary
/// to restore it to usability at the current insertion point.
RValue DominatingValue<RValue>::saved_type::restore(CodeGenFunction &CGF) {
  auto getSavingAddress = [&](llvm::Value *value) {
    auto alignment = cast<llvm::AllocaInst>(value)->getAlignment();
    return Address(value, CharUnits::fromQuantity(alignment));
  };
  switch (K) {
  case ScalarLiteral:
    return RValue::get(Value);
  case ScalarAddress:
    return RValue::get(CGF.Builder.CreateLoad(getSavingAddress(Value)));
  case AggregateLiteral:
    return RValue::getAggregate(Address(Value, CharUnits::fromQuantity(Align)));
  case AggregateAddress: {
    auto addr = CGF.Builder.CreateLoad(getSavingAddress(Value));
    return RValue::getAggregate(Address(addr, CharUnits::fromQuantity(Align)));
  }
  case ComplexAddress: {
    Address address = getSavingAddress(Value);
    llvm::Value *real =
        CGF.Builder.CreateLoad(CGF.Builder.CreateStructGEP(address, 0));
    llvm::Value *imag =
        CGF.Builder.CreateLoad(CGF.Builder.CreateStructGEP(address, 1));
    return RValue::getComplex(real, imag);
  }
  }

  llvm_unreachable("bad saved r-value kind");
}

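/// The scope stack is laid out at the high end of a single buffer:
/// StartOfData moves down toward StartOfBuffer as entries are pushed,
/// so the innermost scope always begins at StartOfData.
///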
/// Push an entry of the given size onto this protected-scope stack.
char *EHScopeStack::allocate(size_t Size) {
  Size = llvm::alignTo(Size, ScopeStackAlignment);
  if (!StartOfBuffer) {
    unsigned Capacity = 1024;
    while (Capacity < Size) Capacity *= 2;
    StartOfBuffer = new char[Capacity];
    StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
  } else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
    unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
    unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);

    unsigned NewCapacity = CurrentCapacity;
    do {
      NewCapacity *= 2;
    } while (NewCapacity < UsedCapacity + Size);

    char *NewStartOfBuffer = new char[NewCapacity];
    char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
    char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
    memcpy(NewStartOfData, StartOfData, UsedCapacity);
    delete [] StartOfBuffer;
    StartOfBuffer = NewStartOfBuffer;
    EndOfBuffer = NewEndOfBuffer;
    StartOfData = NewStartOfData;
  }

  assert(StartOfBuffer + Size <= StartOfData);
  StartOfData -= Size;
  return StartOfData;
}

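/// Pop the innermost entry off the protected-scope stack. This only
/// adjusts StartOfData; the entry's bytes stay intact until the next
/// push, which is why callers may still destroy the entry afterwards.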
void EHScopeStack::deallocate(size_t Size) {
  StartOfData += llvm::alignTo(Size, ScopeStackAlignment);
}

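/// Returns true if every scope between the innermost scope and the
/// savepoint 'Old' is a cleanup whose only purpose is to emit lifetime
/// markers, i.e. there is nothing substantial to run between here and
/// 'Old'.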
bool EHScopeStack::containsOnlyLifetimeMarkers(
    EHScopeStack::stable_iterator Old) const {
  for (EHScopeStack::iterator it = begin(); stabilize(it) != Old; it++) {
    EHCleanupScope *cleanup = dyn_cast<EHCleanupScope>(&*it);
    if (!cleanup || !cleanup->isLifetimeMarker())
      return false;
  }

  return true;
}

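/// Returns true if this function needs a landing pad: that is, if any
/// EH scope other than a lifetime-marker cleanup is on the stack.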
bool EHScopeStack::requiresLandingPad() const {
  for (stable_iterator si = getInnermostEHScope(); si != stable_end(); ) {
    // Skip lifetime markers.
    if (auto *cleanup = dyn_cast<EHCleanupScope>(&*find(si)))
      if (cleanup->isLifetimeMarker()) {
        si = cleanup->getEnclosingEHScope();
        continue;
      }
    return true;
  }

  return false;
}

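/// Returns the innermost normal cleanup that is currently active, or
/// stable_end() if every normal cleanup on the stack is inactive.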
EHScopeStack::stable_iterator
EHScopeStack::getInnermostActiveNormalCleanup() const {
  for (stable_iterator si = getInnermostNormalCleanup(), se = stable_end();
         si != se; ) {
    EHCleanupScope &cleanup = cast<EHCleanupScope>(*find(si));
    if (cleanup.isActive()) return si;
    si = cleanup.getEnclosingNormalCleanup();
  }
  return stable_end();
}


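/// Allocate space for a cleanup of the given kind and payload size and
/// link it in as the new innermost scope. Returns the payload buffer;
/// the caller is expected to placement-new the concrete Cleanup object
/// into it.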
void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
  char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
  bool IsNormalCleanup = Kind & NormalCleanup;
  bool IsEHCleanup = Kind & EHCleanup;
  bool IsLifetimeMarker = Kind & LifetimeMarker;
  EHCleanupScope *Scope =
    new (Buffer) EHCleanupScope(IsNormalCleanup,
                                IsEHCleanup,
                                Size,
                                BranchFixups.size(),
                                InnermostNormalCleanup,
                                InnermostEHScope);
  if (IsNormalCleanup)
    InnermostNormalCleanup = stable_begin();
  if (IsEHCleanup)
    InnermostEHScope = stable_begin();
  if (IsLifetimeMarker)
    Scope->setLifetimeMarker();

  return Scope->getCleanupBuffer();
}

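/// Pop the innermost scope, which must be a cleanup, restoring the
/// enclosing-scope links and trimming the branch-fixup stack where
/// possible.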
void EHScopeStack::popCleanup() {
  assert(!empty() && "popping exception stack when empty");

  assert(isa<EHCleanupScope>(*begin()));
  EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
  InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
  InnermostEHScope = Cleanup.getEnclosingEHScope();
  deallocate(Cleanup.getAllocatedSize());

  // Destroy the cleanup.
  Cleanup.Destroy();

  // Check whether we can shrink the branch-fixups stack.
  if (!BranchFixups.empty()) {
    // If we no longer have any normal cleanups, all the fixups are
    // complete.
    if (!hasNormalCleanups())
      BranchFixups.clear();

    // Otherwise we can still trim out unnecessary nulls.
    else
      popNullFixups();
  }
}

224
225EHFilterScope *EHScopeStack::pushFilter(unsigned numFilters) {
226 assert(getInnermostEHScope() == stable_end());
227 char *buffer = allocate(EHFilterScope::getSizeForNumFilters(numFilters));
228 EHFilterScope *filter = new (buffer) EHFilterScope(numFilters);
229 InnermostEHScope = stable_begin();
230 return filter;
231}
232
233void EHScopeStack::popFilter() {
  assert(!empty() && "popping exception stack when empty");

  EHFilterScope &filter = cast<EHFilterScope>(*begin());
  deallocate(EHFilterScope::getSizeForNumFilters(filter.getNumFilters()));

  InnermostEHScope = filter.getEnclosingEHScope();
}

EHCatchScope *EHScopeStack::pushCatch(unsigned numHandlers) {
  char *buffer = allocate(EHCatchScope::getSizeForNumHandlers(numHandlers));
  EHCatchScope *scope =
    new (buffer) EHCatchScope(numHandlers, InnermostEHScope);
  InnermostEHScope = stable_begin();
  return scope;
}

void EHScopeStack::pushTerminate() {
  char *Buffer = allocate(EHTerminateScope::getSize());
  new (Buffer) EHTerminateScope(InnermostEHScope);
  InnermostEHScope = stable_begin();
}

/// Remove any 'null' fixups on the stack.  However, we can't pop more
/// fixups than the fixup depth on the innermost normal cleanup, or
/// else fixups that we try to add to that cleanup will end up in the
/// wrong place.  We *could* try to shrink fixup depths, but that's
/// actually a lot of work for little benefit.
void EHScopeStack::popNullFixups() {
  // We expect this to only be called when there's still an innermost
  // normal cleanup; otherwise there really shouldn't be any fixups.
  assert(hasNormalCleanups());

  EHScopeStack::iterator it = find(InnermostNormalCleanup);
  unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
  assert(BranchFixups.size() >= MinSize && "fixup stack out of order");

  while (BranchFixups.size() > MinSize &&
         BranchFixups.back().Destination == nullptr)
    BranchFixups.pop_back();
}

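/// Create the i1 flag used to mark a conditionally-evaluated cleanup as
/// active. For example, in an expression like
///
///   b && T(...).f()
///
/// the temporary's destructor must run only if 'b' was true: the flag
/// is initialized to false before the condition and set to true at the
/// point where the temporary is actually created.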
Address CodeGenFunction::createCleanupActiveFlag() {
  // Create a variable to decide whether the cleanup needs to be run.
  Address active = CreateTempAllocaWithoutCast(
      Builder.getInt1Ty(), CharUnits::One(), "cleanup.cond");

  // Initialize it to false at a site that's guaranteed to be run
  // before each evaluation.
  setBeforeOutermostConditional(Builder.getFalse(), active);

  // Initialize it to true at the current location.
  Builder.CreateStore(Builder.getTrue(), active);

  return active;
}

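/// Attach the given active flag to the innermost cleanup on the EH
/// stack, and mark that cleanup as needing to test the flag on whichever
/// paths (normal, EH) it participates in.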
void CodeGenFunction::initFullExprCleanupWithFlag(Address ActiveFlag) {
  // Set that as the active flag in the cleanup.
  EHCleanupScope &cleanup = cast<EHCleanupScope>(*EHStack.begin());
  assert(!cleanup.hasActiveFlag() && "cleanup already has active flag?");
  cleanup.setActiveFlag(ActiveFlag);

  if (cleanup.isNormalCleanup()) cleanup.setTestFlagInNormalCleanup();
  if (cleanup.isEHCleanup()) cleanup.setTestFlagInEHCleanup();
}

void EHScopeStack::Cleanup::anchor() {}

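// Helpers that emit a store/load with the address's alignment,
// positioned before an already-emitted instruction. These are used when
// patching branches and switches in blocks that were finished earlier,
// where the builder's current insertion point can't be used.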
static void createStoreInstBefore(llvm::Value *value, Address addr,
                                  llvm::Instruction *beforeInst) {
  auto store = new llvm::StoreInst(value, addr.getPointer(), beforeInst);
  store->setAlignment(addr.getAlignment().getAsAlign());
}

static llvm::LoadInst *createLoadInstBefore(Address addr, const Twine &name,
                                            llvm::Instruction *beforeInst) {
  return new llvm::LoadInst(addr.getElementType(), addr.getPointer(), name,
                            false, addr.getAlignment().getAsAlign(),
                            beforeInst);
}

/// All the branch fixups on the EH stack have propagated out past the
/// outermost normal cleanup; resolve them all by adding cases to the
/// given switch instruction.
static void ResolveAllBranchFixups(CodeGenFunction &CGF,
                                   llvm::SwitchInst *Switch,
                                   llvm::BasicBlock *CleanupEntry) {
  llvm::SmallPtrSet<llvm::BasicBlock*, 4> CasesAdded;

  for (unsigned I = 0, E = CGF.EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination isn't set.
    BranchFixup &Fixup = CGF.EHStack.getBranchFixup(I);
    if (Fixup.Destination == nullptr) continue;

    // If there isn't an OptimisticBranchBlock, then InitialBranch is
    // still pointing directly to its destination; forward it to the
    // appropriate cleanup entry.  This is required in the specific
    // case of
    //   { std::string s; goto lbl; }
    //   lbl:
    // i.e. where there's an unresolved fixup inside a single cleanup
    // entry which we're currently popping.
    if (Fixup.OptimisticBranchBlock == nullptr) {
      createStoreInstBefore(CGF.Builder.getInt32(Fixup.DestinationIndex),
                            CGF.getNormalCleanupDestSlot(),
                            Fixup.InitialBranch);
      Fixup.InitialBranch->setSuccessor(0, CleanupEntry);
    }

    // Don't add this case to the switch statement twice.
    if (!CasesAdded.insert(Fixup.Destination).second)
      continue;

    Switch->addCase(CGF.Builder.getInt32(Fixup.DestinationIndex),
                    Fixup.Destination);
  }

  CGF.EHStack.clearFixups();
}

/// Transitions the terminator of the given exit-block of a cleanup to
/// be a cleanup switch.
static llvm::SwitchInst *TransitionToCleanupSwitch(CodeGenFunction &CGF,
                                                   llvm::BasicBlock *Block) {
  // If it's a branch, turn it into a switch whose default
  // destination is its original target.
  llvm::Instruction *Term = Block->getTerminator();
  assert(Term && "can't transition block without terminator");

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional());
    auto Load = createLoadInstBefore(CGF.getNormalCleanupDestSlot(),
                                     "cleanup.dest", Term);
    llvm::SwitchInst *Switch =
      llvm::SwitchInst::Create(Load, Br->getSuccessor(0), 4, Block);
    Br->eraseFromParent();
    return Switch;
  } else {
    return cast<llvm::SwitchInst>(Term);
  }
}

void CodeGenFunction::ResolveBranchFixups(llvm::BasicBlock *Block) {
  assert(Block && "resolving a null target block");
  if (!EHStack.getNumBranchFixups()) return;

  assert(EHStack.hasNormalCleanups() &&
         "branch fixups exist with no normal cleanups on stack");

  llvm::SmallPtrSet<llvm::BasicBlock*, 4> ModifiedOptimisticBlocks;
  bool ResolvedAny = false;

  for (unsigned I = 0, E = EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination doesn't match.
    BranchFixup &Fixup = EHStack.getBranchFixup(I);
    if (Fixup.Destination != Block) continue;

    Fixup.Destination = nullptr;
    ResolvedAny = true;

    // If it doesn't have an optimistic branch block, InitialBranch is
    // already pointing to the right place.
    llvm::BasicBlock *BranchBB = Fixup.OptimisticBranchBlock;
    if (!BranchBB)
      continue;

    // Don't process the same optimistic branch block twice.
    if (!ModifiedOptimisticBlocks.insert(BranchBB).second)
      continue;

    llvm::SwitchInst *Switch = TransitionToCleanupSwitch(*this, BranchBB);

    // Add a case to the switch.
    Switch->addCase(Builder.getInt32(Fixup.DestinationIndex), Block);
  }

  if (ResolvedAny)
    EHStack.popNullFixups();
}

/// Pops cleanup blocks until the given savepoint is reached.
void CodeGenFunction::PopCleanupBlocks(
    EHScopeStack::stable_iterator Old,
    std::initializer_list<llvm::Value **> ValuesToReload) {
  assert(Old.isValid());

  bool HadBranches = false;
  while (EHStack.stable_begin() != Old) {
    EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
    HadBranches |= Scope.hasBranches();

    // As long as Old strictly encloses the scope's enclosing normal
    // cleanup, we're going to emit another normal cleanup which
    // fallthrough can propagate through.
    bool FallThroughIsBranchThrough =
      Old.strictlyEncloses(Scope.getEnclosingNormalCleanup());

    PopCleanupBlock(FallThroughIsBranchThrough);
  }

  // If we didn't have any branches, the insertion point before cleanups must
  // dominate the current insertion point and we don't need to reload any
  // values.
  if (!HadBranches)
    return;

  // Spill and reload all values that the caller wants to be live at the
  // current insertion point.
  for (llvm::Value **ReloadedValue : ValuesToReload) {
    auto *Inst = dyn_cast_or_null<llvm::Instruction>(*ReloadedValue);
    if (!Inst)
      continue;

    // Don't spill static allocas, they dominate all cleanups.  These are
    // created by binding a reference to a local variable or temporary.
    auto *AI = dyn_cast<llvm::AllocaInst>(Inst);
    if (AI && AI->isStaticAlloca())
      continue;

    Address Tmp =
        CreateDefaultAlignTempAlloca(Inst->getType(), "tmp.exprcleanup");

    // Find an insertion point after Inst and spill it to the temporary.
    llvm::BasicBlock::iterator InsertBefore;
    if (auto *Invoke = dyn_cast<llvm::InvokeInst>(Inst))
      InsertBefore = Invoke->getNormalDest()->getFirstInsertionPt();
    else
      InsertBefore = std::next(Inst->getIterator());
    CGBuilderTy(CGM, &*InsertBefore).CreateStore(Inst, Tmp);

    // Reload the value at the current insertion point.
    *ReloadedValue = Builder.CreateLoad(Tmp);
  }
}

/// Pops cleanup blocks until the given savepoint is reached, then moves
/// the lifetime-extended cleanups recorded beyond the given position on
/// the lifetime-extended cleanup stack onto the EH stack.
void CodeGenFunction::PopCleanupBlocks(
    EHScopeStack::stable_iterator Old, size_t OldLifetimeExtendedSize,
    std::initializer_list<llvm::Value **> ValuesToReload) {
  PopCleanupBlocks(Old, ValuesToReload);

  // Move our deferred cleanups onto the EH stack.
  for (size_t I = OldLifetimeExtendedSize,
              E = LifetimeExtendedCleanupStack.size(); I != E; /**/) {
    // Alignment should be guaranteed by the vptrs in the individual cleanups.
    assert((I % alignof(LifetimeExtendedCleanupHeader) == 0) &&
           "misaligned cleanup stack entry");

    LifetimeExtendedCleanupHeader &Header =
        reinterpret_cast<LifetimeExtendedCleanupHeader&>(
            LifetimeExtendedCleanupStack[I]);
    I += sizeof(Header);

    EHStack.pushCopyOfCleanup(Header.getKind(),
                              &LifetimeExtendedCleanupStack[I],
                              Header.getSize());
    I += Header.getSize();

    if (Header.isConditional()) {
      Address ActiveFlag =
          reinterpret_cast<Address &>(LifetimeExtendedCleanupStack[I]);
      initFullExprCleanupWithFlag(ActiveFlag);
      I += sizeof(ActiveFlag);
    }
  }
  LifetimeExtendedCleanupStack.resize(OldLifetimeExtendedSize);
}

static llvm::BasicBlock *CreateNormalEntry(CodeGenFunction &CGF,
                                           EHCleanupScope &Scope) {
  assert(Scope.isNormalCleanup());
  llvm::BasicBlock *Entry = Scope.getNormalBlock();
  if (!Entry) {
    Entry = CGF.createBasicBlock("cleanup");
    Scope.setNormalBlock(Entry);
  }
  return Entry;
}

/// Attempts to reduce a cleanup's entry block to a fallthrough.  This
/// is basically llvm::MergeBlockIntoPredecessor, except
/// simplified/optimized for the tighter constraints on cleanup blocks.
///
/// Returns the new block, whatever it is.
static llvm::BasicBlock *SimplifyCleanupEntry(CodeGenFunction &CGF,
                                              llvm::BasicBlock *Entry) {
  llvm::BasicBlock *Pred = Entry->getSinglePredecessor();
  if (!Pred) return Entry;

  llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator());
  if (!Br || Br->isConditional()) return Entry;
  assert(Br->getSuccessor(0) == Entry);

  // If we were previously inserting at the end of the cleanup entry
  // block, we'll need to continue inserting at the end of the
  // predecessor.
  bool WasInsertBlock = CGF.Builder.GetInsertBlock() == Entry;
  assert(!WasInsertBlock || CGF.Builder.GetInsertPoint() == Entry->end());

  // Kill the branch.
  Br->eraseFromParent();

  // Replace all uses of the entry with the predecessor, in case there
  // are phis in the cleanup.
  Entry->replaceAllUsesWith(Pred);

  // Merge the blocks.
  Pred->getInstList().splice(Pred->end(), Entry->getInstList());

  // Kill the entry block.
  Entry->eraseFromParent();

  if (WasInsertBlock)
    CGF.Builder.SetInsertPoint(Pred);

  return Pred;
}

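/// Emit the given cleanup's body at the current insertion point. If an
/// active flag is provided, the body is guarded by a test of the flag,
/// producing roughly:
///
///   %cleanup.is_active = load i1, i1* %flag
///   br i1 %cleanup.is_active, label %cleanup.action, label %cleanup.done
///
/// so the cleanup runs only if it was actually activated on this path.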
static void EmitCleanup(CodeGenFunction &CGF,
                        EHScopeStack::Cleanup *Fn,
                        EHScopeStack::Cleanup::Flags flags,
                        Address ActiveFlag) {
  // If there's an active flag, load it and skip the cleanup if it's
  // false.
  llvm::BasicBlock *ContBB = nullptr;
  if (ActiveFlag.isValid()) {
    ContBB = CGF.createBasicBlock("cleanup.done");
    llvm::BasicBlock *CleanupBB = CGF.createBasicBlock("cleanup.action");
    llvm::Value *IsActive
      = CGF.Builder.CreateLoad(ActiveFlag, "cleanup.is_active");
    CGF.Builder.CreateCondBr(IsActive, CleanupBB, ContBB);
    CGF.EmitBlock(CleanupBB);
  }

  // Ask the cleanup to emit itself.
  Fn->Emit(CGF, flags);
  assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");

  // Emit the continuation block if there was an active flag.
  if (ActiveFlag.isValid())
    CGF.EmitBlock(ContBB);
}

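/// Redirect every edge in the terminator of 'Exit' that currently
/// targets 'From' so that it targets 'To' instead.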
static void ForwardPrebranchedFallthrough(llvm::BasicBlock *Exit,
                                          llvm::BasicBlock *From,
                                          llvm::BasicBlock *To) {
  // Exit is the exit block of a cleanup, so it always terminates in
  // an unconditional branch or a switch.
  llvm::Instruction *Term = Exit->getTerminator();

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional() && Br->getSuccessor(0) == From);
    Br->setSuccessor(0, To);
  } else {
    llvm::SwitchInst *Switch = cast<llvm::SwitchInst>(Term);
    for (unsigned I = 0, E = Switch->getNumSuccessors(); I != E; ++I)
      if (Switch->getSuccessor(I) == From)
        Switch->setSuccessor(I, To);
  }
}

/// We don't need a normal entry block for the given cleanup.
/// Optimistic fixup branches can cause such a block to come into
/// existence anyway; if so, destroy it.
///
/// The validity of this transformation is very much specific to the
/// exact ways in which we form branches to cleanup entries.
static void destroyOptimisticNormalEntry(CodeGenFunction &CGF,
                                         EHCleanupScope &scope) {
  llvm::BasicBlock *entry = scope.getNormalBlock();
  if (!entry) return;

  // Replace all the uses with unreachable.
  llvm::BasicBlock *unreachableBB = CGF.getUnreachableBlock();
  for (llvm::BasicBlock::use_iterator
         i = entry->use_begin(), e = entry->use_end(); i != e; ) {
    llvm::Use &use = *i;
    ++i;

    use.set(unreachableBB);

    // The only uses should be fixup switches.
    llvm::SwitchInst *si = cast<llvm::SwitchInst>(use.getUser());
    if (si->getNumCases() == 1 && si->getDefaultDest() == unreachableBB) {
      // Replace the switch with a branch.
      llvm::BranchInst::Create(si->case_begin()->getCaseSuccessor(), si);

      // The switch operand is a load from the cleanup-dest alloca.
      llvm::LoadInst *condition = cast<llvm::LoadInst>(si->getCondition());

      // Destroy the switch.
      si->eraseFromParent();

      // Destroy the load.
      assert(condition->getOperand(0) == CGF.NormalCleanupDest.getPointer());
      assert(condition->use_empty());
      condition->eraseFromParent();
    }
  }

  assert(entry->use_empty());
  delete entry;
}

/// Pops a cleanup block.  If the block includes a normal cleanup, the
/// current insertion point is threaded through the cleanup, as are
/// any branch fixups on the cleanup.
void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
  assert(!EHStack.empty() && "cleanup stack is empty!");
  assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
  assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());

  // Remember activation information.
  bool IsActive = Scope.isActive();
  Address NormalActiveFlag =
    Scope.shouldTestFlagInNormalCleanup() ? Scope.getActiveFlag()
                                          : Address::invalid();
  Address EHActiveFlag =
    Scope.shouldTestFlagInEHCleanup() ? Scope.getActiveFlag()
                                      : Address::invalid();

  // Check whether we need an EH cleanup.  This is only true if we've
  // generated a lazy EH cleanup block.
  llvm::BasicBlock *EHEntry = Scope.getCachedEHDispatchBlock();
  assert(Scope.hasEHBranches() == (EHEntry != nullptr));
  bool RequiresEHCleanup = (EHEntry != nullptr);
  EHScopeStack::stable_iterator EHParent = Scope.getEnclosingEHScope();

  // Check the three conditions which might require a normal cleanup:

  // - whether there are branch fix-ups through this cleanup
  unsigned FixupDepth = Scope.getFixupDepth();
  bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;

  // - whether there are branch-throughs or branch-afters
  bool HasExistingBranches = Scope.hasBranches();

  // - whether there's a fallthrough
  llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();
  bool HasFallthrough = (FallthroughSource != nullptr && IsActive);

  // Branch-through fall-throughs leave the insertion point set to the
  // end of the last cleanup, which points to the current scope.  The
  // rest of IR gen doesn't need to worry about this; it only happens
  // during the execution of PopCleanupBlocks().
  bool HasPrebranchedFallthrough =
    (FallthroughSource && FallthroughSource->getTerminator());

  // If this is a normal cleanup, then having a prebranched
  // fallthrough implies that the fallthrough source unconditionally
  // jumps here.
  assert(!Scope.isNormalCleanup() || !HasPrebranchedFallthrough ||
         (Scope.getNormalBlock() &&
          FallthroughSource->getTerminator()->getSuccessor(0)
            == Scope.getNormalBlock()));

  bool RequiresNormalCleanup = false;
  if (Scope.isNormalCleanup() &&
      (HasFixups || HasExistingBranches || HasFallthrough)) {
    RequiresNormalCleanup = true;
  }

  // If we have a prebranched fallthrough into an inactive normal
  // cleanup, rewrite it so that it leads to the appropriate place.
  if (Scope.isNormalCleanup() && HasPrebranchedFallthrough && !IsActive) {
    llvm::BasicBlock *prebranchDest;

    // If the prebranch is semantically branching through the next
    // cleanup, just forward it to the next block, leaving the
    // insertion point in the prebranched block.
    if (FallthroughIsBranchThrough) {
      EHScope &enclosing = *EHStack.find(Scope.getEnclosingNormalCleanup());
      prebranchDest = CreateNormalEntry(*this, cast<EHCleanupScope>(enclosing));

    // Otherwise, we need to make a new block.  If the normal cleanup
    // isn't being used at all, we could actually reuse the normal
    // entry block, but this is simpler, and it avoids conflicts with
    // dead optimistic fixup branches.
    } else {
      prebranchDest = createBasicBlock("forwarded-prebranch");
      EmitBlock(prebranchDest);
    }

    llvm::BasicBlock *normalEntry = Scope.getNormalBlock();
    assert(normalEntry && !normalEntry->use_empty());

    ForwardPrebranchedFallthrough(FallthroughSource,
                                  normalEntry, prebranchDest);
  }

  // If we don't need the cleanup at all, we're done.
  if (!RequiresNormalCleanup && !RequiresEHCleanup) {
    destroyOptimisticNormalEntry(*this, Scope);
    EHStack.popCleanup(); // safe because there are no fixups
    assert(EHStack.getNumBranchFixups() == 0 ||
           EHStack.hasNormalCleanups());
    return;
  }

  // Copy the cleanup emission data out.  This uses either a stack
  // array or malloc'd memory, depending on the size, which is
  // behavior that SmallVector would provide, if we could use it
  // here. Unfortunately, if you ask for a SmallVector<char>, the
  // alignment isn't sufficient.
  auto *CleanupSource = reinterpret_cast<char *>(Scope.getCleanupBuffer());
  alignas(EHScopeStack::ScopeStackAlignment) char
      CleanupBufferStack[8 * sizeof(void *)];
  std::unique_ptr<char[]> CleanupBufferHeap;
  size_t CleanupSize = Scope.getCleanupSize();
  EHScopeStack::Cleanup *Fn;

  if (CleanupSize <= sizeof(CleanupBufferStack)) {
    memcpy(CleanupBufferStack, CleanupSource, CleanupSize);
    Fn = reinterpret_cast<EHScopeStack::Cleanup *>(CleanupBufferStack);
  } else {
    CleanupBufferHeap.reset(new char[CleanupSize]);
    memcpy(CleanupBufferHeap.get(), CleanupSource, CleanupSize);
    Fn = reinterpret_cast<EHScopeStack::Cleanup *>(CleanupBufferHeap.get());
  }

  EHScopeStack::Cleanup::Flags cleanupFlags;
  if (Scope.isNormalCleanup())
    cleanupFlags.setIsNormalCleanupKind();
  if (Scope.isEHCleanup())
    cleanupFlags.setIsEHCleanupKind();

  if (!RequiresNormalCleanup) {
    destroyOptimisticNormalEntry(*this, Scope);
    EHStack.popCleanup();
  } else {
    // If we have a fallthrough and no other need for the cleanup,
    // emit it directly.
    if (HasFallthrough && !HasPrebranchedFallthrough &&
        !HasFixups && !HasExistingBranches) {

      destroyOptimisticNormalEntry(*this, Scope);
      EHStack.popCleanup();

      EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

    // Otherwise, the best approach is to thread everything through
    // the cleanup block and then try to clean up after ourselves.
    } else {
      // Force the entry block to exist.
      llvm::BasicBlock *NormalEntry = CreateNormalEntry(*this, Scope);

      // I.  Set up the fallthrough edge in.

      CGBuilderTy::InsertPoint savedInactiveFallthroughIP;

      // If there's a fallthrough, we need to store the cleanup
      // destination index.  For fall-throughs this is always zero.
      if (HasFallthrough) {
        if (!HasPrebranchedFallthrough)
          Builder.CreateStore(Builder.getInt32(0), getNormalCleanupDestSlot());

      // Otherwise, save and clear the IP if we don't have fallthrough
      // because the cleanup is inactive.
      } else if (FallthroughSource) {
        assert(!IsActive && "source without fallthrough for active cleanup");
        savedInactiveFallthroughIP = Builder.saveAndClearIP();
      }

      // II.  Emit the entry block.  This implicitly branches to it if
      // we have fallthrough.  All the fixups and existing branches
      // should already be branched to it.
      EmitBlock(NormalEntry);

      // III.  Figure out where we're going and build the cleanup
      // epilogue.

      bool HasEnclosingCleanups =
        (Scope.getEnclosingNormalCleanup() != EHStack.stable_end());

      // Compute the branch-through dest if we need it:
      //   - if there are branch-throughs threaded through the scope
      //   - if fall-through is a branch-through
      //   - if there are fixups that will be optimistically forwarded
      //     to the enclosing cleanup
      llvm::BasicBlock *BranchThroughDest = nullptr;
      if (Scope.hasBranchThroughs() ||
          (FallthroughSource && FallthroughIsBranchThrough) ||
          (HasFixups && HasEnclosingCleanups)) {
        assert(HasEnclosingCleanups);
        EHScope &S = *EHStack.find(Scope.getEnclosingNormalCleanup());
        BranchThroughDest = CreateNormalEntry(*this, cast<EHCleanupScope>(S));
      }

      llvm::BasicBlock *FallthroughDest = nullptr;
      SmallVector<llvm::Instruction*, 2> InstsToAppend;

      // If there's exactly one branch-after and no other threads,
      // we can route it without a switch.
      if (!Scope.hasBranchThroughs() && !HasFixups && !HasFallthrough &&
          Scope.getNumBranchAfters() == 1) {
        assert(!BranchThroughDest || !IsActive);

        // Clean up the possibly dead store to the cleanup dest slot.
        llvm::Instruction *NormalCleanupDestSlot =
            cast<llvm::Instruction>(getNormalCleanupDestSlot().getPointer());
        if (NormalCleanupDestSlot->hasOneUse()) {
          NormalCleanupDestSlot->user_back()->eraseFromParent();
          NormalCleanupDestSlot->eraseFromParent();
          NormalCleanupDest = Address::invalid();
        }

        llvm::BasicBlock *BranchAfter = Scope.getBranchAfterBlock(0);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchAfter));

      // Build a switch-out if we need it:
      //   - if there are branch-afters threaded through the scope
      //   - if fall-through is a branch-after
      //   - if there are fixups that have nowhere left to go and
      //     so must be immediately resolved
      } else if (Scope.getNumBranchAfters() ||
                 (HasFallthrough && !FallthroughIsBranchThrough) ||
                 (HasFixups && !HasEnclosingCleanups)) {

        llvm::BasicBlock *Default =
          (BranchThroughDest ? BranchThroughDest : getUnreachableBlock());

        // TODO: base this on the number of branch-afters and fixups
        const unsigned SwitchCapacity = 10;

        // pass the abnormal exit flag to Fn (SEH cleanup)
        cleanupFlags.setHasExitSwitch();

        llvm::LoadInst *Load =
          createLoadInstBefore(getNormalCleanupDestSlot(), "cleanup.dest",
                               nullptr);
        llvm::SwitchInst *Switch =
          llvm::SwitchInst::Create(Load, Default, SwitchCapacity);

        InstsToAppend.push_back(Load);
        InstsToAppend.push_back(Switch);

        // Branch-after fallthrough.
        if (FallthroughSource && !FallthroughIsBranchThrough) {
          FallthroughDest = createBasicBlock("cleanup.cont");
          if (HasFallthrough)
            Switch->addCase(Builder.getInt32(0), FallthroughDest);
        }

        for (unsigned I = 0, E = Scope.getNumBranchAfters(); I != E; ++I) {
          Switch->addCase(Scope.getBranchAfterIndex(I),
                          Scope.getBranchAfterBlock(I));
        }

        // If there aren't any enclosing cleanups, we can resolve all
        // the fixups now.
        if (HasFixups && !HasEnclosingCleanups)
          ResolveAllBranchFixups(*this, Switch, NormalEntry);
      } else {
        // We should always have a branch-through destination in this case.
        assert(BranchThroughDest);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchThroughDest));
      }

      // IV.  Pop the cleanup and emit it.
      EHStack.popCleanup();
      assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);

      EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

      // Append the prepared cleanup epilogue from above.
      llvm::BasicBlock *NormalExit = Builder.GetInsertBlock();
      for (unsigned I = 0, E = InstsToAppend.size(); I != E; ++I)
        NormalExit->getInstList().push_back(InstsToAppend[I]);

      // Optimistically hope that any fixups will continue falling through.
      for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
           I < E; ++I) {
        BranchFixup &Fixup = EHStack.getBranchFixup(I);
        if (!Fixup.Destination) continue;
        if (!Fixup.OptimisticBranchBlock) {
          createStoreInstBefore(Builder.getInt32(Fixup.DestinationIndex),
                                getNormalCleanupDestSlot(),
                                Fixup.InitialBranch);
          Fixup.InitialBranch->setSuccessor(0, NormalEntry);
        }
        Fixup.OptimisticBranchBlock = NormalExit;
      }

      // V.  Set up the fallthrough edge out.

      // Case 1: a fallthrough source exists but doesn't branch to the
      // cleanup because the cleanup is inactive.
      if (!HasFallthrough && FallthroughSource) {
        // Prebranched fallthrough was forwarded earlier.
        // Non-prebranched fallthrough doesn't need to be forwarded.
        // Either way, all we need to do is restore the IP we cleared before.
        assert(!IsActive);
        Builder.restoreIP(savedInactiveFallthroughIP);

      // Case 2: a fallthrough source exists and should branch to the
      // cleanup, but we're not supposed to branch through to the next
      // cleanup.
      } else if (HasFallthrough && FallthroughDest) {
        assert(!FallthroughIsBranchThrough);
        EmitBlock(FallthroughDest);

      // Case 3: a fallthrough source exists and should branch to the
      // cleanup and then through to the next.
      } else if (HasFallthrough) {
        // Everything is already set up for this.

      // Case 4: no fallthrough source exists.
      } else {
        Builder.ClearInsertionPoint();
      }

      // VI.  Assorted cleaning.

      // Check whether we can merge NormalEntry into a single predecessor.
      // This might invalidate (non-IR) pointers to NormalEntry.
      llvm::BasicBlock *NewNormalEntry =
        SimplifyCleanupEntry(*this, NormalEntry);

      // If it did invalidate those pointers, and NormalEntry was the same
      // as NormalExit, go back and patch up the fixups.
      if (NewNormalEntry != NormalEntry && NormalEntry == NormalExit)
        for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
             I < E; ++I)
          EHStack.getBranchFixup(I).OptimisticBranchBlock = NewNormalEntry;
    }
  }

  assert(EHStack.hasNormalCleanups() || EHStack.getNumBranchFixups() == 0);

  // Emit the EH cleanup if required.
  if (RequiresEHCleanup) {
    CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();

    EmitBlock(EHEntry);

    llvm::BasicBlock *NextAction = getEHDispatchBlock(EHParent);

    // Push a terminate scope or cleanupendpad scope around the potentially
    // throwing cleanups. For funclet EH personalities, the cleanupendpad models
    // program termination when cleanups throw.
    bool PushedTerminate = false;
    SaveAndRestore<llvm::Instruction *> RestoreCurrentFuncletPad(
        CurrentFuncletPad);
    llvm::CleanupPadInst *CPI = nullptr;

    const EHPersonality &Personality = EHPersonality::get(*this);
    if (Personality.usesFuncletPads()) {
      llvm::Value *ParentPad = CurrentFuncletPad;
      if (!ParentPad)
        ParentPad = llvm::ConstantTokenNone::get(CGM.getLLVMContext());
      CurrentFuncletPad = CPI = Builder.CreateCleanupPad(ParentPad);
    }

    // Non-MSVC personalities need to terminate when an EH cleanup throws.
    if (!Personality.isMSVCPersonality()) {
      EHStack.pushTerminate();
      PushedTerminate = true;
    }

    // We only actually emit the cleanup code if the cleanup is either
    // active or was used before it was deactivated.
    if (EHActiveFlag.isValid() || IsActive) {
      cleanupFlags.setIsForEHCleanup();
      EmitCleanup(*this, Fn, cleanupFlags, EHActiveFlag);
    }

    if (CPI)
      Builder.CreateCleanupRet(CPI, NextAction);
    else
      Builder.CreateBr(NextAction);

    // Leave the terminate scope.
    if (PushedTerminate)
      EHStack.popTerminate();

    Builder.restoreIP(SavedIP);

    SimplifyCleanupEntry(*this, EHEntry);
  }
}

/// isObviouslyBranchWithoutCleanups - Return true if a branch to the
/// specified destination obviously has no cleanups to run.  'false' is always
/// a conservatively correct answer for this method.
bool CodeGenFunction::isObviouslyBranchWithoutCleanups(JumpDest Dest) const {
  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
         && "stale jump destination");

  // Calculate the innermost active normal cleanup.
  EHScopeStack::stable_iterator TopCleanup =
    EHStack.getInnermostActiveNormalCleanup();

  // If we're not in an active normal cleanup scope, or if the
  // destination scope is within the innermost active normal cleanup
  // scope, we don't need to worry about fixups.
  if (TopCleanup == EHStack.stable_end() ||
      TopCleanup.encloses(Dest.getScopeDepth())) // works for invalid
    return true;

  // Otherwise, we might need some cleanups.
  return false;
}


/// Terminate the current block by emitting a branch which might leave
/// the current cleanup-protected scope.  The target scope may not yet
/// be known, in which case this will require a fixup.
///
/// As a side-effect, this method clears the insertion point.
void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
         && "stale jump destination");

  if (!HaveInsertPoint())
    return;

  // Create the branch.
  llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock());

  // Calculate the innermost active normal cleanup.
  EHScopeStack::stable_iterator
    TopCleanup = EHStack.getInnermostActiveNormalCleanup();

  // If we're not in an active normal cleanup scope, or if the
  // destination scope is within the innermost active normal cleanup
  // scope, we don't need to worry about fixups.
  if (TopCleanup == EHStack.stable_end() ||
      TopCleanup.encloses(Dest.getScopeDepth())) { // works for invalid
    Builder.ClearInsertionPoint();
    return;
  }

  // If we can't resolve the destination cleanup scope, just add this
  // to the current cleanup scope as a branch fixup.
  if (!Dest.getScopeDepth().isValid()) {
    BranchFixup &Fixup = EHStack.addBranchFixup();
    Fixup.Destination = Dest.getBlock();
    Fixup.DestinationIndex = Dest.getDestIndex();
    Fixup.InitialBranch = BI;
    Fixup.OptimisticBranchBlock = nullptr;

    Builder.ClearInsertionPoint();
    return;
  }

  // Otherwise, thread through all the normal cleanups in scope.

  // Store the index at the start.
  llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex());
  createStoreInstBefore(Index, getNormalCleanupDestSlot(), BI);

  // Adjust BI to point to the first cleanup block.
  {
    EHCleanupScope &Scope =
      cast<EHCleanupScope>(*EHStack.find(TopCleanup));
    BI->setSuccessor(0, CreateNormalEntry(*this, Scope));
  }

  // Add this destination to all the scopes involved.
  EHScopeStack::stable_iterator I = TopCleanup;
  EHScopeStack::stable_iterator E = Dest.getScopeDepth();
  if (E.strictlyEncloses(I)) {
    while (true) {
      EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
      assert(Scope.isNormalCleanup());
      I = Scope.getEnclosingNormalCleanup();

      // If this is the last cleanup we're propagating through, tell it
      // that there's a resolved jump moving through it.
      if (!E.strictlyEncloses(I)) {
        Scope.addBranchAfter(Index, Dest.getBlock());
        break;
      }

      // Otherwise, tell the scope that there's a jump propagating
      // through it.  If this isn't new information, all the rest of
      // the work has been done before.
      if (!Scope.addBranchThrough(Dest.getBlock()))
        break;
    }
  }

  Builder.ClearInsertionPoint();
}

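/// Returns true if the given cleanup, or any cleanup nested within it,
/// ever needed a normal entry block, i.e. whether some branch actually
/// flows through it on the normal path.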
static bool IsUsedAsNormalCleanup(EHScopeStack &EHStack,
                                  EHScopeStack::stable_iterator C) {
  // If we needed a normal block for any reason, that counts.
  if (cast<EHCleanupScope>(*EHStack.find(C)).getNormalBlock())
    return true;

  // Check whether any enclosed cleanups were needed.
  for (EHScopeStack::stable_iterator
         I = EHStack.getInnermostNormalCleanup();
         I != C; ) {
    assert(C.strictlyEncloses(I));
    EHCleanupScope &S = cast<EHCleanupScope>(*EHStack.find(I));
    if (S.getNormalBlock()) return true;
    I = S.getEnclosingNormalCleanup();
  }

  return false;
}

static bool IsUsedAsEHCleanup(EHScopeStack &EHStack,
                              EHScopeStack::stable_iterator cleanup) {
  // If we needed an EH block for any reason, that counts.
  if (EHStack.find(cleanup)->hasEHBranches())
    return true;

  // Check whether any enclosed cleanups were needed.
  for (EHScopeStack::stable_iterator
         i = EHStack.getInnermostEHScope(); i != cleanup; ) {
    assert(cleanup.strictlyEncloses(i));

    EHScope &scope = *EHStack.find(i);
    if (scope.hasEHBranches())
      return true;

    i = scope.getEnclosingEHScope();
  }

  return false;
}

enum ForActivation_t {
  ForActivation,
  ForDeactivation
};

/// The given cleanup block is changing activation state.  Configure a
/// cleanup variable if necessary.
///
/// It would be good if we had some way of determining if there were
/// extra uses *after* the change-over point.
static void SetupCleanupBlockActivation(CodeGenFunction &CGF,
                                        EHScopeStack::stable_iterator C,
                                        ForActivation_t kind,
                                        llvm::Instruction *dominatingIP) {
  EHCleanupScope &Scope = cast<EHCleanupScope>(*CGF.EHStack.find(C));

  // We always need the flag if we're activating the cleanup in a
  // conditional context, because we have to assume that the current
  // location doesn't necessarily dominate the cleanup's code.
  bool isActivatedInConditional =
    (kind == ForActivation && CGF.isInConditionalBranch());

  bool needFlag = false;

  // Calculate whether the cleanup was used:

  //   - as a normal cleanup
  if (Scope.isNormalCleanup() &&
      (isActivatedInConditional || IsUsedAsNormalCleanup(CGF.EHStack, C))) {
    Scope.setTestFlagInNormalCleanup();
    needFlag = true;
  }

  //   - as an EH cleanup
  if (Scope.isEHCleanup() &&
      (isActivatedInConditional || IsUsedAsEHCleanup(CGF.EHStack, C))) {
    Scope.setTestFlagInEHCleanup();
    needFlag = true;
  }

  // If it hasn't yet been used as either, we're done.
  if (!needFlag) return;

  Address var = Scope.getActiveFlag();
  if (!var.isValid()) {
    var = CGF.CreateTempAlloca(CGF.Builder.getInt1Ty(), CharUnits::One(),
                               "cleanup.isactive");
    Scope.setActiveFlag(var);

    assert(dominatingIP && "no existing variable and no dominating IP!");

    // Initialize to true or false depending on whether it was
    // active up to this point.
    llvm::Constant *value = CGF.Builder.getInt1(kind == ForDeactivation);

    // If we're in a conditional block, ignore the dominating IP and
    // use the outermost conditional branch.
    if (CGF.isInConditionalBranch()) {
      CGF.setBeforeOutermostConditional(value, var);
    } else {
      createStoreInstBefore(value, var, dominatingIP);
    }
  }

  CGF.Builder.CreateStore(CGF.Builder.getInt1(kind == ForActivation), var);
}

/// Activate a cleanup that was created in an inactivated state.
void CodeGenFunction::ActivateCleanupBlock(EHScopeStack::stable_iterator C,
                                           llvm::Instruction *dominatingIP) {
  assert(C != EHStack.stable_end() && "activating bottom of stack?");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
  assert(!Scope.isActive() && "double activation");

  SetupCleanupBlockActivation(*this, C, ForActivation, dominatingIP);

  Scope.setActive(true);
}

/// Deactivate a cleanup that was created in an active state.
void CodeGenFunction::DeactivateCleanupBlock(EHScopeStack::stable_iterator C,
                                             llvm::Instruction *dominatingIP) {
  assert(C != EHStack.stable_end() && "deactivating bottom of stack?");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
  assert(Scope.isActive() && "double deactivation");

  // If it's the top of the stack, just pop it, but do so only if it belongs
  // to the current RunCleanupsScope.
  if (C == EHStack.stable_begin() &&
      CurrentCleanupScopeDepth.strictlyEncloses(C)) {
    // If it's a normal cleanup, we need to pretend that the
    // fallthrough is unreachable.
    CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
    PopCleanupBlock();
    Builder.restoreIP(SavedIP);
    return;
  }

  // Otherwise, follow the general case.
  SetupCleanupBlockActivation(*this, C, ForDeactivation, dominatingIP);

  Scope.setActive(false);
}

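/// Returns the i32 slot recording which destination a normal cleanup
/// should branch to after it runs, creating the alloca lazily on first
/// use.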
Address CodeGenFunction::getNormalCleanupDestSlot() {
  if (!NormalCleanupDest.isValid())
    NormalCleanupDest =
      CreateDefaultAlignTempAlloca(Builder.getInt32Ty(), "cleanup.dest.slot");
  return NormalCleanupDest;
}

/// Emits all the code to cause the given temporary to be cleaned up.
void CodeGenFunction::EmitCXXTemporary(const CXXTemporary *Temporary,
                                       QualType TempType,
                                       Address Ptr) {
  pushDestroy(NormalAndEHCleanup, Ptr, TempType, destroyCXXObject,
              /*useEHCleanup*/ true);
}