Adds detection of linearly recursive data structures by finding stored properties whose type matches the class the dealloc is being generated for. Each link is then deallocated in a loop, keeping the next link alive long enough to prevent the recursion. This prevents stack overflows for long chains and also improves performance. rdar://89162954
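
For illustration, a hypothetical class of the shape this change targets (the names are made up; any class with exactly one optional stored property of its own type qualifies):

    final class Node {
        var value: Int
        var next: Node?   // the recursive link; now torn down iteratively
        init(value: Int, next: Node? = nil) {
            self.value = value
            self.next = next
        }
    }

    // Before this change, dropping the head of a long chain released each
    // `next` recursively, one stack frame per node:
    var head: Node? = nil
    for i in 0..<1_000_000 {
        head = Node(value: i, next: head)
    }
    head = nil   // now deallocates the whole chain in a loop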
//===--- SILGenDestructor.cpp - SILGen for destructors --------------------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//

#include "SILGenFunction.h"
#include "SILGenFunctionBuilder.h"
#include "RValue.h"
#include "ArgumentScope.h"
#include "llvm/ADT/SmallSet.h"
#include "swift/AST/GenericSignature.h"
#include "swift/AST/SubstitutionMap.h"
#include "swift/SIL/TypeLowering.h"

using namespace swift;
using namespace Lowering;

void SILGenFunction::emitDestroyingDestructor(DestructorDecl *dd) {
  MagicFunctionName = DeclName(SGM.M.getASTContext().getIdentifier("deinit"));

  RegularLocation Loc(dd);
  if (dd->isImplicit())
    Loc.markAutoGenerated();

  auto cd = cast<ClassDecl>(dd->getDeclContext());
  SILValue selfValue = emitSelfDecl(dd->getImplicitSelfDecl());

  // Create a basic block to jump to for the implicit destruction behavior
  // of releasing the elements and calling the superclass destructor.
  // We won't actually emit the block until we finish with the destructor body.
  prepareEpilog(None, false, CleanupLocation(Loc));

  emitProfilerIncrement(dd->getTypecheckedBody());
  // Emit the destructor body.
  emitStmt(dd->getTypecheckedBody());

  Optional<SILValue> maybeReturnValue;
  SILLocation returnLoc(Loc);
  std::tie(maybeReturnValue, returnLoc) = emitEpilogBB(Loc);

  if (!maybeReturnValue)
    return;

  auto cleanupLoc = CleanupLocation(Loc);

  // If we have a superclass, invoke its destructor.
  SILValue resultSelfValue;
  SILType objectPtrTy = SILType::getNativeObjectType(F.getASTContext());
  SILType classTy = selfValue->getType();
  if (cd->hasSuperclass() && !cd->isNativeNSObjectSubclass()) {
    Type superclassTy =
      dd->mapTypeIntoContext(cd->getSuperclass());
    ClassDecl *superclass = superclassTy->getClassOrBoundGenericClass();
    auto superclassDtorDecl = superclass->getDestructor();
    SILDeclRef dtorConstant =
      SILDeclRef(superclassDtorDecl, SILDeclRef::Kind::Destroyer);
    SILType baseSILTy = getLoweredLoadableType(superclassTy);
    SILValue baseSelf = B.createUpcast(cleanupLoc, selfValue, baseSILTy);
    ManagedValue dtorValue;
    SILType dtorTy;
    auto subMap
      = superclassTy->getContextSubstitutionMap(SGM.M.getSwiftModule(),
                                                superclass);
    std::tie(dtorValue, dtorTy)
      = emitSiblingMethodRef(cleanupLoc, baseSelf, dtorConstant, subMap);

    resultSelfValue = B.createApply(cleanupLoc, dtorValue.forward(*this),
                                    subMap, baseSelf);
  } else {
    resultSelfValue = selfValue;
  }

  /// A distributed actor resigns its identity as it is deallocated.
  /// This way the transport knows it must not deliver any more messages to it,
  /// and can remove it from its (weak) lookup tables.
  if (cd->isDistributedActor()) {
    SILBasicBlock *continueBB = createBasicBlock();

    RegularLocation loc(dd);
    if (dd->isImplicit())
      loc.markAutoGenerated();

    // FIXME: what should the type of management be for this?
    auto managedSelf = ManagedValue::forBorrowedRValue(selfValue);
    emitConditionalResignIdentityCall(loc, cd, managedSelf, continueBB);
    B.emitBlock(continueBB);
  }

  ArgumentScope S(*this, Loc);
  ManagedValue borrowedValue =
      ManagedValue::forUnmanaged(resultSelfValue).borrow(*this, cleanupLoc);

  if (classTy != borrowedValue.getType()) {
    borrowedValue =
        B.createUncheckedRefCast(cleanupLoc, borrowedValue, classTy);
  }

  // Release our members.
  emitClassMemberDestruction(borrowedValue, cd, cleanupLoc);

  S.pop();

  if (resultSelfValue->getType() != objectPtrTy) {
    resultSelfValue =
        B.createUncheckedRefCast(cleanupLoc, resultSelfValue, objectPtrTy);
  }
  if (resultSelfValue.getOwnershipKind() != OwnershipKind::Owned) {
    assert(resultSelfValue.getOwnershipKind() == OwnershipKind::Guaranteed);
    resultSelfValue = B.createUncheckedOwnershipConversion(
        cleanupLoc, resultSelfValue, OwnershipKind::Owned);
  }
  B.createReturn(returnLoc, resultSelfValue);
}
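
// For orientation, the function below emits the deallocating entry point. A
// rough, illustrative sketch of the SIL it produces for some class C (assumed
// shape; the exact instructions depend on the class and ownership lowering):
//
//   bb0(%self : @owned $C):
//     %dtor = function_ref <C's destroying deinit>
//     %borrow = begin_borrow %self
//     %remnant = apply %dtor(%borrow)   // destroys members, returns self at +1
//     end_borrow %borrow
//     end_lifetime %self                // balances the +1 'self' argument
//     %obj = unchecked_ref_cast %remnant to $C
//     dealloc_ref %obj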

void SILGenFunction::emitDeallocatingDestructor(DestructorDecl *dd) {
  MagicFunctionName = DeclName(SGM.M.getASTContext().getIdentifier("deinit"));

  // The deallocating destructor is always auto-generated.
  RegularLocation loc(dd);
  loc.markAutoGenerated();

  // Emit the prolog.
  SILValue initialSelfValue = emitSelfDecl(dd->getImplicitSelfDecl());

  // Form a reference to the destroying destructor.
  SILDeclRef dtorConstant(dd, SILDeclRef::Kind::Destroyer);
  auto classTy = initialSelfValue->getType();
  auto classDecl = classTy.getASTType()->getAnyNominal();
  ManagedValue dtorValue;
  SILType dtorTy;
  auto subMap = classTy.getASTType()
    ->getContextSubstitutionMap(SGM.M.getSwiftModule(),
                                classDecl);
  std::tie(dtorValue, dtorTy)
    = emitSiblingMethodRef(loc, initialSelfValue, dtorConstant, subMap);

  // Call the destroying destructor.
  SILValue selfForDealloc;
  {
    FullExpr CleanupScope(Cleanups, CleanupLocation(loc));
    ManagedValue borrowedSelf = emitManagedBeginBorrow(loc, initialSelfValue);
    selfForDealloc = B.createApply(loc, dtorValue.forward(*this), subMap,
                                   borrowedSelf.getUnmanagedValue());
  }

  // Balance out the +1 from the self argument using end_lifetime.
  //
  // The issue here is that:
  //
  // 1. Self is passed into deallocating deinits at +1.
  // 2. Destroying deinits take in self as a +0 value that is then returned at
  //    +1.
  //
  // This means that the lifetime of self can not be modeled statically in a
  // deallocating deinit without analyzing the body of the destroying deinit
  // (something that violates semantic sil). Thus we add an artificial destroy
  // of self before the actual destroy of self so that the verifier can
  // understand that self is being properly balanced.
  B.createEndLifetime(loc, initialSelfValue);

  // Deallocate the object.
  selfForDealloc = B.createUncheckedRefCast(loc, selfForDealloc, classTy);
  B.createDeallocRef(loc, selfForDealloc);

  emitProfilerIncrement(dd->getTypecheckedBody());

  // Return.
  B.createReturn(loc, emitEmptyTuple(loc));
}

void SILGenFunction::emitIVarDestroyer(SILDeclRef ivarDestroyer) {
  auto cd = cast<ClassDecl>(ivarDestroyer.getDecl());
  RegularLocation loc(cd);
  loc.markAutoGenerated();

  ManagedValue selfValue = ManagedValue::forUnmanaged(
      emitSelfDecl(cd->getDestructor()->getImplicitSelfDecl()));

  auto cleanupLoc = CleanupLocation(loc);
  prepareEpilog(None, false, cleanupLoc);
  {
    Scope S(*this, cleanupLoc);
    // Self is effectively guaranteed for the duration of any destructor. For
    // ObjC classes, self may be unowned. A conversion to guaranteed is
    // required to access its members.
    if (selfValue.getOwnershipKind() != OwnershipKind::Guaranteed) {
      // %guaranteedSelf = unchecked_ownership_conversion %self to @guaranteed
      // ...
      // end_borrow %guaranteedSelf
      auto guaranteedSelf = B.createUncheckedOwnershipConversion(
          cleanupLoc, selfValue.forward(*this), OwnershipKind::Guaranteed);
      selfValue = emitManagedBorrowedRValueWithCleanup(guaranteedSelf);
    }
    emitClassMemberDestruction(selfValue, cd, cleanupLoc);
  }

  B.createReturn(loc, emitEmptyTuple(loc));
  emitEpilog(loc);
}

void SILGenFunction::destroyClassMember(SILLocation cleanupLoc,
                                        ManagedValue selfValue, VarDecl *D) {
  const TypeLowering &ti = getTypeLowering(D->getType());
  if (!ti.isTrivial()) {
    SILValue addr =
        B.createRefElementAddr(cleanupLoc, selfValue.getValue(), D,
                               ti.getLoweredType().getAddressType());
    addr = B.createBeginAccess(
        cleanupLoc, addr, SILAccessKind::Deinit, SILAccessEnforcement::Static,
        false /*noNestedConflict*/, false /*fromBuiltin*/);
    B.createDestroyAddr(cleanupLoc, addr);
    B.createEndAccess(cleanupLoc, addr, false /*is aborting*/);
  }
}
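
// For illustration (hypothetical types, not from this file): for
//
//   class Node { var next: Node? }
//
// findRecursiveLinks returns { next }, enabling the iterative teardown below.
// A class with two such links, e.g.
//
//   class Tree { var left: Tree?; var right: Tree? }
//
// is not linearly recursive, so the set is cleared and the regular
// (recursive) member destruction is used instead.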

llvm::SmallSetVector<VarDecl *, 4> findRecursiveLinks(DeclContext *DC,
                                                      ClassDecl *cd) {
  auto SelfTy = DC->mapTypeIntoContext(cd->getDeclaredType());

  // Collect all stored properties that would form a recursive structure,
  // so we can remove the recursion and prevent the call stack from
  // overflowing.
  llvm::SmallSetVector<VarDecl *, 4> recursiveLinks;
  for (VarDecl *vd : cd->getStoredProperties()) {
    auto Ty = vd->getType()->getOptionalObjectType();
    if (Ty && Ty->getCanonicalType() == SelfTy->getCanonicalType()) {
      recursiveLinks.insert(vd);
    }
  }

  // NOTE: Right now we only optimize linear recursion, so if there is more
  // than one link, clear out the set and don't perform any recursion
  // optimization.
  if (recursiveLinks.size() > 1) {
    recursiveLinks.clear();
  }

  return recursiveLinks;
}
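
// The function below unlinks and destroys a linearly recursive chain
// iteratively. In Swift-like pseudocode (a sketch of the emitted control
// flow, using a hypothetical property name 'link'):
//
//   var iter = self.link
//   self.link = nil
//   while iter != nil {
//     guard isKnownUniquelyReferenced(&iter) else { break }
//     // Keep the tail alive (+1) so that releasing 'iter' cannot recurse,
//     // then step to it; the old node is freed when 'iter' is reassigned.
//     iter = iter.unsafelyUnwrapped.link
//   }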

void SILGenFunction::emitRecursiveChainDestruction(ManagedValue selfValue,
                                      ClassDecl *cd,
                                      SmallSetVector<VarDecl *, 4> recursiveLinks,
                                      CleanupLocation cleanupLoc) {
  auto SelfTy = F.mapTypeIntoContext(cd->getDeclaredType());

  assert(recursiveLinks.size() <= 1 && "Only linear recursion supported.");

  auto SelfTyLowered = getTypeLowering(SelfTy).getLoweredType();
  for (VarDecl *vd : recursiveLinks) {
    SILBasicBlock *cleanBB = createBasicBlock();
    SILBasicBlock *noneBB = createBasicBlock();
    SILBasicBlock *notUniqueBB = createBasicBlock();
    SILBasicBlock *uniqueBB = createBasicBlock();
    SILBasicBlock *someBB = createBasicBlock();
    SILBasicBlock *loopBB = createBasicBlock();

    // var iter = self.link
    // self.link = nil
    auto Ty = getTypeLowering(vd->getType()).getLoweredType();
    auto optionalNone = B.createOptionalNone(cleanupLoc, Ty);
    SILValue varAddr =
        B.createRefElementAddr(cleanupLoc, selfValue.getValue(), vd,
                               Ty.getAddressType());
    auto iterAddr = B.createAllocStack(cleanupLoc, Ty);
    SILValue addr = B.createBeginAccess(
        cleanupLoc, varAddr, SILAccessKind::Modify,
        SILAccessEnforcement::Static,
        false /*noNestedConflict*/, false /*fromBuiltin*/);
    SILValue iter =
        B.createLoad(cleanupLoc, addr, LoadOwnershipQualifier::Copy);
    B.createStore(cleanupLoc, optionalNone, addr,
                  StoreOwnershipQualifier::Assign);
    B.createEndAccess(cleanupLoc, addr, false /*is aborting*/);
    B.createStore(cleanupLoc, iter, iterAddr, StoreOwnershipQualifier::Init);

    B.createBranch(cleanupLoc, loopBB);

    // while iter != nil {
    B.emitBlock(loopBB);
    SILValue operand =
        B.createLoad(cleanupLoc, iterAddr, LoadOwnershipQualifier::Copy);
    auto operandCopy = B.createCopyValue(cleanupLoc, operand);
    auto operandAddr = B.createAllocStack(cleanupLoc, Ty);
    B.createStore(cleanupLoc, operandCopy, operandAddr,
                  StoreOwnershipQualifier::Init);
    B.createDestroyValue(cleanupLoc, operand);
    B.createSwitchEnumAddr(
        cleanupLoc, operandAddr, nullptr,
        {{getASTContext().getOptionalSomeDecl(), someBB},
         {std::make_pair(getASTContext().getOptionalNoneDecl(), noneBB)}});

    // if isKnownUniquelyReferenced(&iter) {
    B.emitBlock(someBB);
    B.createDestroyAddr(cleanupLoc, operandAddr);
    B.createDeallocStack(cleanupLoc, operandAddr);
    auto isUnique = B.createIsUnique(cleanupLoc, iterAddr);
    B.createCondBranch(cleanupLoc, isUnique, uniqueBB, notUniqueBB);

    // we have a uniquely referenced link, so we need to deinit
    B.emitBlock(uniqueBB);

    // NOTE: We increment the ref count of the tail instead of unlinking it,
    // because custom deinit implementations of subclasses may access
    // it and it would be semantically wrong to unset it before that.
    // Making the tail non-uniquely referenced prevents the recursion.

    // let tail = iter.unsafelyUnwrapped.next
    // iter = tail
    SILValue _iter =
        B.createLoad(cleanupLoc, iterAddr, LoadOwnershipQualifier::Copy);
    auto iterBorrow = B.createBeginBorrow(cleanupLoc, _iter);
    auto iterBorrowAddr = B.createAllocStack(cleanupLoc, Ty);
    B.createStoreBorrow(cleanupLoc, iterBorrow, iterBorrowAddr);
    auto xx = B.createLoadBorrow(cleanupLoc, iterBorrowAddr);
    auto *link = B.createUncheckedEnumData(cleanupLoc,
                                           xx,
                                           getASTContext().getOptionalSomeDecl(),
                                           SelfTyLowered);
    varAddr = B.createRefElementAddr(cleanupLoc,
                                     link,
                                     vd,
                                     Ty.getAddressType());

    addr = B.createBeginAccess(
        cleanupLoc, varAddr, SILAccessKind::Read, SILAccessEnforcement::Static,
        false /*noNestedConflict*/, false /*fromBuiltin*/);
    iter = B.createLoad(cleanupLoc, addr, LoadOwnershipQualifier::Copy);
    B.createEndAccess(cleanupLoc, addr, false /*is aborting*/);
    B.createStore(cleanupLoc, iter, iterAddr, StoreOwnershipQualifier::Assign);

    B.createEndBorrow(cleanupLoc, xx);
    B.createEndBorrow(cleanupLoc, iterBorrow);

    B.createDestroyValue(cleanupLoc, _iter);
    B.createDeallocStack(cleanupLoc, iterBorrowAddr);

    B.createBranch(cleanupLoc, loopBB);

    // the next link in the chain is not unique, so we are done here
    B.emitBlock(notUniqueBB);
    B.createBranch(cleanupLoc, cleanBB);

    // we reached the end of the chain
    B.emitBlock(noneBB);
    B.createDeallocStack(cleanupLoc, operandAddr);
    B.createBranch(cleanupLoc, cleanBB);

    B.emitBlock(cleanBB);
    B.createDestroyAddr(cleanupLoc, iterAddr);
    B.createDeallocStack(cleanupLoc, iterAddr);
  }
}

void SILGenFunction::emitClassMemberDestruction(ManagedValue selfValue,
                                                ClassDecl *cd,
                                                CleanupLocation cleanupLoc) {
  assert(selfValue.getOwnershipKind() == OwnershipKind::Guaranteed);

  /// If this ClassDecl is a distributed actor, we must synthesise another code
  /// path for deallocating a 'remote' actor. In that case, these basic blocks
  /// are used to return to the "normal" (i.e. 'local' instance) destruction.
  ///
  /// For other cases, the basic blocks are not necessary and the destructor
  /// can just emit all the normal destruction code right into the current
  /// block.
  // If set, used as the basic block for the destroying of all members.
  SILBasicBlock *normalMemberDestroyBB = nullptr;
  // If set, used as the basic block after members have been destroyed,
  // and we're ready to perform final cleanups before returning.
  SILBasicBlock *finishBB = nullptr;

  /// A distributed actor may be 'remote', in which case there is no need to
  /// destroy "all" members, because they never had storage to begin with.
  if (cd->isDistributedActor()) {
    normalMemberDestroyBB = createBasicBlock();
    finishBB = createBasicBlock();
    emitDistributedActorClassMemberDestruction(cleanupLoc, selfValue, cd,
                                               normalMemberDestroyBB,
                                               finishBB);
  }

  auto recursiveLinks = findRecursiveLinks(F.getDeclContext(), cd);

  /// Destroy all members.
  {
    if (normalMemberDestroyBB)
      B.emitBlock(normalMemberDestroyBB);

    for (VarDecl *vd : cd->getStoredProperties()) {
      if (recursiveLinks.contains(vd))
        continue;
      destroyClassMember(cleanupLoc, selfValue, vd);
    }

    if (!recursiveLinks.empty())
      emitRecursiveChainDestruction(selfValue, cd, recursiveLinks, cleanupLoc);

    if (finishBB)
      B.createBranch(cleanupLoc, finishBB);
  }

  {
    if (finishBB)
      B.emitBlock(finishBB);

    if (cd->isRootDefaultActor()) {
      // TODO(distributed): we may need to call the distributed destroy here
      // instead?
      auto builtinName = getASTContext().getIdentifier(
          getBuiltinName(BuiltinValueKind::DestroyDefaultActor));
      auto resultTy = SGM.Types.getEmptyTupleType();

      B.createBuiltin(cleanupLoc, builtinName, resultTy, /*subs*/ {},
                      {selfValue.getValue()});
    }
  }
}

void SILGenFunction::emitObjCDestructor(SILDeclRef dtor) {
  auto dd = cast<DestructorDecl>(dtor.getDecl());
  auto cd = cast<ClassDecl>(dd->getDeclContext());
  MagicFunctionName = DeclName(SGM.M.getASTContext().getIdentifier("deinit"));

  RegularLocation loc(dd);
  if (dd->isImplicit())
    loc.markAutoGenerated();

  SILValue selfValue = emitSelfDecl(dd->getImplicitSelfDecl());

  // Create a basic block to jump to for the implicit destruction behavior
  // of releasing the elements and calling the superclass destructor.
  // We won't actually emit the block until we finish with the destructor body.
  prepareEpilog(None, false, CleanupLocation(loc));

  emitProfilerIncrement(dd->getTypecheckedBody());
  // Emit the destructor body.
  emitStmt(dd->getTypecheckedBody());

  Optional<SILValue> maybeReturnValue;
  SILLocation returnLoc(loc);
  std::tie(maybeReturnValue, returnLoc) = emitEpilogBB(loc);

  if (!maybeReturnValue)
    return;

  auto cleanupLoc = CleanupLocation(loc);

  // Note: the ivar destroyer is responsible for destroying the
  // instance variables before the object is actually deallocated.

  // Form a reference to the superclass -dealloc.
  Type superclassTy = dd->mapTypeIntoContext(cd->getSuperclass());
  assert(superclassTy && "Emitting Objective-C -dealloc without superclass?");
  ClassDecl *superclass = superclassTy->getClassOrBoundGenericClass();
  auto superclassDtorDecl = superclass->getDestructor();
  auto superclassDtor = SILDeclRef(superclassDtorDecl,
                                   SILDeclRef::Kind::Deallocator)
    .asForeign();
  auto superclassDtorType =
      SGM.Types.getConstantType(getTypeExpansionContext(), superclassDtor);
  SILValue superclassDtorValue = B.createObjCSuperMethod(
      cleanupLoc, selfValue, superclassDtor, superclassDtorType);

  // Call the superclass's -dealloc.
  SILType superclassSILTy = getLoweredLoadableType(superclassTy);
  SILValue superSelf = B.createUpcast(cleanupLoc, selfValue, superclassSILTy);
  assert(superSelf.getOwnershipKind() == OwnershipKind::Owned);

  auto subMap
    = superclassTy->getContextSubstitutionMap(SGM.M.getSwiftModule(),
                                              superclass);

  B.createApply(cleanupLoc, superclassDtorValue, subMap, superSelf);

  // We know that the given value came in at +1, but we pass the relevant value
  // as unowned to the destructor. Create a fake balance for the verifier to be
  // happy.
  B.createEndLifetime(cleanupLoc, superSelf);

  // Return.
  B.createReturn(returnLoc, emitEmptyTuple(cleanupLoc));
}