mirror of
https://github.com/apple/swift.git
synced 2025-12-14 20:36:38 +01:00
[semantic-arc] Add a new guaranteed ARC optimization pass.
Oftentimes SILGen wants to hold onto values that have been copied. This causes an issue when, due to Cleanups firing, SILBuilder inserts destroys that destroy the copy which produced the value SILGen held onto. This then causes SILGen to emit incorrect code. There is really no reason to introduce such complexity into SILBuilder when a small, simple guaranteed pass can perform the same work; hence the introduction of this pass. In a later commit, I am going to eliminate the SILBuilder entry points. rdar://28685236
This commit is contained in:
@@ -127,6 +127,8 @@ PASS(GlobalOpt, "global-opt",
|
||||
"Global variable optimizations")
|
||||
PASS(GlobalPropertyOpt, "global-property-opt",
|
||||
"Optimize properties")
|
||||
PASS(GuaranteedARCOpts, "guaranteed-arc-opts",
|
||||
"Guaranteed ARC optimizations")
|
||||
PASS(HighLevelCSE, "high-level-cse",
|
||||
"Common subexpression elimination on High-level SIL")
|
||||
PASS(HighLevelLICM, "high-level-licm",
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
# Sources for the mandatory (diagnostic) SIL pass pipeline.
# NOTE: Mandatory/MandatoryInlining.cpp was previously listed twice;
# each source should appear exactly once to avoid duplicate-compilation
# warnings from the build system.
set(MANDATORY_SOURCES
  Mandatory/DefiniteInitialization.cpp
  Mandatory/MandatoryInlining.cpp
  Mandatory/DIMemoryUseCollector.cpp
  Mandatory/DataflowDiagnostics.cpp
  Mandatory/DiagnoseUnreachable.cpp
  Mandatory/GuaranteedARCOpts.cpp
  Mandatory/PredictableMemOpt.cpp
  Mandatory/ConstantPropagation.cpp
  PARENT_SCOPE)
|
||||
|
||||
225
lib/SILOptimizer/Mandatory/GuaranteedARCOpts.cpp
Normal file
225
lib/SILOptimizer/Mandatory/GuaranteedARCOpts.cpp
Normal file
@@ -0,0 +1,225 @@
|
||||
//===--- GuaranteedARCOpts.cpp --------------------------------------------===//
|
||||
//
|
||||
// This source file is part of the Swift.org open source project
|
||||
//
|
||||
// Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
|
||||
// Licensed under Apache License v2.0 with Runtime Library Exception
|
||||
//
|
||||
// See http://swift.org/LICENSE.txt for license information
|
||||
// See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
|
||||
//
|
||||
//===----------------------------------------------------------------------===//
|
||||
|
||||
#define DEBUG_TYPE "sil-guaranteed-arc-opts"
|
||||
#include "swift/SILOptimizer/PassManager/Passes.h"
|
||||
#include "swift/SILOptimizer/PassManager/Transforms.h"
|
||||
#include "swift/SIL/SILVisitor.h"
|
||||
|
||||
using namespace swift;
|
||||
|
||||
namespace {

/// Visitor that attempts to fold each ARC "destroy"-style instruction
/// (destroy_addr, strong_release, destroy_value, release_value) into a
/// matching "copy"-style instruction earlier in the same basic block.
/// Each visit method returns true iff it erased or rewrote instructions.
struct GuaranteedARCOptsVisitor
    : SILInstructionVisitor<GuaranteedARCOptsVisitor, bool> {
  /// Default case: instructions we do not know how to optimize are left
  /// untouched.
  bool visitValueBase(ValueBase *V) { return false; }
  bool visitDestroyAddrInst(DestroyAddrInst *DAI);
  bool visitStrongReleaseInst(StrongReleaseInst *SRI);
  bool visitDestroyValueInst(DestroyValueInst *DVI);
  bool visitReleaseValueInst(ReleaseValueInst *RVI);
};

} // end anonymous namespace
|
||||
|
||||
/// Return a reverse iterator positioned at the instruction immediately
/// *before* \p I, suitable for scanning backwards within I's block.
static SILBasicBlock::reverse_iterator
getPrevReverseIterator(SILInstruction *I) {
  // A reverse_iterator built from a forward iterator designates the element
  // preceding it.  Constructing one from the successor of I therefore
  // designates I itself; stepping once more reaches I's predecessor.
  SILBasicBlock::reverse_iterator RevIter(std::next(I->getIterator()));
  return std::next(RevIter);
}
|
||||
|
||||
/// Try to fold a destroy_addr into a preceding copy_addr from the same
/// address by promoting the copy to a [take].  Returns true iff the
/// destroy_addr was erased.
bool GuaranteedARCOptsVisitor::visitDestroyAddrInst(DestroyAddrInst *DAI) {
  SILValue Operand = DAI->getOperand();

  // Scan backwards from the instruction just before DAI to the start of the
  // block.  II is advanced before Inst is inspected so that erasing Inst
  // never invalidates the loop iterator.
  for (auto II = getPrevReverseIterator(DAI), IE = DAI->getParent()->rend();
       II != IE;) {
    auto *Inst = &*II;
    ++II;

    if (auto *CA = dyn_cast<CopyAddrInst>(Inst)) {
      if (CA->getSrc() == Operand && !CA->isTakeOfSrc()) {
        // The copy reads Operand without consuming it, so converting it to
        // a take subsumes the destroy_addr entirely.
        CA->setIsTakeOfSrc(IsTake);
        DAI->eraseFromParent();
        return true;
      }
    }

    // destroy_addrs commonly exist in a block of dealloc_stack's, which don't
    // affect take-ability.
    if (isa<DeallocStackInst>(Inst))
      continue;

    // This code doesn't try to prove tricky validity constraints about whether
    // it is safe to push the destroy_addr past interesting instructions.
    if (Inst->mayHaveSideEffects())
      break;
  }

  // If we didn't find a copy_addr to fold this into, emit the destroy_addr.
  return false;
}
|
||||
|
||||
/// Conservatively determine whether \p Inst might decrement a strong
/// reference count.  Returns false only for instructions that are provably
/// safe to scan past when pairing retains with releases.
static bool couldReduceStrongRefcount(SILInstruction *Inst) {
  // Simple memory accesses and refcount *increments* can never drop a
  // strong refcount.
  bool IsKnownInnocuous =
      isa<LoadInst>(Inst) || isa<StoreInst>(Inst) ||
      isa<RetainValueInst>(Inst) || isa<UnownedRetainInst>(Inst) ||
      isa<UnownedReleaseInst>(Inst) || isa<StrongRetainUnownedInst>(Inst) ||
      isa<StoreWeakInst>(Inst) || isa<StrongRetainInst>(Inst) ||
      isa<AllocStackInst>(Inst) || isa<DeallocStackInst>(Inst);
  if (IsKnownInnocuous)
    return false;

  // A store-like instruction can only drop a refcount via the value it
  // overwrites.  Trivial and reference-storage stored types therefore
  // cannot release; nontrivial ones can, and proving otherwise would
  // require alias analysis we do not attempt here.
  auto isRefcountNeutralStoredType = [&](SILType Ty) -> bool {
    return Ty.isTrivial(Inst->getModule()) || Ty.is<ReferenceStorageType>();
  };

  if (auto *AI = dyn_cast<AssignInst>(Inst)) {
    if (isRefcountNeutralStoredType(AI->getOperand(0)->getType()))
      return false;
  }

  if (auto *CAI = dyn_cast<CopyAddrInst>(Inst)) {
    // Initializations never overwrite an existing value, so they can only
    // increase refcounts.
    if (CAI->isInitializationOfDest())
      return false;

    if (isRefcountNeutralStoredType(
            CAI->getOperand(0)->getType().getObjectType()))
      return false;
  }

  // This code doesn't try to prove tricky validity constraints about whether
  // it is safe to push the release past interesting instructions, so fall
  // back to the generic side-effect query.
  return Inst->mayHaveSideEffects();
}
|
||||
|
||||
/// Try to eliminate a strong_release, either because it releases a
/// function_ref (a no-op) or because it cancels a preceding strong_retain of
/// the same value.  Returns true iff instructions were erased.
bool GuaranteedARCOptsVisitor::visitStrongReleaseInst(StrongReleaseInst *SRI) {
  SILValue Operand = SRI->getOperand();
  // Release on a functionref is a noop.
  if (isa<FunctionRefInst>(Operand)) {
    SRI->eraseFromParent();
    return true;
  }

  // Check to see if the instruction immediately before the insertion point is a
  // strong_retain of the specified operand. If so, we can zap the pair.
  // II is advanced before Inst is inspected so erasing Inst is safe.
  for (auto II = getPrevReverseIterator(SRI), IE = SRI->getParent()->rend();
       II != IE;) {
    auto *Inst = &*II;
    ++II;

    if (auto *SRA = dyn_cast<StrongRetainInst>(Inst)) {
      if (SRA->getOperand() == Operand) {
        // Matching retain/release pair: both are redundant.
        SRA->eraseFromParent();
        SRI->eraseFromParent();
        return true;
      }
      // Skip past unrelated retains.
      continue;
    }

    // Scan past simple instructions that cannot reduce strong refcounts.
    if (couldReduceStrongRefcount(Inst))
      break;
  }

  // If we didn't find a retain to fold this into, return false.
  return false;
}
|
||||
|
||||
/// Try to cancel a destroy_value against a preceding copy_value.  Handles
/// both destroying the copy itself and destroying the copied-from value;
/// in either case the copy is forwarded to its operand and both
/// instructions are erased.  Returns true iff instructions were erased.
bool GuaranteedARCOptsVisitor::visitDestroyValueInst(DestroyValueInst *DVI) {
  SILValue Operand = DVI->getOperand();
  // Scan backwards; II is advanced before Inst is inspected so erasing
  // Inst is safe.
  for (auto II = getPrevReverseIterator(DVI), IE = DVI->getParent()->rend();
       II != IE;) {
    auto *Inst = &*II;
    ++II;

    if (auto *CVI = dyn_cast<CopyValueInst>(Inst)) {
      // Either we destroy the copy's result, or we copy and destroy the
      // same source value — both make the copy+destroy pair redundant.
      if (SILValue(CVI) == Operand || CVI->getOperand() == Operand) {
        CVI->replaceAllUsesWith(CVI->getOperand());
        CVI->eraseFromParent();
        DVI->eraseFromParent();
        return true;
      }
      // Skip past unrelated copy_values.
      continue;
    }

    // Scan past simple instructions that cannot reduce refcounts.
    if (couldReduceStrongRefcount(Inst))
      break;
  }

  // No pairable copy_value found; leave the destroy_value in place.
  return false;
}
|
||||
|
||||
/// Try to cancel a release_value against a preceding retain_value of the
/// same value.  Returns true iff the pair was erased.
bool GuaranteedARCOptsVisitor::visitReleaseValueInst(ReleaseValueInst *RVI) {
  SILValue Operand = RVI->getOperand();

  // Scan backwards; II is advanced before Inst is inspected so erasing
  // Inst is safe.
  for (auto II = getPrevReverseIterator(RVI), IE = RVI->getParent()->rend();
       II != IE;) {
    auto *Inst = &*II;
    ++II;

    if (auto *SRA = dyn_cast<RetainValueInst>(Inst)) {
      if (SRA->getOperand() == Operand) {
        // Matching retain_value/release_value pair: both are redundant.
        SRA->eraseFromParent();
        RVI->eraseFromParent();
        return true;
      }
      // Skip past unrelated retains.
      continue;
    }

    // Scan past simple instructions that cannot reduce refcounts.
    if (couldReduceStrongRefcount(Inst))
      break;
  }

  // If we didn't find a retain to fold this into, emit the release.
  return false;
}
|
||||
|
||||
//===----------------------------------------------------------------------===//
// Top Level Entrypoint
//===----------------------------------------------------------------------===//

namespace {

/// Function pass that runs GuaranteedARCOptsVisitor over every instruction
/// of the function, folding copy/destroy and retain/release pairs that
/// SILGen emitted, and invalidates instruction-level analyses when
/// anything changed.
struct GuaranteedARCOpts : SILFunctionTransform {
  void run() override {
    GuaranteedARCOptsVisitor Visitor;

    bool MadeChange = false;
    SILFunction *F = getFunction();
    for (auto &BB : *F) {
      // II is advanced before I is visited: visit methods may erase I (and
      // earlier instructions), which must not invalidate the iterator.
      for (auto II = BB.begin(), IE = BB.end(); II != IE;) {
        SILInstruction *I = &*II;
        ++II;
        MadeChange |= Visitor.visit(I);
      }
    }

    if (MadeChange) {
      invalidateAnalysis(SILAnalysis::InvalidationKind::Instructions);
    }
  }

  StringRef getName() override { return "Guaranteed ARC Opts"; }
};

} // end anonymous namespace

/// Pass-manager factory entry point (declared in Passes.h).
SILTransform *swift::createGuaranteedARCOpts() {
  return new GuaranteedARCOpts();
}
|
||||
@@ -101,6 +101,7 @@ bool swift::runSILDiagnosticPasses(SILModule &Module) {
|
||||
PM.addMandatoryInlining();
|
||||
PM.addPredictableMemoryOptimizations();
|
||||
PM.addDiagnosticConstantPropagation();
|
||||
PM.addGuaranteedARCOpts();
|
||||
PM.addDiagnoseUnreachable();
|
||||
PM.addEmitDFDiagnostics();
|
||||
// Canonical swift requires all non cond_br critical edges to be split.
|
||||
|
||||
@@ -18,7 +18,6 @@ public func ifelseexpr() -> Int64 {
|
||||
x.x -= 1
|
||||
}
|
||||
// CHECK: @swift_rt_swift_release to void (%C6return1X*)*)(%C6return1X* [[X]])
|
||||
// CHECK: @swift_rt_swift_release to void (%C6return1X*)*)(%C6return1X* [[X]])
|
||||
// CHECK-SAME: , !dbg ![[RELEASE:.*]]
|
||||
|
||||
// The ret instruction should be in the same scope as the return expression.
|
||||
|
||||
@@ -2,12 +2,16 @@
|
||||
|
||||
import Swift
|
||||
|
||||
sil @error_user : $@convention(thin) (@owned Error) -> ()
|
||||
|
||||
// CHECK-LABEL: define{{( protected)?}} void @retain_release_boxed_existential(%swift.error*)
|
||||
sil @retain_release_boxed_existential : $@convention(thin) (Error) -> () {
|
||||
sil @retain_release_boxed_existential : $@convention(thin) (@owned Error) -> () {
|
||||
entry(%e : $Error):
|
||||
// CHECK-objc: @swift_errorRetain
|
||||
// CHECK-native: @swift_rt_swift_retain
|
||||
strong_retain %e : $Error
|
||||
%1 = function_ref @error_user : $@convention(thin) (@owned Error) -> ()
|
||||
apply %1(%e) : $@convention(thin) (@owned Error) -> ()
|
||||
// CHECK-objc: @swift_errorRelease
|
||||
// CHECK-native: @swift_rt_swift_release
|
||||
strong_release %e : $Error
|
||||
|
||||
@@ -28,6 +28,8 @@ enum SinglePayloadNontrivial {
|
||||
case c
|
||||
}
|
||||
|
||||
sil @single_payload_nontrivial_user : $@convention(thin) (@owned SinglePayloadNontrivial) -> ()
|
||||
|
||||
enum MultiPayloadTrivial {
|
||||
case payload1(Builtin.Int64)
|
||||
case payload2(Builtin.Int32, Builtin.Int32)
|
||||
@@ -185,9 +187,11 @@ enum GenericFixedLayout<T> {
|
||||
// CHECK: %swift.type** null
|
||||
// CHECK: }>
|
||||
|
||||
sil @single_payload_nontrivial_copy_destroy : $(SinglePayloadNontrivial) -> () {
|
||||
sil @single_payload_nontrivial_copy_destroy : $(@owned SinglePayloadNontrivial) -> () {
|
||||
bb0(%0 : $SinglePayloadNontrivial):
|
||||
retain_value %0 : $SinglePayloadNontrivial
|
||||
%1 = function_ref @single_payload_nontrivial_user : $@convention(thin) (@owned SinglePayloadNontrivial) -> ()
|
||||
apply %1(%0) : $@convention(thin) (@owned SinglePayloadNontrivial) -> ()
|
||||
release_value %0 : $SinglePayloadNontrivial
|
||||
%v = tuple ()
|
||||
return %v : $()
|
||||
|
||||
@@ -68,6 +68,9 @@ class Octogenarian : Contrarian {
|
||||
@objc override func eviscerate() {}
|
||||
}
|
||||
|
||||
@_silgen_name("unknown")
|
||||
func unknown(_ x: id) -> id
|
||||
|
||||
// CHECK: define hidden %objc_object* @_TF4objc5test0{{.*}}(%objc_object*)
|
||||
// CHECK-NOT: call {{.*}} @swift_unknownRetain
|
||||
// CHECK: call {{.*}} @swift_unknownRetain
|
||||
@@ -77,6 +80,7 @@ class Octogenarian : Contrarian {
|
||||
func test0(_ arg: id) -> id {
|
||||
var x : id
|
||||
x = arg
|
||||
unknown(x)
|
||||
var y = x
|
||||
return y
|
||||
}
|
||||
|
||||
57
test/SILOptimizer/guaranteed_arc_opts.sil
Normal file
57
test/SILOptimizer/guaranteed_arc_opts.sil
Normal file
@@ -0,0 +1,57 @@
|
||||
// RUN: %target-sil-opt -guaranteed-arc-opts %s | %FileCheck %s

// Tests for the GuaranteedARCOpts mandatory pass: copy_value/destroy_value
// pairs of the same value should be cancelled; copies separated from their
// destroy by side-effecting instructions must be left alone.

sil_stage raw

import Builtin

sil @kraken : $@convention(thin) () -> ()

// A copy of %0 destroyed directly is removed; the unrelated copy of %1 stays.
// CHECK-LABEL: sil @copyvalue_test1 : $@convention(thin) (Builtin.NativeObject, Builtin.NativeObject) -> () {
// CHECK: bb0([[ARG1:%.*]] : $Builtin.NativeObject, [[ARG2:%.*]] : $Builtin.NativeObject):
// CHECK-NOT: copy_value [[ARG1]]
// CHECK: copy_value [[ARG2]]
// CHECK-NOT: destroy_value [[ARG1]]
sil @copyvalue_test1 : $@convention(thin) (Builtin.NativeObject, Builtin.NativeObject) -> () {
bb0(%0 : $Builtin.NativeObject, %1 : $Builtin.NativeObject):
  %2 = copy_value %0 : $Builtin.NativeObject
  copy_value %1 : $Builtin.NativeObject
  destroy_value %2 : $Builtin.NativeObject
  %9999 = tuple()
  return %9999 : $()
}

// A trivial store between the copy and destroy does not block the pairing;
// the copy forwards to %0 which is returned directly.
// CHECK-LABEL: sil @copyvalue_test2 : $@convention(thin) (Builtin.NativeObject, @in Builtin.Int32) -> Builtin.NativeObject {
// CHECK: bb0([[ARG1:%.*]] : $Builtin.NativeObject
// CHECK-NOT: copy_value
// CHECK-NOT: destroy_value
// CHECK: return [[ARG1]]
sil @copyvalue_test2 : $@convention(thin) (Builtin.NativeObject, @in Builtin.Int32) -> Builtin.NativeObject {
bb0(%0 : $Builtin.NativeObject, %1 : $*Builtin.Int32):
  %2 = copy_value %0 : $Builtin.NativeObject
  %3 = integer_literal $Builtin.Int32, 0
  store %3 to [trivial] %1 : $*Builtin.Int32
  destroy_value %0 : $Builtin.NativeObject
  return %2 : $Builtin.NativeObject
}

// An intervening apply (may have side effects) stops the backwards scan;
// the pair must be preserved.
// CHECK-LABEL: sil @copyvalue_test3 : $@convention(thin) (Builtin.NativeObject) -> () {
// CHECK: copy_value
// CHECK: destroy_value
sil @copyvalue_test3 : $@convention(thin) (Builtin.NativeObject) -> () {
bb0(%0 : $Builtin.NativeObject):
  copy_value %0 : $Builtin.NativeObject
  %1 = function_ref @kraken : $@convention(thin) () -> ()
  apply %1() : $@convention(thin) () -> ()
  destroy_value %0 : $Builtin.NativeObject
  %9999 = tuple()
  return %9999 : $()
}

// A destroy_value with no preceding copy_value is left untouched.
// CHECK-LABEL: sil @copyvalue_test4 : $@convention(thin) (Builtin.NativeObject) -> () {
// CHECK: destroy_value
sil @copyvalue_test4 : $@convention(thin) (Builtin.NativeObject) -> () {
bb0(%0 : $Builtin.NativeObject):
  destroy_value %0 : $Builtin.NativeObject
  %9999 = tuple()
  return %9999 : $()
}
|
||||
139
test/SILOptimizer/guaranteed_arc_opts_qualified.sil
Normal file
139
test/SILOptimizer/guaranteed_arc_opts_qualified.sil
Normal file
@@ -0,0 +1,139 @@
|
||||
// RUN: %target-sil-opt -assume-parsing-unqualified-ownership-sil -guaranteed-arc-opts %s | %FileCheck %s

// Unqualified-ownership tests for GuaranteedARCOpts: retain_value/release_value
// and strong_retain/strong_release pairs should cancel, destroy_addr should
// fold into a preceding copy_addr as a [take], and side-effecting
// instructions must block each optimization.

sil_stage raw

import Builtin

sil @kraken : $@convention(thin) () -> ()

// Matching retain/release of %0 cancel; the unrelated retain of %1 stays.
// CHECK-LABEL: sil @retainvalue_test1 : $@convention(thin) (Builtin.NativeObject, Builtin.NativeObject) -> () {
// CHECK: bb0([[ARG1:%.*]] : $Builtin.NativeObject, [[ARG2:%.*]] : $Builtin.NativeObject):
// CHECK-NOT: retain_value [[ARG1]]
// CHECK: retain_value [[ARG2]]
// CHECK-NOT: release_value [[ARG1]]
sil @retainvalue_test1 : $@convention(thin) (Builtin.NativeObject, Builtin.NativeObject) -> () {
bb0(%0 : $Builtin.NativeObject, %1 : $Builtin.NativeObject):
  retain_value %0 : $Builtin.NativeObject
  retain_value %1 : $Builtin.NativeObject
  release_value %0 : $Builtin.NativeObject
  %9999 = tuple()
  return %9999 : $()
}

// A trivial store between the pair does not block the cancellation.
// CHECK-LABEL: sil @retainvalue_test2 : $@convention(thin) (Builtin.NativeObject, @in Builtin.Int32) -> () {
// CHECK-NOT: retain_value
// CHECK-NOT: release_value
sil @retainvalue_test2 : $@convention(thin) (Builtin.NativeObject, @in Builtin.Int32) -> () {
bb0(%0 : $Builtin.NativeObject, %1 : $*Builtin.Int32):
  retain_value %0 : $Builtin.NativeObject
  %2 = integer_literal $Builtin.Int32, 0
  store %2 to %1 : $*Builtin.Int32
  release_value %0 : $Builtin.NativeObject
  %9999 = tuple()
  return %9999 : $()
}

// An intervening apply blocks the backwards scan; the pair is preserved.
// CHECK-LABEL: sil @retainvalue_test3 : $@convention(thin) (Builtin.NativeObject) -> () {
// CHECK: retain_value
// CHECK: release_value
sil @retainvalue_test3 : $@convention(thin) (Builtin.NativeObject) -> () {
bb0(%0 : $Builtin.NativeObject):
  retain_value %0 : $Builtin.NativeObject
  %1 = function_ref @kraken : $@convention(thin) () -> ()
  apply %1() : $@convention(thin) () -> ()
  release_value %0 : $Builtin.NativeObject
  %9999 = tuple()
  return %9999 : $()
}

// A release with no preceding retain is left untouched.
// CHECK-LABEL: sil @retainvalue_test4 : $@convention(thin) (Builtin.NativeObject) -> () {
// CHECK: release_value
sil @retainvalue_test4 : $@convention(thin) (Builtin.NativeObject) -> () {
bb0(%0 : $Builtin.NativeObject):
  release_value %0 : $Builtin.NativeObject
  %9999 = tuple()
  return %9999 : $()
}

// Same pairing behavior for strong_retain/strong_release.
// CHECK-LABEL: sil @strongretain_test1 : $@convention(thin) (Builtin.NativeObject, Builtin.NativeObject) -> () {
// CHECK: bb0([[ARG1:%.*]] : $Builtin.NativeObject, [[ARG2:%.*]] : $Builtin.NativeObject):
// CHECK-NOT: strong_retain [[ARG1]]
// CHECK: strong_retain [[ARG2]]
// CHECK-NOT: strong_release [[ARG1]]
sil @strongretain_test1 : $@convention(thin) (Builtin.NativeObject, Builtin.NativeObject) -> () {
bb0(%0 : $Builtin.NativeObject, %1 : $Builtin.NativeObject):
  strong_retain %0 : $Builtin.NativeObject
  strong_retain %1 : $Builtin.NativeObject
  strong_release %0 : $Builtin.NativeObject
  %9999 = tuple()
  return %9999 : $()
}

// A trivial store does not block the strong pair cancellation.
// CHECK-LABEL: sil @strongretain_test2 : $@convention(thin) (Builtin.NativeObject, @in Builtin.Int32) -> () {
// CHECK-NOT: strong_retain
// CHECK-NOT: strong_release
sil @strongretain_test2 : $@convention(thin) (Builtin.NativeObject, @in Builtin.Int32) -> () {
bb0(%0 : $Builtin.NativeObject, %1 : $*Builtin.Int32):
  strong_retain %0 : $Builtin.NativeObject
  %2 = integer_literal $Builtin.Int32, 0
  store %2 to %1 : $*Builtin.Int32
  strong_release %0 : $Builtin.NativeObject
  %9999 = tuple()
  return %9999 : $()
}

// An intervening apply blocks the scan; the pair is preserved.
// CHECK-LABEL: sil @strongretain_test3 : $@convention(thin) (Builtin.NativeObject) -> () {
// CHECK: strong_retain
// CHECK: strong_release
sil @strongretain_test3 : $@convention(thin) (Builtin.NativeObject) -> () {
bb0(%0 : $Builtin.NativeObject):
  strong_retain %0 : $Builtin.NativeObject
  %1 = function_ref @kraken : $@convention(thin) () -> ()
  apply %1() : $@convention(thin) () -> ()
  strong_release %0 : $Builtin.NativeObject
  %9999 = tuple()
  return %9999 : $()
}

// A strong_release with no preceding strong_retain is left untouched.
// CHECK-LABEL: sil @strongretain_test4 : $@convention(thin) (Builtin.NativeObject) -> () {
// CHECK: strong_release
sil @strongretain_test4 : $@convention(thin) (Builtin.NativeObject) -> () {
bb0(%0 : $Builtin.NativeObject):
  strong_release %0 : $Builtin.NativeObject
  %9999 = tuple()
  return %9999 : $()
}

// destroy_addr of the copy's source folds into the copy_addr as a [take].
// CHECK-LABEL: sil @copyaddr_test1 : $@convention(thin) (@in Builtin.NativeObject) -> @out Builtin.NativeObject {
// CHECK: bb0([[ARG1:%.*]] : $*Builtin.NativeObject, [[ARG2:%.*]] : $*Builtin.NativeObject):
// CHECK: copy_addr [take] {{%.*}} to {{%.*}}
// CHECK-NOT: destroy_addr [[ARG1]]
sil @copyaddr_test1 : $@convention(thin) (@in Builtin.NativeObject) -> @out Builtin.NativeObject {
bb0(%0 : $*Builtin.NativeObject, %1 : $*Builtin.NativeObject):
  copy_addr %1 to %0 : $*Builtin.NativeObject
  destroy_addr %1 : $*Builtin.NativeObject
  %9999 = tuple()
  return %9999 : $()
}

// An intervening apply blocks the fold; copy and destroy are preserved.
// CHECK-LABEL: sil @copyaddr_test2 : $@convention(thin) (@in Builtin.NativeObject) -> @out Builtin.NativeObject {
// CHECK: copy_addr
// CHECK: destroy_addr
sil @copyaddr_test2 : $@convention(thin) (@in Builtin.NativeObject) -> @out Builtin.NativeObject {
bb0(%0 : $*Builtin.NativeObject, %1 : $*Builtin.NativeObject):
  copy_addr %1 to %0 : $*Builtin.NativeObject
  %2 = function_ref @kraken : $@convention(thin) () -> ()
  apply %2() : $@convention(thin) () -> ()
  destroy_addr %1 : $*Builtin.NativeObject
  %9999 = tuple()
  return %9999 : $()
}

// A destroy_addr with no preceding copy_addr is left untouched.
// CHECK-LABEL: sil @copyaddr_test3 : $@convention(thin) (@in Builtin.NativeObject) -> () {
// CHECK: destroy_addr
sil @copyaddr_test3 : $@convention(thin) (@in Builtin.NativeObject) -> () {
bb0(%0 : $*Builtin.NativeObject):
  destroy_addr %0 : $*Builtin.NativeObject
  %9999 = tuple()
  return %9999 : $()
}
|
||||
|
||||
// CHECK-LABEL: sil @copyaddr_test1 : $@convention(thin) (@in Builtin.NativeObject) -> @out Builtin.NativeObject {
|
||||
// CHECK: bb0([[ARG1:%.*]] : $*Builtin.NativeObject, [[ARG2:%.*]] : $*Builtin.NativeObject):
|
||||
// CHECK: copy_addr [take] {{%.*}} to {{%.*}}
|
||||
// CHECK-NOT: destroy_addr [[ARG1]]
|
||||
sil @copyaddr_test1 : $@convention(thin) (@in Builtin.NativeObject) -> @out Builtin.NativeObject {
|
||||
bb0(%0 : $*Builtin.NativeObject, %1 : $*Builtin.NativeObject):
|
||||
copy_addr %1 to %0 : $*Builtin.NativeObject
|
||||
destroy_addr %1 : $*Builtin.NativeObject
|
||||
%9999 = tuple()
|
||||
return %9999 : $()
|
||||
}
|
||||
|
||||
// CHECK-LABEL: sil @copyaddr_test2 : $@convention(thin) (@in Builtin.NativeObject) -> @out Builtin.NativeObject {
|
||||
// CHECK: copy_addr
|
||||
// CHECK: destroy_addr
|
||||
sil @copyaddr_test2 : $@convention(thin) (@in Builtin.NativeObject) -> @out Builtin.NativeObject {
|
||||
bb0(%0 : $*Builtin.NativeObject, %1 : $*Builtin.NativeObject):
|
||||
copy_addr %1 to %0 : $*Builtin.NativeObject
|
||||
%2 = function_ref @kraken : $@convention(thin) () -> ()
|
||||
apply %2() : $@convention(thin) () -> ()
|
||||
destroy_addr %1 : $*Builtin.NativeObject
|
||||
%9999 = tuple()
|
||||
return %9999 : $()
|
||||
}
|
||||
|
||||
// CHECK-LABEL: sil @copyaddr_test3 : $@convention(thin) (@in Builtin.NativeObject) -> () {
|
||||
// CHECK: destroy_addr
|
||||
sil @copyaddr_test3 : $@convention(thin) (@in Builtin.NativeObject) -> () {
|
||||
bb0(%0 : $*Builtin.NativeObject):
|
||||
destroy_addr %0 : $*Builtin.NativeObject
|
||||
%9999 = tuple()
|
||||
return %9999 : $()
|
||||
}
|
||||
Reference in New Issue
Block a user