swift-mirror/test/IRGen/existentials_objc.sil
Commit e249fd680e by John McCall

Destructure result types in SIL function types.

Similarly to how we've always handled parameter types, we
now recursively expand tuples in result types and separately
determine a result convention for each result.

The most important code-generation change here is that
indirect results are now returned separately from each
other and from any direct results.  It is generally far
better for an indirect result to be received as an
independent value: the caller is much more likely to be
able to receive the result directly in the address it
wants to initialize, rather than having to receive it in
temporary memory and then copy parts of it into the
target.
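
As a rough sketch (the function, types, and values here
are hypothetical, purely for illustration), a callee with
two indirect results now receives two separate result
addresses, and the caller passes exactly the locations it
wants initialized:

  // Hypothetical function type with two indirect results
  // and one formal parameter.
  sil @produce_pair : $@convention(thin) (Int) -> (@out A, @out B)

  // Caller side: each result address is passed directly to
  // the apply; no tuple-shaped temporary is needed.
  %a = alloc_stack $A
  %b = alloc_stack $B
  %f = function_ref @produce_pair : $@convention(thin) (Int) -> (@out A, @out B)
  %r = apply %f(%a, %b, %i) : $@convention(thin) (Int) -> (@out A, @out B)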

The most important conceptual change here that clients and
producers of SIL must be aware of is the new distinction
between a SILFunctionType's *parameters* and its *argument
list*.  The former is just the formal parameters, derived
purely from the parameter types of the original function;
indirect results are no longer in this list.  The latter
includes the indirect result arguments; as always, all
the indirect results strictly precede the parameters.
Apply instructions and entry block arguments follow the
argument list, not the parameter list.
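
Schematically, using the first function in the test below:
its parameter list is just (@owned T), but its argument
list, and therefore its entry block, also binds the @out
result address, which strictly precedes the parameter:

  sil @init_opaque_existential : $@convention(thin) <T where T : Gizmo> (@owned T) -> @out protocol<> {
  // %0 is the indirect result address; %1 is the one formal parameter.
  bb0(%0 : $*protocol<>, %1 : $T):
    ...
  }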

A relatively minor change is that there can now be multiple
direct results, each with its own result convention.
This is a minor change because I've chosen to leave
return instructions as taking a single operand and
apply instructions as producing a single result; when
the type describes multiple results, they are implicitly
bound up in a tuple.  It might make sense to split these
up and allow e.g. return instructions to take a list
of operands; however, it's not clear what to do on the
caller side, and this would be a major change that can
be separated out from this already over-large patch.
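
A rough sketch of what that looks like (the function and
types here are hypothetical):

  // Two direct results, each with its own convention.  The return
  // still takes a single operand, whose type is the tuple of the
  // direct results, and the corresponding apply yields a single
  // value of that tuple type.
  sil @two_results : $@convention(thin) () -> (@owned C, Int) {
  bb0:
    ...
    %t = tuple (%c : $C, %i : $Int)
    return %t : $(C, Int)
  }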

Unsurprisingly, the most invasive changes here are in
SILGen; this requires substantial reworking of both call
emission and reabstraction.  It also proved important
to switch several SILGen operations over to work with
RValue instead of ManagedValue, since otherwise they
would be forced to spuriously "implode" buffers.
Committed 2016-02-18 01:26:28 -08:00

// RUN: rm -rf %t && mkdir %t
// RUN: %build-irgen-test-overlays
// RUN: %target-swift-frontend -sdk %S/Inputs -I %t %s -emit-ir | FileCheck %s

// REQUIRES: CPU=x86_64
// REQUIRES: objc_interop

sil_stage canonical

import gizmo

// rdar://16621578

sil @init_opaque_existential : $@convention(thin) <T where T : Gizmo> (@owned T) -> @out protocol<> {
bb0(%0 : $*protocol<>, %1 : $T):
  %2 = init_existential_addr %0 : $*protocol<>, $T
  store %1 to %2 : $*T
  %3 = tuple ()
  return %3 : $()
}

// CHECK-DAG: define{{( protected)?}} void @init_opaque_existential([[ANY:%"protocol<>"]]* noalias nocapture sret, [[GIZMO:%.*]]*, [[TYPE:%.*]]* %T)
// CHECK: [[T0:%.*]] = getelementptr inbounds [[ANY]], [[ANY]]* %0, i32 0, i32 1
// CHECK-NEXT: store [[TYPE]]* %T, [[TYPE]]** [[T0]], align 8
// CHECK-NEXT: [[T0:%.*]] = getelementptr inbounds [[ANY]], [[ANY]]* %0, i32 0, i32 0
// CHECK-NEXT: [[T1:%.*]] = bitcast [24 x i8]* [[T0]] to [[GIZMO]]**
// CHECK-NEXT: store [[GIZMO]]* %1, [[GIZMO]]** [[T1]], align 8
// CHECK-NEXT: ret void

sil @take_opaque_existential : $@convention(thin) (@in protocol<>) -> @out protocol<> {
bb0(%0 : $*protocol<>, %1 : $*protocol<>):
  copy_addr [take] %1 to [initialization] %0 : $*protocol<>
  %3 = tuple ()
  return %3 : $()
}

// CHECK-DAG: define{{( protected)?}} void @take_opaque_existential([[ANY:%"protocol<>"]]* noalias nocapture sret, %"protocol<>"* noalias nocapture dereferenceable({{.*}})) {{.*}} {
// CHECK: [[T0:%.*]] = getelementptr inbounds [[ANY]], [[ANY]]* [[SRC:%1]], i32 0, i32 1
// CHECK-NEXT: [[TYPE:%.*]] = load %swift.type*, %swift.type** [[T0]], align 8
// CHECK-NEXT: [[T0:%.*]] = getelementptr inbounds [[ANY]], [[ANY]]* [[DEST:%0]], i32 0, i32 1
// CHECK-NEXT: store %swift.type* [[TYPE]], %swift.type** [[T0]], align 8
// CHECK-NEXT: [[SRC_BUF:%.*]] = getelementptr inbounds [[ANY]], [[ANY]]* [[SRC]], i32 0, i32 0
// CHECK-NEXT: [[DEST_BUF:%.*]] = getelementptr inbounds [[ANY]], [[ANY]]* [[DEST]], i32 0, i32 0
// CHECK-NEXT: [[T0:%.*]] = bitcast %swift.type* [[TYPE]] to i8***
// CHECK-NEXT: [[T1:%.*]] = getelementptr inbounds i8**, i8*** [[T0]], i64 -1
// CHECK-NEXT: [[VWTABLE:%.*]] = load i8**, i8*** [[T1]], align 8
// CHECK-NEXT: [[T0:%.*]] = getelementptr inbounds i8*, i8** [[VWTABLE]], i32 12
// CHECK-NEXT: [[T1:%.*]] = load i8*, i8** [[T0]], align 8
// CHECK-NEXT: [[INIT:%.*]] = bitcast i8* [[T1]] to %swift.opaque* ([24 x i8]*, [24 x i8]*, %swift.type*)*
// CHECK-NEXT: call %swift.opaque* [[INIT]]([24 x i8]* noalias [[DEST_BUF]], [24 x i8]* [[SRC_BUF]], %swift.type* [[TYPE]])
// CHECK-NEXT: ret void

// rdar://problem/19035529

@objc protocol OP {}
@objc protocol OP2: OP {}

// CHECK-DAG: define{{( protected)?}} %objc_object* @init_existential_objc_to_objc(%objc_object*) {{.*}} {
// CHECK: ret %objc_object* %0
sil @init_existential_objc_to_objc : $@convention(thin) (@owned OP2) -> @owned OP {
entry(%o : $OP2):
  %a = init_existential_ref %o : $OP2 : $OP2, $OP
  return %a : $OP
}

protocol CP: class {}

// CHECK-DAG: define{{( protected)?}} { %objc_object*, i8** } @class_existential_unowned(%objc_object*, i8**) {{.*}} {
sil @class_existential_unowned : $@convention(thin) (@owned CP) -> @owned CP {
entry(%s : $CP):
%u1 = alloc_stack $@sil_unowned CP
%u2 = alloc_stack $@sil_unowned CP
// CHECK: [[U1:%.*]] = alloca [[UREF:{ %swift.unowned, i8.. }]], align 8
// CHECK: [[U2:%.*]] = alloca [[UREF]], align 8
store_unowned %s to [initialization] %u1 : $*@sil_unowned CP
// CHECK: [[T0:%.*]] = getelementptr inbounds [[UREF]], [[UREF]]* [[U1]], i32 0, i32 1
// CHECK: store i8** %1, i8*** [[T0]], align 8
// CHECK: [[T0:%.*]] = getelementptr inbounds [[UREF]], [[UREF]]* [[U1]], i32 0, i32 0
// CHECK: call void @swift_unknownUnownedInit(%swift.unowned* [[T0]], %objc_object* %0)
// CHECK: [[T0:%.*]] = getelementptr inbounds [[UREF]], [[UREF]]* [[U1]], i32 0, i32 0
// CHECK: [[T1:%.*]] = call %objc_object* @swift_unknownUnownedLoadStrong(%swift.unowned* [[T0]])
%t = load_unowned %u1 : $*@sil_unowned CP
// CHECK: call void @swift_unknownRelease(%objc_object* [[T1]])
strong_release %t : $CP
dealloc_stack %u2 : $*@sil_unowned CP
dealloc_stack %u1 : $*@sil_unowned CP
%v = ref_to_unmanaged %s : $CP to $@sil_unmanaged CP
%z = unmanaged_to_ref %v : $@sil_unmanaged CP to $CP
// CHECK: [[RESULT_A:%.*]] = insertvalue { %objc_object*, i8** } undef, %objc_object* %0, 0
// CHECK: [[RESULT_B:%.*]] = insertvalue { %objc_object*, i8** } [[RESULT_A]], i8** %1, 1
// CHECK: ret { %objc_object*, i8** } [[RESULT_B]]
return %z : $CP
}

// CHECK-DAG: define{{( protected)?}} void @class_existential_weak({ %swift.weak, i8** }* noalias nocapture sret, i64, i64)
sil @class_existential_weak : $@convention(thin) (@owned CP?) -> @out @sil_weak CP? {
entry(%w : $*@sil_weak CP?, %a : $CP?):
// CHECK: [[V:%.*]] = alloca { %swift.weak, i8** }
%v = alloc_stack $@sil_weak CP?
// CHECK: [[SRC_REF:%.*]] = inttoptr {{.*}} %objc_object*
// CHECK: [[SRC_WITNESS:%.*]] = inttoptr {{.*}} i8**
// CHECK: [[DEST_WITNESS_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* %0, i32 0, i32 1
// CHECK: store i8** [[SRC_WITNESS]], i8*** [[DEST_WITNESS_ADDR]]
// CHECK: [[DEST_REF_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* %0, i32 0, i32 0
// CHECK: call void @swift_unknownWeakInit(%swift.weak* [[DEST_REF_ADDR]], %objc_object* [[SRC_REF]])
store_weak %a to [initialization] %w : $*@sil_weak CP?
// CHECK: [[SRC_REF:%.*]] = inttoptr {{.*}} %objc_object*
// CHECK: [[SRC_WITNESS:%.*]] = inttoptr {{.*}} i8**
// CHECK: [[DEST_WITNESS_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* %0, i32 0, i32 1
// CHECK: store i8** [[SRC_WITNESS]], i8*** [[DEST_WITNESS_ADDR]]
// CHECK: [[DEST_REF_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* %0, i32 0, i32 0
// CHECK: call void @swift_unknownWeakAssign(%swift.weak* [[DEST_REF_ADDR]], %objc_object* [[SRC_REF]])
store_weak %a to %w : $*@sil_weak CP?
// CHECK: [[SRC_REF_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* %0, i32 0, i32 0
// CHECK: [[DEST_REF:%.*]] = call %objc_object* @swift_unknownWeakTakeStrong(%swift.weak* [[SRC_REF_ADDR]])
// CHECK: [[SRC_WITNESS_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* %0, i32 0, i32 1
// CHECK: [[DEST_WITNESS:%.*]] = load i8**, i8*** [[SRC_WITNESS_ADDR]]
%b = load_weak [take] %w : $*@sil_weak CP?
// CHECK: [[SRC_REF_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* %0, i32 0, i32 0
// CHECK: [[DEST_REF:%.*]] = call %objc_object* @swift_unknownWeakLoadStrong(%swift.weak* [[SRC_REF_ADDR]])
// CHECK: [[SRC_WITNESS_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* %0, i32 0, i32 1
// CHECK: [[DEST_WITNESS:%.*]] = load i8**, i8*** [[SRC_WITNESS_ADDR]]
%c = load_weak %w : $*@sil_weak CP?
// CHECK: [[DEST_REF_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* [[V]], i32 0, i32 0
// CHECK: [[SRC_REF_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* %0, i32 0, i32 0
// CHECK: call void @swift_unknownWeakTakeInit(%swift.weak* [[DEST_REF_ADDR]], %swift.weak* [[SRC_REF_ADDR]])
// CHECK: [[SRC_WITNESS_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* %0, i32 0, i32 1
// CHECK: [[WITNESS:%.*]] = load i8**, i8*** [[SRC_WITNESS_ADDR]], align 8
// CHECK: [[DEST_WITNESS_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* [[V]], i32 0, i32 1
// CHECK: store i8** [[WITNESS]], i8*** [[DEST_WITNESS_ADDR]], align 8
copy_addr [take] %w to [initialization] %v : $*@sil_weak CP?
// CHECK: [[DEST_REF_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* [[V]], i32 0, i32 0
// CHECK: [[SRC_REF_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* %0, i32 0, i32 0
// CHECK: call void @swift_unknownWeakTakeAssign(%swift.weak* [[DEST_REF_ADDR]], %swift.weak* [[SRC_REF_ADDR]])
// CHECK: [[SRC_WITNESS_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* %0, i32 0, i32 1
// CHECK: [[WITNESS:%.*]] = load i8**, i8*** [[SRC_WITNESS_ADDR]], align 8
// CHECK: [[DEST_WITNESS_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* [[V]], i32 0, i32 1
// CHECK: store i8** [[WITNESS]], i8*** [[DEST_WITNESS_ADDR]], align 8
copy_addr [take] %w to %v : $*@sil_weak CP?
// CHECK: [[DEST_REF_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* [[V]], i32 0, i32 0
// CHECK: [[SRC_REF_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* %0, i32 0, i32 0
// CHECK: call void @swift_unknownWeakCopyInit(%swift.weak* [[DEST_REF_ADDR]], %swift.weak* [[SRC_REF_ADDR]])
// CHECK: [[SRC_WITNESS_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* %0, i32 0, i32 1
// CHECK: [[WITNESS:%.*]] = load i8**, i8*** [[SRC_WITNESS_ADDR]], align 8
// CHECK: [[DEST_WITNESS_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* [[V]], i32 0, i32 1
// CHECK: store i8** [[WITNESS]], i8*** [[DEST_WITNESS_ADDR]], align 8
copy_addr %w to [initialization] %v : $*@sil_weak CP?
// CHECK: [[DEST_REF_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* [[V]], i32 0, i32 0
// CHECK: [[SRC_REF_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* %0, i32 0, i32 0
// CHECK: call void @swift_unknownWeakCopyAssign(%swift.weak* [[DEST_REF_ADDR]], %swift.weak* [[SRC_REF_ADDR]])
// CHECK: [[SRC_WITNESS_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* %0, i32 0, i32 1
// CHECK: [[WITNESS:%.*]] = load i8**, i8*** [[SRC_WITNESS_ADDR]], align 8
// CHECK: [[DEST_WITNESS_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* [[V]], i32 0, i32 1
// CHECK: store i8** [[WITNESS]], i8*** [[DEST_WITNESS_ADDR]], align 8
copy_addr %w to %v : $*@sil_weak CP?
// CHECK: [[REF_ADDR:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* [[V]], i32 0, i32 0
// CHECK: call void @swift_unknownWeakDestroy(%swift.weak* [[REF_ADDR]])
destroy_addr %v : $*@sil_weak CP?
dealloc_stack %v : $*@sil_weak CP?
return undef : $()
}