// RUN: %empty-directory(%t)
// RUN: %target-swift-frontend -emit-module -enable-library-evolution -emit-module-path=%t/resilient_struct.swiftmodule -module-name=resilient_struct %S/../Inputs/resilient_struct.swift
// RUN: %target-swift-frontend -Xllvm -sil-disable-pass=OnoneSimplification -I %t -emit-ir %s | %FileCheck %s --check-prefix=CHECK --check-prefix=CHECK-%target-ptrsize

// UNSUPPORTED: CPU=arm64e

// REQUIRES: PTRSIZE=64

import Builtin
import Swift
import resilient_struct

class SwiftClass {}
sil_vtable SwiftClass {}
sil @$s13partial_apply10SwiftClassCfD : $@convention(method) (SwiftClass) -> ()

sil @partially_applyable_to_class : $@convention(thin) (@guaranteed SwiftClass) -> ()
sil @partially_applyable_to_two_classes : $@convention(thin) (@guaranteed SwiftClass, @guaranteed SwiftClass) -> ()

sil @use_closure : $@convention(thin) (@noescape @callee_guaranteed () -> ()) -> ()

// CHECK: define{{( dllexport)?}}{{( protected)?}} swiftcc { ptr, ptr } @partial_apply_class(ptr %0) {{.*}} {
// CHECK: entry:
// CHECK: %1 = insertvalue { ptr, ptr } { ptr @"$s28partially_applyable_to_classTA", ptr undef }, ptr %0, 1
// CHECK: ret { ptr, ptr } %1
// CHECK: }
sil @partial_apply_class : $@convention(thin) (SwiftClass) -> @callee_owned () -> () {
entry(%c : $SwiftClass):
  %f = function_ref @partially_applyable_to_class : $@convention(thin) (@guaranteed SwiftClass) -> ()
  %g = partial_apply %f(%c) : $@convention(thin) (@guaranteed SwiftClass) -> ()
  return %g : $@callee_owned () -> ()
}

// CHECK: define{{.*}} swiftcc void @partial_apply_class_on_stack(ptr %0)
// CHECK: entry:
// CHECK: call swiftcc void @use_closure(ptr @"$s28partially_applyable_to_classTA.1", ptr %0)
// CHECK: call void @swift_release(ptr %0)
// CHECK: ret void
// CHECK: }

sil @partial_apply_class_on_stack : $@convention(thin) (@owned SwiftClass) -> () {
entry(%a : $SwiftClass):
  %f = function_ref @partially_applyable_to_class : $@convention(thin) (@guaranteed SwiftClass) -> ()
  %c = partial_apply [callee_guaranteed] [on_stack] %f(%a) : $@convention(thin) (@guaranteed SwiftClass) -> ()
  %use = function_ref @use_closure : $@convention(thin) (@noescape @callee_guaranteed () -> ()) -> ()
  apply %use(%c) : $@convention(thin) (@noescape @callee_guaranteed () -> ()) -> ()
  dealloc_stack %c : $@noescape @callee_guaranteed () ->()
  strong_release %a : $SwiftClass
  %t = tuple()
  return %t : $()
}

// CHECK: define{{.*}} swiftcc void @partial_apply_two_classes_on_stack(ptr %0, ptr %1)
// CHECK: entry:
// CHECK: [[CTX:%.*]] = alloca i8, i64 32, align 16
// CHECK: [[CAPTURE1:%.*]] = getelementptr inbounds{{.*}} <{ %swift.refcounted, ptr, ptr }>, ptr [[CTX]], i32 0, i32 1
// CHECK: store ptr %0, ptr [[CAPTURE1]], align 8
// CHECK: [[CAPTURE2:%.*]] = getelementptr inbounds{{.*}} <{ %swift.refcounted, ptr, ptr }>, ptr [[CTX]], i32 0, i32 2
// CHECK: store ptr %1, ptr [[CAPTURE2]], align 8
// CHECK: call swiftcc void @use_closure(ptr @"$s34partially_applyable_to_two_classesTA", ptr [[CTX]])
// CHECK: call void @swift_release(ptr %0)
// CHECK: call void @swift_release(ptr %1)
// CHECK: ret void
// CHECK: }

sil @partial_apply_two_classes_on_stack : $@convention(thin) (@owned SwiftClass, @owned SwiftClass) -> () {
entry(%a : $SwiftClass, %b: $SwiftClass):
  %f = function_ref @partially_applyable_to_two_classes : $@convention(thin) (@guaranteed SwiftClass, @guaranteed SwiftClass) -> ()
  %c = partial_apply [callee_guaranteed] [on_stack] %f(%a, %b) : $@convention(thin) (@guaranteed SwiftClass, @guaranteed SwiftClass) -> ()
  %use = function_ref @use_closure : $@convention(thin) (@noescape @callee_guaranteed () -> ()) -> ()
  apply %use(%c) : $@convention(thin) (@noescape @callee_guaranteed () -> ()) -> ()
  dealloc_stack %c : $@noescape @callee_guaranteed () ->()
  strong_release %a : $SwiftClass
  strong_release %b : $SwiftClass
  %t = tuple()
  return %t : $()
}
//
// Check that partially applied generic parameters are correctly substituted
// in the forwarder.
//

// CHECK-LABEL: define internal swiftcc i64 @"$s22generic_captured_paramTA"(i64 %0, ptr swiftself %1) {{.*}} {
sil public_external @generic_captured_param : $@convention(thin) <T> (Int, @inout T) -> Int

sil @partial_apply_generic_capture : $@convention(thin) (Int) -> @callee_owned (Int) -> Int {
entry(%x : $Int):
  %a = alloc_stack $Int
  store %x to %a : $*Int
  %f = function_ref @generic_captured_param : $@convention(thin) <T> (Int, @inout T) -> Int
  %p = partial_apply %f<Int>(%a) : $@convention(thin) <T> (Int, @inout T) -> Int
  dealloc_stack %a : $*Int
  return %p : $@callee_owned (Int) -> Int
}

sil public_external @generic_captured_and_open_param : $@convention(thin) <T> (@in T, @inout T) -> @out T

// CHECK-LABEL: define {{.*}} @partial_apply_open_generic_capture({{.*}} ptr %T) {{.*}} {
// CHECK: store ptr %T
// CHECK: insertvalue {{.*}} [[PARTIAL_APPLY_STUB:@"\$s[A-Za-z0-9_]+TA"]]

// CHECK: define {{.*}} [[PARTIAL_APPLY_STUB]]
// CHECK: [[T:%.*]] = load ptr, ptr
// CHECK: tail call swiftcc void @generic_captured_and_open_param({{.*}} ptr [[T]])
sil @partial_apply_open_generic_capture : $@convention(thin) <T> (@inout T) -> @callee_owned (@in T) -> @out T {
entry(%a : $*T):
  %f = function_ref @generic_captured_and_open_param : $@convention(thin) <U> (@in U, @inout U) -> @out U
  %p = partial_apply %f<T>(%a) : $@convention(thin) <U> (@in U, @inout U) -> @out U
  return %p : $@callee_owned (@in T) -> @out T
}

/*****************************************************************************/
/* Swift-refcounted class captures. Optimizable by using the reference */
/* as the partial apply context. */
/*****************************************************************************/
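
// Illustrative Swift-level shape of the cases below (hypothetical names; this
// comment is not part of the checked output):
//
//   func takesClass(_ n: Int, _ c: SwiftClass) -> Int { return n }
//   func captureClass(_ c: SwiftClass) -> (Int) -> Int {
//     // A single Swift-refcounted capture can be retained and used directly
//     // as the closure context, so no separate box needs to be allocated.
//     return { n in takesClass(n, c) }
//   }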

sil public_external @guaranteed_captured_class_param : $@convention(thin) (Int, @guaranteed SwiftClass) -> Int

// CHECK-LABEL: define{{( dllexport)?}}{{( protected)?}} swiftcc { ptr, ptr } @partial_apply_guaranteed_class_param(ptr %0)
// CHECK: [[T0:%.*]] = insertvalue {{.*}} [[PARTIAL_APPLY_FORWARDER:@"\$s[A-Za-z0-9_]+TA"]]
// CHECK: ret {{.*}} [[T0]]

// CHECK: define internal swiftcc i64 [[PARTIAL_APPLY_FORWARDER]]
// CHECK-NOT: retain
// CHECK: [[RESULT:%.*]] = call swiftcc i64 @guaranteed_captured_class_param(i64 %0, ptr %1)
// CHECK: release{{.*}} %1)
// CHECK: ret i64 [[RESULT]]

sil @partial_apply_guaranteed_class_param : $@convention(thin) (@owned SwiftClass) -> @callee_owned (Int) -> Int {
bb0(%x : $SwiftClass):
  %f = function_ref @guaranteed_captured_class_param : $@convention(thin) (Int, @guaranteed SwiftClass) -> Int
  %p = partial_apply %f(%x) : $@convention(thin) (Int, @guaranteed SwiftClass) -> Int
  return %p : $@callee_owned (Int) -> Int
}

sil public_external @indirect_guaranteed_captured_class_param : $@convention(thin) (Int, @in_guaranteed SwiftClass) -> Int

// CHECK-LABEL: define{{( dllexport)?}}{{( protected)?}} swiftcc { ptr, ptr } @partial_apply_indirect_guaranteed_class_param(ptr noalias captures(none) dereferenceable({{.*}}) %0)
// CHECK-NOT: {{retain|release}}
// CHECK: [[X:%.*]] = load ptr, ptr %0
// CHECK-NOT: {{retain|release}}
// CHECK: [[T0:%.*]] = insertvalue {{.*}} [[PARTIAL_APPLY_FORWARDER:@"\$s[A-Za-z0-9_]+TA"]], {{.*}} [[X]]
// CHECK: ret {{.*}} [[T0]]

// CHECK: define internal swiftcc i64 [[PARTIAL_APPLY_FORWARDER]]
// CHECK: [[X_TMP:%.*]] = alloca ptr
// CHECK-NEXT: store ptr %1, ptr [[X_TMP]], align
// CHECK-NOT: load
// CHECK-NOT: retain
// CHECK-NOT: release
// CHECK: [[RESULT:%.*]] = call swiftcc i64 @indirect_guaranteed_captured_class_param(i64 %0, ptr noalias captures(none) dereferenceable({{.*}}) [[X_TMP]]
// CHECK-NOT: retain
// CHECK: call void @swift_release(ptr %1)
// CHECK: ret i64 [[RESULT]]

sil @partial_apply_indirect_guaranteed_class_param : $@convention(thin) (@in SwiftClass) -> @callee_owned (Int) -> Int {
bb0(%x : $*SwiftClass):
  %f = function_ref @indirect_guaranteed_captured_class_param : $@convention(thin) (Int, @in_guaranteed SwiftClass) -> Int
  %p = partial_apply %f(%x) : $@convention(thin) (Int, @in_guaranteed SwiftClass) -> Int
  return %p : $@callee_owned (Int) -> Int
}

sil public_external @indirect_consumed_captured_class_param : $@convention(thin) (Int, @in SwiftClass) -> Int

// CHECK-LABEL: define{{( dllexport)?}}{{( protected)?}} swiftcc { ptr, ptr } @partial_apply_indirect_consumed_class_param(ptr noalias captures(none) dereferenceable({{.*}}) %0)
// CHECK-NOT: {{retain|release}}
// CHECK: [[CONTEXT_OBJ:%.*]] = load ptr, ptr %0
// CHECK-NOT: {{retain|release}}
// CHECK: [[T0:%.*]] = insertvalue {{.*}} [[PARTIAL_APPLY_FORWARDER:@"\$s[A-Za-z0-9_]+TA"]], {{.*}} [[CONTEXT_OBJ]]
// CHECK: ret {{.*}} [[T0]]

// CHECK: define internal swiftcc i64 [[PARTIAL_APPLY_FORWARDER]]
// CHECK: [[X_TMP:%.*]] = alloca ptr
// CHECK-NEXT: store ptr %1, ptr [[X_TMP]], align
// CHECK-NOT: load
// CHECK-NOT: retain
// CHECK-NOT: release
// CHECK: [[RESULT:%.*]] = call swiftcc i64 @indirect_consumed_captured_class_param(i64 %0, ptr noalias captures(none) dereferenceable({{.*}}) [[X_TMP]])
// CHECK-NOT: retain
// CHECK-NOT: release
// CHECK: ret i64 [[RESULT]]

sil @partial_apply_indirect_consumed_class_param : $@convention(thin) (@in SwiftClass) -> @callee_owned (Int) -> Int {
bb0(%x : $*SwiftClass):
  %f = function_ref @indirect_consumed_captured_class_param : $@convention(thin) (Int, @in SwiftClass) -> Int
  %p = partial_apply %f(%x) : $@convention(thin) (Int, @in SwiftClass) -> Int
  return %p : $@callee_owned (Int) -> Int
}

/*****************************************************************************/
/* A non-trivial capture. Indirect applications can directly reference the */
/* field from the partial apply context. */
/*****************************************************************************/
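
// Roughly the Swift-level pattern being modeled below (hypothetical names;
// not matched by FileCheck): capturing a two-reference struct forces a
// heap-allocated context object, and indirect applications read the pair
// straight out of that context.
//
//   func takesPair(_ n: Int, _ p: SwiftClassPair) -> Int { return n }
//   func capturePair(_ p: SwiftClassPair) -> (Int) -> Int {
//     return { n in takesPair(n, p) }
//   }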

struct SwiftClassPair { var x: SwiftClass, y: SwiftClass }

sil public_external @guaranteed_captured_class_pair_param : $@convention(thin) (Int, @guaranteed SwiftClassPair) -> Int

// CHECK-LABEL: define{{( dllexport)?}}{{( protected)?}} swiftcc { ptr, ptr } @partial_apply_guaranteed_class_pair_param(ptr %0, ptr %1)
// CHECK: [[CONTEXT_OBJ:%.*]] = call noalias ptr @swift_allocObject
// CHECK: [[PAIR_ADDR:%.*]] = getelementptr
// CHECK-NEXT: [[X_ADDR:%.*]] = getelementptr inbounds{{.*}} %T13partial_apply14SwiftClassPairV, ptr [[PAIR_ADDR]], i32 0, i32 0
// CHECK-NEXT: store ptr %0, ptr [[X_ADDR]], align
// CHECK-NEXT: [[Y_ADDR:%.*]] = getelementptr inbounds{{.*}} %T13partial_apply14SwiftClassPairV, ptr [[PAIR_ADDR]], i32 0, i32 1
// CHECK-NEXT: store ptr %1, ptr [[Y_ADDR]], align
// CHECK-NOT: {{retain|release}}
// CHECK: [[T0:%.*]] = insertvalue {{.*}} [[PARTIAL_APPLY_FORWARDER:@"\$s[A-Za-z0-9_]+TA"]], {{.*}} [[CONTEXT_OBJ]]
// CHECK: ret {{.*}} [[T0]]

// CHECK: define internal swiftcc i64 [[PARTIAL_APPLY_FORWARDER]]
// CHECK: [[PAIR_ADDR:%.*]] = getelementptr
// CHECK-NEXT: [[X_ADDR:%.*]] = getelementptr inbounds{{.*}} %T13partial_apply14SwiftClassPairV, ptr [[PAIR_ADDR]], i32 0, i32 0
// CHECK-NEXT: [[X:%.*]] = load ptr, ptr [[X_ADDR]], align
// CHECK-NEXT: [[Y_ADDR:%.*]] = getelementptr inbounds{{.*}} %T13partial_apply14SwiftClassPairV, ptr [[PAIR_ADDR]], i32 0, i32 1
// CHECK-NEXT: [[Y:%.*]] = load ptr, ptr [[Y_ADDR]], align
// CHECK-NOT: retain
// CHECK: [[RESULT:%.*]] = call swiftcc i64 @guaranteed_captured_class_pair_param(i64 %0, ptr [[X]], ptr [[Y]])
// CHECK: release{{.*}}%1)
// CHECK: ret i64 [[RESULT]]

sil @partial_apply_guaranteed_class_pair_param : $@convention(thin) (@owned SwiftClassPair) -> @callee_owned (Int) -> Int {
bb0(%x : $SwiftClassPair):
  %f = function_ref @guaranteed_captured_class_pair_param : $@convention(thin) (Int, @guaranteed SwiftClassPair) -> Int
  %p = partial_apply %f(%x) : $@convention(thin) (Int, @guaranteed SwiftClassPair) -> Int
  return %p : $@callee_owned (Int) -> Int
}

sil public_external @indirect_guaranteed_captured_class_pair_param : $@convention(thin) (Int, @in_guaranteed SwiftClassPair) -> Int

// CHECK-LABEL: define{{( dllexport)?}}{{( protected)?}} swiftcc { ptr, ptr } @partial_apply_indirect_guaranteed_class_pair_param(ptr noalias captures(none) dereferenceable({{.*}}) %0)
// CHECK: [[CONTEXT_OBJ:%.*]] = call noalias ptr @swift_allocObject
// CHECK-NOT: {{retain|release}}
// CHECK: [[T0:%.*]] = insertvalue {{.*}} [[PARTIAL_APPLY_FORWARDER:@"\$s[A-Za-z0-9_]+TA"]], {{.*}} [[CONTEXT_OBJ]]
// CHECK: ret {{.*}} [[T0]]

// CHECK: define internal swiftcc i64 [[PARTIAL_APPLY_FORWARDER]]
// CHECK: [[PAIR_ADDR:%.*]] = getelementptr
// CHECK-NOT: load
// CHECK-NOT: retain
// CHECK: [[RESULT:%.*]] = call swiftcc i64 @indirect_guaranteed_captured_class_pair_param(i64 %0, ptr noalias captures(none) dereferenceable({{.*}}) [[PAIR_ADDR]])
// CHECK: release{{.*}}%1)
// CHECK: ret i64 [[RESULT]]

sil @partial_apply_indirect_guaranteed_class_pair_param : $@convention(thin) (@in SwiftClassPair) -> @callee_owned (Int) -> Int {
bb0(%x : $*SwiftClassPair):
  %f = function_ref @indirect_guaranteed_captured_class_pair_param : $@convention(thin) (Int, @in_guaranteed SwiftClassPair) -> Int
  %p = partial_apply %f(%x) : $@convention(thin) (Int, @in_guaranteed SwiftClassPair) -> Int
  return %p : $@callee_owned (Int) -> Int
}

sil public_external @indirect_consumed_captured_class_pair_param : $@convention(thin) (Int, @in SwiftClassPair) -> Int

// CHECK-LABEL: define{{( dllexport)?}}{{( protected)?}} swiftcc { ptr, ptr } @partial_apply_indirect_consumed_class_pair_param(ptr noalias captures(none) dereferenceable({{.*}}) %0)
// CHECK: [[CONTEXT_OBJ:%.*]] = call noalias ptr @swift_allocObject
// CHECK-NOT: {{retain|release}}
// CHECK: [[T0:%.*]] = insertvalue {{.*}} [[PARTIAL_APPLY_FORWARDER:@"\$s[A-Za-z0-9_]+TA"]], {{.*}} [[CONTEXT_OBJ]]
// CHECK: ret {{.*}} [[T0]]

// CHECK: define internal swiftcc i64 [[PARTIAL_APPLY_FORWARDER]]
// CHECK: [[X_TMP:%.*]] = alloca
// CHECK: call ptr @"$s13partial_apply14SwiftClassPairVWOc"
// CHECK: release{{.*}}%1)
// CHECK: [[RESULT:%.*]] = call swiftcc i64 @indirect_consumed_captured_class_pair_param(i64 %0, ptr noalias captures(none) dereferenceable({{.*}}) [[X_TMP]])
// CHECK: ret i64 [[RESULT]]

sil @partial_apply_indirect_consumed_class_pair_param : $@convention(thin) (@in SwiftClassPair) -> @callee_owned (Int) -> Int {
bb0(%x : $*SwiftClassPair):
  %f = function_ref @indirect_consumed_captured_class_pair_param : $@convention(thin) (Int, @in SwiftClassPair) -> Int
  %p = partial_apply %f(%x) : $@convention(thin) (Int, @in SwiftClassPair) -> Int
  return %p : $@callee_owned (Int) -> Int
}

sil public_external @captured_fixed_and_dependent_params : $@convention(thin) <A> (@guaranteed SwiftClass, @in A, Int) -> ()

// CHECK-LABEL: define{{( dllexport)?}}{{( protected)?}} swiftcc { ptr, ptr } @partial_apply_indirect_non_fixed_layout(ptr %0, ptr noalias %1, i64 %2, ptr %T)
// -- Round the base offset for the T field up to T's alignment.
// CHECK: [[T_VWTABLE_ADDR:%.*]] = getelementptr {{.*}} %T, [[WORD:i[0-9]+]] -1
// CHECK: [[T_VWTABLE:%.*]] = load {{.*}} [[T_VWTABLE_ADDR]]
// CHECK: [[T_FLAGS_ADDR:%.*]] = getelementptr inbounds{{.*}} %swift.vwtable, ptr [[T_VWTABLE]], i32 0, i32 10
// CHECK: [[T_FLAGS:%.*]] = load i32, ptr [[T_FLAGS_ADDR]]
// CHECK-64: [[T0:%.*]] = zext i32 [[T_FLAGS]] to i64
// CHECK-64: [[T_ALIGN_MASK:%.*]] = and i64 [[T0]], 255
// CHECK-32: [[T_ALIGN_MASK:%.*]] = and i32 [[T_FLAGS]], 255
// CHECK-16: [[T0:%.*]] = trunc i32 [[T_FLAGS]] to i16
// CHECK-16: [[T_ALIGN_MASK:%.*]] = and i16 [[T0]], 255
// CHECK: [[T_ALIGN_MASK_NOT:%.*]] = xor [[WORD]] [[T_ALIGN_MASK]], -1
// -- 32 is 64-bit offset of 'T' field, 16 for obj header + 8 for T metadata + 8 for SwiftClass field
// CHECK-64: [[T_UP_TO_ALIGN_1:%.*]] = add i64 32, [[T_ALIGN_MASK]]
// -- 16 is 32-bit
// CHECK-32: [[T_UP_TO_ALIGN_1:%.*]] = add i32 16, [[T_ALIGN_MASK]]
// CHECK: [[T_OFFSET:%.*]] = and [[WORD]] [[T_UP_TO_ALIGN_1]], [[T_ALIGN_MASK_NOT]]

// -- Add the size of T to start the Int field.
// CHECK: [[T_SIZE_ADDR:%.*]] = getelementptr inbounds{{.*}} %swift.vwtable, ptr [[T_VWTABLE]], i32 0, i32 8
// CHECK: [[T_SIZE:%.*]] = load [[WORD]], ptr [[T_SIZE_ADDR]]
// CHECK: [[T_END:%.*]] = add [[WORD]] [[T_OFFSET]], [[T_SIZE]]

// -- Accumulate total alignment.
// CHECK: [[TOTAL_ALIGN_1:%.*]] = or [[WORD]] 7, [[T_ALIGN_MASK]]

// -- Round up to alignment for the Int field.
// TODO: could skip this since the best-known alignment is better than
// fixed alignment of Int
// CHECK-64: [[INT_UP_TO_ALIGN_1:%.*]] = add [[WORD]] [[T_END]], 7
// CHECK-64: [[INT_OFFSET:%.*]] = and [[WORD]] [[INT_UP_TO_ALIGN_1]], -8
// CHECK-32: [[INT_UP_TO_ALIGN_1:%.*]] = add [[WORD]] [[T_END]], 3
// CHECK-32: [[INT_OFFSET:%.*]] = and [[WORD]] [[INT_UP_TO_ALIGN_1]], -4
// -- Add the size of Int, and accumulate total alignment.
// CHECK-64: [[TOTAL_SIZE:%.*]] = add [[WORD]] [[INT_OFFSET]], 8
// CHECK-64: [[TOTAL_ALIGN:%.*]] = or [[WORD]] [[TOTAL_ALIGN_1]], 7
// CHECK-32: [[TOTAL_SIZE:%.*]] = add [[WORD]] [[INT_OFFSET]], 4
// CHECK-32: [[TOTAL_ALIGN:%.*]] = or [[WORD]] [[TOTAL_ALIGN_1]], 3

// -- Allocate using the total size and alignment.
// CHECK: [[BOX:%.*]] = call noalias ptr @swift_allocObject(ptr {{.*}}, [[WORD]] [[TOTAL_SIZE]], [[WORD]] [[TOTAL_ALIGN]])
// -- metadata
// CHECK: getelementptr inbounds {{.*}} [[BOX]], i32 0, i32 1
// -- SwiftClass
// CHECK: getelementptr inbounds {{.*}} [[BOX]], i32 0, i32 2
// -- T
// CHECK: getelementptr inbounds {{.*}} [[BOX]], [[WORD]] [[T_OFFSET]]
// -- Int
// CHECK: getelementptr inbounds {{.*}} [[BOX]], [[WORD]] [[INT_OFFSET]]

// CHECK: insertvalue {{.*}} [[PARTIAL_APPLY_STUB:@"\$s[A-Za-z0-9_]+TA"]]
// CHECK: define internal swiftcc void [[PARTIAL_APPLY_STUB]](ptr swiftself %0)
sil @partial_apply_indirect_non_fixed_layout : $@convention(thin) <T> (@owned SwiftClass, @in T, Int) -> @callee_owned () -> () {
bb0(%a : $SwiftClass, %b : $*T, %c : $Int):
  %f = function_ref @captured_fixed_and_dependent_params : $@convention(thin) <B> (@guaranteed SwiftClass, @in B, Int) -> ()
  %p = partial_apply %f<T>(%a, %b, %c) : $@convention(thin) <C> (@guaranteed SwiftClass, @in C, Int) -> ()
  return %p : $@callee_owned () -> ()
}
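
// Worked example of the 64-bit layout computation checked above (illustrative
// only; assumes a T with size 4 and alignment 16, i.e. alignment mask 15):
//   T offset    = (32 + 15) & ~15 = 32   (32 = 16 header + 8 metadata + 8 SwiftClass)
//   T end       = 32 + 4          = 36
//   Int offset  = (36 + 7) & ~7   = 40
//   total size  = 40 + 8          = 48
//   total align = 7 | 15          = 15   (as an alignment mask)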

sil public_external @captured_dependent_out_param : $@convention(thin) <A> (@in A) -> @out A

sil @partial_apply_with_out_param : $@convention(thin) <T> (@in T) -> @callee_owned () -> @out T {
bb0(%x : $*T):
  %f = function_ref @captured_dependent_out_param : $@convention(thin) <B> (@in B) -> @out B
  %p = partial_apply %f<T>(%x) : $@convention(thin) <C> (@in C) -> @out C
  return %p : $@callee_owned () -> @out T
}

// CHECK-LABEL: define internal swiftcc void @"$s28captured_dependent_out_paramTA"(ptr noalias sret({{.*}}) %0, ptr swiftself %1) {{.*}} {
// CHECK: call swiftcc void @captured_dependent_out_param(ptr noalias sret({{.*}})

sil @partial_apply_dynamic_with_out_param : $@convention(thin) <T> (Int32, @owned @callee_owned (Int32) -> @out T) -> @callee_owned () -> @out T {
bb0(%x : $Int32, %f : $@callee_owned (Int32) -> @out T):
  %p = partial_apply %f(%x) : $@callee_owned (Int32) -> @out T
  return %p : $@callee_owned () -> @out T
}

// CHECK-LABEL: define{{( dllexport)?}}{{( protected)?}} swiftcc { ptr, ptr } @partial_apply_dynamic_with_out_param
// CHECK: insertvalue {{.*}} [[FORWARDER:@"\$sTA[A-Za-z0-9_]*"]]
// CHECK: define internal swiftcc void [[FORWARDER]]
// CHECK: call swiftcc void {{%.*}}(ptr noalias sret({{.*}})

class Base {
}
sil_vtable Base {}

class Sub : Base {
}

sil_vtable Sub {}

sil @parametric_casting_closure : $@convention(thin) <C where C : Base> (@guaranteed Base) -> @owned C {
bb0(%0 : $Base):
  %1 = unconditional_checked_cast %0 : $Base to C
  return %1 : $C
}

sil public_external @receive_closure : $@convention(thin) <C where C : Base> (@owned @callee_owned () -> (@owned C)) -> ()

// CHECK-LABEL: define{{( dllexport)?}}{{( protected)?}} swiftcc void @test_partial_apply(ptr %0)
// CHECK: [[TMP:%.*]] = call swiftcc %swift.metadata_response @"$s13partial_apply3SubCMa"(i64 0)
// CHECK: [[TYPE:%.*]] = extractvalue %swift.metadata_response [[TMP]], 0
// CHECK: call swiftcc void @receive_closure(ptr @"$s26parametric_casting_closureTA.{{[0-9]+}}", ptr %0, ptr [[TYPE]])

// CHECK-LABEL: define internal swiftcc ptr @"$s26parametric_casting_closureTA.{{[0-9]+}}"(ptr swiftself %0)
// CHECK: [[TMP:%.*]] = call swiftcc %swift.metadata_response @"$s13partial_apply3SubCMa"(i64 0)
// CHECK: [[TYPE:%.*]] = extractvalue %swift.metadata_response [[TMP]], 0
// CHECK: [[CALL:%.*]] = call swiftcc ptr @parametric_casting_closure(ptr %0, ptr [[TYPE]])
// CHECK: call void @swift_release(ptr %0)
// CHECK: ret ptr [[CALL]]

sil @test_partial_apply : $@convention(thin) (@guaranteed Base) -> () {
bb0(%0 : $Base):
  %1 = function_ref @parametric_casting_closure : $@convention(thin) <C where C : Base> (@guaranteed Base) -> @owned C
  // dummy partial apply to test subsequent mangling
  %6 = partial_apply %1<Sub>() : $@convention(thin) <C where C : Base> (@guaranteed Base) -> @owned C
  %2 = partial_apply %1<Sub>(%0) : $@convention(thin) <C where C : Base> (@guaranteed Base) -> @owned C
  %3 = function_ref @receive_closure : $@convention(thin) <C where C : Base> (@owned @callee_owned () -> (@owned C)) -> ()
  %4 = apply %3<Sub>(%2) : $@convention(thin) <C where C : Base> (@owned @callee_owned () -> (@owned C)) -> ()
  %5 = tuple ()
  return %5 : $()
}

sil public_external @partial_empty_box : $@convention(thin) (@guaranteed <τ_0_0> { var τ_0_0 } <()>, @inout_aliasable ()) -> ()

// CHECK-LABEL: define{{( dllexport)?}}{{( protected)?}} swiftcc void @empty_box()
sil @empty_box : $@convention(thin) () -> () {
entry:
  // CHECK: [[BOX:%.*]] = call {{.*}}swift_allocEmptyBox
  // CHECK: store ptr [[BOX]]
  // CHECK: store ptr undef
  %b = alloc_box $<τ_0_0> { var τ_0_0 } <()>
  %ba = project_box %b : $<τ_0_0> { var τ_0_0 } <()>, 0
  %f = function_ref @partial_empty_box : $@convention(thin) (@guaranteed <τ_0_0> { var τ_0_0 } <()>, @inout_aliasable ()) -> ()
  %g = partial_apply %f(%b, %ba) : $@convention(thin) (@guaranteed <τ_0_0> { var τ_0_0 } <()>, @inout_aliasable ()) -> ()
  return undef : $()
}

protocol P0 {}
protocol P1 { associatedtype X : P0 }
protocol P2 { associatedtype Y : P1 }

sil hidden_external @complex_generic_function : $@convention(thin) <T where T : P2, T.Y : P2> (Int) -> ()

sil @partial_apply_complex_generic_function : $@convention(thin) <T where T : P2, T.Y : P2> (Int) -> () {
bb0(%0 : $Int):
  %fn = function_ref @complex_generic_function : $@convention(thin) <T where T : P2, T.Y : P2> (Int) -> ()
  %pa = partial_apply %fn <T>(%0) : $@convention(thin) <T where T : P2, T.Y : P1, T.Y : P2> (Int) -> ()
  %result = tuple ()
  return %result : $()
}
// CHECK-LABEL: define{{( dllexport)?}}{{( protected)?}} swiftcc void @partial_apply_complex_generic_function(i64 %0, ptr %T, ptr %T.P2, ptr %T.Y.P2)
// CHECK: [[T0:%.*]] = call noalias ptr @swift_allocObject(ptr {{.*}}, i64 48, i64 7)
// CHECK-NEXT: [[BUFFER:%.*]] = getelementptr inbounds{{.*}} <{ %swift.refcounted, [24 x i8], %TSi }>, ptr [[T0]], i32 0, i32 1
// CHECK-NEXT: store ptr %T, ptr [[BUFFER]], align 8
// CHECK-NEXT: [[T0:%.*]] = getelementptr inbounds ptr, ptr [[BUFFER]], i32 1
// CHECK-NEXT: store ptr %T.P2, ptr [[T0]], align 8
// CHECK-NEXT: [[T0:%.*]] = getelementptr inbounds ptr, ptr [[BUFFER]], i32 2
// CHECK-NEXT: store ptr %T.Y.P2, ptr [[T0]], align 8
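
// Illustrative accounting for the allocation size checked above: 16 bytes of
// heap object header + 24 bytes for the three captured generic requirements
// in the [24 x i8] buffer (the metadata for T and the witness tables for
// T: P2 and T.Y: P2) + 8 bytes for the captured Int = 48 bytes, with a 7-byte
// alignment mask (8-byte alignment).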

struct ComplexBoundedType<T: P2> {}

// https://github.com/apple/swift/issues/43513
// Ensure that a 'partial_apply' which captures bound generic type metadata
// doesn't crash when restoring the generic context.

sil hidden_external @generic_function : $@convention(thin) <T> () -> ()
sil @partial_apply_with_generic_type : $@convention(thin) <U: P2> () -> () {
bb0:
  %fn = function_ref @generic_function : $@convention(thin) <T> () -> ()
  %pa = partial_apply %fn <ComplexBoundedType<U>>() : $@convention(thin) <T> () -> ()
  %result = tuple ()
  return %result : $()
}

// Crash on partial apply of witness_method without generic signature

extension Int: P0 {}

sil hidden_external @concrete_witness_method : $@convention(witness_method: P0) (Int, Int) -> ()

sil hidden @partial_apply_witness_method : $@convention(thin) (Int) -> () {
bb0(%0 : $Int):
  %fn = function_ref @concrete_witness_method : $@convention(witness_method: P0) (Int, Int) -> ()
  %pa = partial_apply %fn (%0) : $@convention(witness_method: P0) (Int, Int) -> ()
  %result = tuple ()
  return %result : $()
}


// Crash on partial apply of a generic enum.
enum GenericEnum<T> {
  case X(String)
  case Y(T, T, T, T, T)
}
sil public_external @generic_indirect_return : $@convention(thin) <T> (Int) -> @owned GenericEnum<T>

// CHECK-LABEL: define{{.*}} @partial_apply_generic_indirect_return
// CHECK: insertvalue {{.*}}$s23generic_indirect_returnTA

// CHECK-LABEL: define internal swiftcc void @"$s23generic_indirect_returnTA"(ptr noalias sret({{.*}}) captures(none) %0, ptr swiftself
// CHECK: call swiftcc void @generic_indirect_return({{.*}} %0,
// CHECK: ret void
sil @partial_apply_generic_indirect_return : $@convention(thin) (Int) -> @callee_owned () -> @owned GenericEnum<Int> {
bb0(%0 : $Int):
  %fn = function_ref @generic_indirect_return :$@convention(thin) <T> (Int) -> @owned GenericEnum<T>
  %pa = partial_apply %fn<Int> (%0) : $@convention(thin) <T> (Int) -> @owned GenericEnum<T>
  return %pa : $@callee_owned () -> @owned GenericEnum<Int>

}

// Crash on partial apply of a generic enum.
enum GenericEnum2<T> {
  case X(String)
  case Y(T)
}
sil public_external @generic_indirect_return2 : $@convention(thin) <T> (Int) -> @owned GenericEnum2<T>

// CHECK-LABEL: define{{.*}} @partial_apply_generic_indirect_return2
// CHECK: [[CTX:%.]] = call noalias ptr @swift_allocObject
// CHECK: store {{.*}}$s24generic_indirect_return2TA
// CHECK: store ptr [[CTX]]
// CHECK: [[FN:%.*]] = load ptr
// CHECK: [[CTX2:%.*]] = load ptr
// CHECK: [[R1:%.]] = insertvalue { ptr, ptr } undef, ptr [[FN]], 0
// CHECK: [[R2:%.*]] = insertvalue { ptr, ptr } [[R1]], ptr [[CTX2]], 1
// CHECK: ret { ptr, ptr } [[R2]]

// CHECK-LABEL: define internal swiftcc void @"$s24generic_indirect_return2TA"(ptr noalias sret({{.*}}) captures(none) %0, ptr swiftself %1)
// CHECK: call swiftcc void @generic_indirect_return2(ptr noalias sret({{.*}}) %0,
// CHECK: ret void
sil @partial_apply_generic_indirect_return2 : $@convention(thin) (Int) -> @callee_owned () -> @owned GenericEnum2<Int> {
bb0(%0 : $Int):
  %fn = function_ref @generic_indirect_return2 :$@convention(thin) <T> (Int) -> @owned GenericEnum2<T>
  %pa = partial_apply %fn<Int> (%0) : $@convention(thin) <T> (Int) -> @owned GenericEnum2<T>
  return %pa : $@callee_owned () -> @owned GenericEnum2<Int>
}

struct SwiftStruct {}

sil @fun : $@convention(thin) (@thin SwiftStruct.Type, @guaranteed SwiftClass) -> ()

// CHECK-LABEL: define{{.*}} swiftcc { ptr, ptr } @partial_apply_thin_type(ptr %0)
// CHECK: [[CLOSURE:%.*]] = insertvalue { ptr, ptr } { ptr @"$s3funTA", ptr undef }, ptr %0, 1
// CHECK: ret { ptr, ptr } [[CLOSURE]]

sil @partial_apply_thin_type : $@convention(thin) (@thin SwiftStruct.Type, @owned SwiftClass) -> @callee_owned () -> () {
entry(%0: $@thin SwiftStruct.Type, %1: $SwiftClass):
  %fun = function_ref @fun : $@convention(thin) (@thin SwiftStruct.Type, @guaranteed SwiftClass) -> ()
  %closure = partial_apply %fun (%0, %1) : $@convention(thin) (@thin SwiftStruct.Type, @guaranteed SwiftClass) -> ()
  return %closure : $@callee_owned () -> ()
}

sil @afun : $@convention(thin) (Int) -> @error Error

// Check that we don't assert on a thin noescape function.
// CHECK-LABEL: define{{.*}} swiftcc void @convert_thin_test
// CHECK: call swiftcc void @afun(i64 {{.*}}, ptr swiftself undef
// CHECK: ret void
sil @convert_thin_test : $@convention(thin) (Int) -> () {
bb(%0 : $Int):
  %f = function_ref @afun : $@convention(thin) (Int) -> @error Error
  %c = convert_function %f : $@convention(thin) (Int) -> @error Error to $@convention(thin) @noescape (Int) -> @error Error
  try_apply %c(%0) : $@convention(thin) @noescape (Int) -> @error Error, normal bb2, error bb1

bb1(%err: $Error):
  %t = tuple ()
  br bb3(%t: $())

bb2(%r : $()):
  br bb3(%r : $())

bb3(%v : $()):
  return %v : $()
}

struct A1 {
  let b: () -> ()
}

struct A2<T> {
  let a: T
}

class A3 {}

sil @amethod : $@convention(method) (@in_guaranteed A2<A3>) -> (@owned A1, @error Error)

sil @repo : $@convention(thin) (@in_guaranteed A2<A3>) -> @owned @callee_guaranteed () -> (@owned A1, @error Error) {
bb0(%0 : $*A2<A3>):
  %1 = load %0 : $*A2<A3>
  %2 = alloc_stack $A2<A3>
  store %1 to %2 : $*A2<A3>
  %4 = function_ref @amethod : $@convention(method) (@in_guaranteed A2<A3>) -> (@owned A1, @error Error)
  %5 = partial_apply [callee_guaranteed] %4(%2) : $@convention(method) (@in_guaranteed A2<A3>) -> (@owned A1, @error Error)
  dealloc_stack %2 : $*A2<A3>
  return %5 : $@callee_guaranteed () -> (@owned A1, @error Error)
}

sil @capture_class : $@convention(thin) (@guaranteed A3) -> ()

// CHECK-LABEL: define{{.*}} swiftcc ptr @partial_apply_stack_in_coroutine(ptr {{.*}} %0, ptr %1)
// CHECK: entry:
// CHECK: call swiftcc void @"$s13capture_classTA"(ptr swiftself %1)
sil @partial_apply_stack_in_coroutine : $@yield_once (@owned A3) -> () {
entry(%0: $A3):
  %f = function_ref @capture_class : $@convention(thin) (@guaranteed A3) -> ()
  %p = partial_apply [callee_guaranteed] [on_stack] %f(%0) : $@convention(thin) (@guaranteed A3) -> ()
  apply %p() : $@noescape @callee_guaranteed () -> ()
  dealloc_stack %p : $@noescape @callee_guaranteed () -> ()
  %1000 = integer_literal $Builtin.Int32, 1000
  yield (), resume resume, unwind unwind

resume:
  %ret = tuple ()
  return %ret : $()

unwind:
  unwind
}
sil_vtable A3 {}


// CHECK-LABEL: define{{.*}} swiftcc { ptr, ptr } @partial_apply_callee_guaranteed_indirect_guaranteed_class_pair_param
// CHECK-NOT: ret
// CHECK: call void @llvm.memcpy
// CHECK: insertvalue { ptr, ptr } { ptr [[FORWARDER:@[^,]*]],
// CHECK: ret
// CHECK: define{{.*}} swiftcc i64 [[FORWARDER]](i64 %0, ptr swiftself %1)
// CHECK: entry:
// CHECK: [[FIELD:%.*]] = getelementptr inbounds {{.*}}, ptr %1, i32 0, i32 1
// CHECK: call swiftcc i64 @indirect_guaranteed_captured_class_pair_param(i64 %0, ptr {{.*}} [[FIELD]])
// CHECK: ret
// CHECK: }

sil @partial_apply_callee_guaranteed_indirect_guaranteed_class_pair_param : $@convention(thin) (@in SwiftClassPair) -> @owned @callee_guaranteed (Int) -> Int {
bb0(%x : $*SwiftClassPair):
  %f = function_ref @indirect_guaranteed_captured_class_pair_param : $@convention(thin) (Int, @in_guaranteed SwiftClassPair) -> Int
  %p = partial_apply [callee_guaranteed] %f(%x) : $@convention(thin) (Int, @in_guaranteed SwiftClassPair) -> Int
  return %p : $@callee_guaranteed(Int) -> (Int)
}

sil public_external @use_closure2 : $@convention(thin) (@noescape @callee_guaranteed (Int) -> Int) -> ()

// CHECK-LABEL: define{{.*}} swiftcc void @partial_apply_stack_callee_guaranteed_indirect_guaranteed_class_pair_param(ptr {{.*}} %0)
// CHECK: [[CLOSURE_STACK_ADDR:%.*]] = getelementptr inbounds{{.*}} <{ %swift.refcounted, ptr }>, ptr {{.*}}, i32 0, i32 1
// CHECK: store ptr %0, ptr [[CLOSURE_STACK_ADDR]]
// CHECK: call swiftcc void @use_closure2(ptr [[FORWARDER:@[^,]*]], ptr {{.*}})
// CHECK: ret void

// CHECK: define{{.*}} swiftcc i64 [[FORWARDER]](i64 %0, ptr swiftself %1)
// CHECK: entry:
// CHECK: [[ADDR:%.*]] = load ptr, ptr
// CHECK: [[RES:%.*]] = tail call swiftcc i64 @indirect_guaranteed_captured_class_pair_param(i64 %0, ptr {{.*}} [[ADDR]])
// CHECK: ret i64 [[RES]]
// CHECK: }

sil @partial_apply_stack_callee_guaranteed_indirect_guaranteed_class_pair_param : $@convention(thin) (@in_guaranteed SwiftClassPair) -> () {
bb0(%x : $*SwiftClassPair):
  %f = function_ref @indirect_guaranteed_captured_class_pair_param : $@convention(thin) (Int, @in_guaranteed SwiftClassPair) -> Int
  %p = partial_apply [callee_guaranteed] [on_stack] %f(%x) : $@convention(thin) (Int, @in_guaranteed SwiftClassPair) -> Int
  %u = function_ref @use_closure2 : $@convention(thin) (@noescape @callee_guaranteed (Int) -> Int) -> ()
  %r = apply %u(%p) : $@convention(thin) (@noescape @callee_guaranteed (Int) -> Int) -> ()
  dealloc_stack %p : $@noescape @callee_guaranteed (Int) ->(Int)
  %t = tuple()
  return %t : $()
}

// CHECK: define{{.*}} swiftcc void @partial_apply_stack_callee_guaranteed_indirect_in_class_pair_param(ptr {{.*}} %0)
// CHECK: [[CLOSURE_STACK_ADDR:%.*]] = getelementptr inbounds{{.*}} <{ %swift.refcounted, ptr }>, ptr {{.*}}, i32 0, i32 1
// CHECK: store ptr %0, ptr [[CLOSURE_STACK_ADDR]]
// CHECK: call swiftcc void @use_closure2(ptr [[FORWARDER:@[^,]*]], ptr {{.*}})
// CHECK: ret void

// CHECK: define{{.*}} swiftcc i64 [[FORWARDER]](i64 %0, ptr swiftself %1)
// CHECK: entry:
// CHECK: [[VALUE_ADDR:%.*]] = load ptr, ptr {{.*}}
// CHECK: [[RES:%.*]] = tail call swiftcc i64 @indirect_in_captured_class_pair_param(i64 %0, ptr {{.*}} [[VALUE_ADDR]])
// CHECK: ret i64 [[RES]]

sil public_external @indirect_in_captured_class_pair_param : $@convention(thin) (Int, @in_guaranteed SwiftClassPair) -> Int

sil @partial_apply_stack_callee_guaranteed_indirect_in_class_pair_param : $@convention(thin) (@in SwiftClassPair) -> () {
bb0(%x : $*SwiftClassPair):
  %f = function_ref @indirect_in_captured_class_pair_param : $@convention(thin) (Int, @in_guaranteed SwiftClassPair) -> Int
  %p = partial_apply [callee_guaranteed] [on_stack] %f(%x) : $@convention(thin) (Int, @in_guaranteed SwiftClassPair) -> Int
  %u = function_ref @use_closure2 : $@convention(thin) (@noescape @callee_guaranteed (Int) -> Int) -> ()
  %r = apply %u(%p) : $@convention(thin) (@noescape @callee_guaranteed (Int) -> Int) -> ()
  dealloc_stack %p : $@noescape @callee_guaranteed (Int) ->(Int)
  destroy_addr %x: $*SwiftClassPair
  %t = tuple()
  return %t : $()
}


sil public_external @closure : $@convention(thin) (@in_guaranteed ResilientInt, @guaranteed SwiftClass) -> ()

// Make sure that we use the heap header size (16) for the initial offset.
// CHECK-LABEL: define{{.*}} swiftcc void @test_initial_offset(ptr noalias %0, ptr %1)
// CHECK: [[T0:%.*]] = call swiftcc %swift.metadata_response @"$s16resilient_struct12ResilientIntVMa"
// CHECK: [[MD:%.*]] = extractvalue %swift.metadata_response [[T0]], 0
// CHECK: [[VWT_PTR:%.*]] = getelementptr inbounds ptr, ptr [[MD]], i64 -1
// CHECK: [[VWT:%.*]] = load ptr, ptr [[VWT_PTR]]
// CHECK: [[FLAGS_PTR:%.*]] = getelementptr inbounds{{.*}} %swift.vwtable, ptr [[VWT]], i32 0, i32 10
// CHECK: [[FLAGS:%.*]] = load i32, ptr [[FLAGS_PTR]]
// CHECK: [[FLAGS2:%.*]] = zext i32 [[FLAGS]] to i64
// CHECK: [[ALIGNMASK:%.*]] = and i64 [[FLAGS2]], 255
// CHECK: = xor i64 [[ALIGNMASK]], -1
// CHECK: = add i64 16, [[ALIGNMASK]]

sil @test_initial_offset : $@convention(thin) (@in ResilientInt, @guaranteed SwiftClass) -> () {
bb0(%x : $*ResilientInt, %y : $SwiftClass):
  %f = function_ref @closure : $@convention(thin) (@in_guaranteed ResilientInt, @guaranteed SwiftClass) -> ()
  %p = partial_apply [callee_guaranteed] %f(%x, %y) : $@convention(thin) (@in_guaranteed ResilientInt, @guaranteed SwiftClass) -> ()
  release_value %p : $@callee_guaranteed () ->()
  %t = tuple()
  return %t : $()
}
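
// Worked example (illustrative only): with the 16-byte heap object header and
// a ResilientInt alignment mask of 7, the first capture starts at
// (16 + 7) & ~7 = 16; a larger mask, say 31, would push it to
// (16 + 31) & ~31 = 32.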

protocol Proto1 {}
protocol Proto2 {}
struct EmptyType : Proto1 { }

struct SomeType : Proto2 {
  var d : ResilientInt // some resilient type
  var x : Int
}

sil @foo : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 : Proto1, τ_0_1 : Proto2> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> ()

// CHECK-64-LABEL: define{{.*}} swiftcc void @empty_followed_by_non_fixed(ptr noalias %0)
// CHECK-64: [[FLAGS:%.*]] = load i32, ptr
// CHECK-64: [[FLAGS2:%.*]] = zext i32 [[FLAGS]] to i64
// CHECK-64: [[ALIGNMASK:%.*]] = and i64 [[FLAGS2]], 255
// CHECK-64: [[NOTALIGNMASK:%.*]] = xor i64 [[ALIGNMASK]], -1
// Make sure we take the header offset (16) into account.
// CHECK-64: [[TMP:%.*]] = add i64 16, [[ALIGNMASK]]
// CHECK-64: [[OFFSET:%.*]] = and i64 [[TMP]], [[NOTALIGNMASK]]
// CHECK-64: [[CONTEXT:%.*]] = call noalias ptr @swift_allocObject
// CHECK-64: [[GEP:%.*]] = getelementptr inbounds i8, ptr [[CONTEXT]], i64 [[OFFSET]]
// CHECK-64: call ptr @"$s13partial_apply8SomeTypeVWOb"(ptr {{.*}}, ptr [[GEP]])

sil @empty_followed_by_non_fixed : $@convention(thin) (EmptyType, @in_guaranteed SomeType) -> () {
entry(%0 : $EmptyType, %1: $*SomeType):
  %5 = alloc_stack $EmptyType
  store %0 to %5 : $*EmptyType
  %31 = function_ref @foo : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 : Proto1, τ_0_1 : Proto2> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> ()
  %32 = alloc_stack $EmptyType
  copy_addr %5 to [init] %32 : $*EmptyType
  %34 = alloc_stack $SomeType
  copy_addr %1 to [init] %34 : $*SomeType // id: %35
  %36 = partial_apply [callee_guaranteed] %31<EmptyType, SomeType>(%32, %34) : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 : Proto1, τ_0_1 : Proto2> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1) -> ()
  release_value %36: $@callee_guaranteed () ->()
  dealloc_stack %34 : $*SomeType
  dealloc_stack %32 : $*EmptyType
  dealloc_stack %5 : $*EmptyType
  %40 = tuple()
  return %40 : $()
}

struct FixedType {
  var f: Int32
}
// CHECK-64-LABEL: define{{.*}} swiftcc void @fixed_followed_by_empty_followed_by_non_fixed
// CHECK-64-NOT: ret
// CHECK-64: [[FLAGS:%.*]] = load i32, ptr
// CHECK-64: [[FLAGS2:%.*]] = zext i32 [[FLAGS]] to i64
// CHECK-64: [[ALIGNMASK:%.*]] = and i64 [[FLAGS2]], 255
// CHECK-64: [[NOTALIGNMASK:%.*]] = xor i64 [[ALIGNMASK]], -1
// Make sure we compute the correct offset of the non-fixed field.
// CHECK-64: [[TMP:%.*]] = add i64 20, [[ALIGNMASK]]
// CHECK-64: ret

sil @foo2 : $@convention(thin) <τ_0_0, τ_0_1, τ_0_2> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1, @in_guaranteed τ_0_2) -> ()
sil @fixed_followed_by_empty_followed_by_non_fixed : $@convention(thin) (EmptyType, @in_guaranteed SomeType, FixedType) -> () {
entry(%0 : $EmptyType, %1: $*SomeType, %3: $FixedType):
  %5 = alloc_stack $EmptyType
  store %0 to %5 : $*EmptyType
  %7 = alloc_stack $FixedType
  store %3 to %7 : $*FixedType
  %31 = function_ref @foo2 : $@convention(thin) <τ_0_0, τ_0_1, τ_0_2> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1, @in_guaranteed τ_0_2) -> ()
  %32 = alloc_stack $EmptyType
  copy_addr %5 to [init] %32 : $*EmptyType
  %34 = alloc_stack $SomeType
  copy_addr %1 to [init] %34 : $*SomeType // id: %35
  %36 = partial_apply [callee_guaranteed] %31<FixedType, EmptyType, SomeType>(%7, %32, %34) : $@convention(thin) <τ_0_0, τ_0_1, τ_0_2> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_1, @in_guaranteed τ_0_2) -> ()
  release_value %36: $@callee_guaranteed () ->()
  dealloc_stack %34 : $*SomeType
  dealloc_stack %32 : $*EmptyType
  dealloc_stack %7 : $*FixedType
  dealloc_stack %5 : $*EmptyType
  %40 = tuple()
  return %40 : $()
}
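
// Worked example of the 20-byte base offset checked above (illustrative only):
// the context starts with the 16-byte heap object header, the FixedType
// capture (a single Int32) occupies bytes 16..<20, EmptyType contributes
// nothing, and the non-fixed SomeType capture then starts at
// (20 + alignMask) & ~alignMask; e.g. with an alignment mask of 7 that is
// (20 + 7) & ~7 = 24.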

// Test that we don't have an alloc object with 0, because that is not allowed
// CHECK-LABEL: define{{.*}} swiftcc void @my_test_case
// CHECK-NOT: swift_allocObject
// CHECK: ret
sil @take_empty : $@convention(thin) (@in_guaranteed EmptyType) -> ()
sil @my_test_case : $@convention(thin) () -> () {
entry:
  %5 = alloc_stack $EmptyType
  // store % to %5
  %f = function_ref @take_empty : $@convention(thin) (@in_guaranteed EmptyType) -> ()
  %36 = partial_apply [callee_guaranteed] %f(%5) : $@convention(thin) (@in_guaranteed EmptyType) -> ()
  release_value %36: $@callee_guaranteed () ->()
  dealloc_stack %5 : $*EmptyType
  %t = tuple()
  return %t : $()
}