// RUN: %target-sil-opt -sil-print-types -enable-sil-verify-all -eager-specializer %s | %FileCheck %s
// RUN: %target-sil-opt -sil-print-types -enable-sil-verify-all -eager-specializer %s -o %t.sil && %target-swift-frontend -module-name=eager_specialize -emit-ir %t.sil | %FileCheck --check-prefix=CHECK-IRGEN %s
// RUN: %target-sil-opt -sil-print-types -enable-sil-verify-all -eager-specializer -sil-inline-generics=true -inline %s | %FileCheck --check-prefix=CHECK-EAGER-SPECIALIZE-AND-GENERICS-INLINE %s
// XFAIL: CPU=arm64e
sil_stage canonical
import Builtin
import Swift
import SwiftShims
public protocol AnElt {
}
public protocol HasElt {
associatedtype Elt
init(e: Elt)
}
struct X : AnElt {
@_hasStorage var i: Int { get set }
init(i: Int)
}
struct S : HasElt {
typealias Elt = X
@_hasStorage var e: X { get set }
init(e: Elt)
}
public struct G<Container : HasElt> {
public func getContainer(e: Container.Elt) -> Container
init()
}
// CHECK: @_specialize(exported: false, kind: full, where T == S)
// CHECK: public func getGenericContainer<T>(g: G<T>, e: T.Elt) -> T where T : HasElt, T.Elt : AnElt
@_specialize(where T == S)
public func getGenericContainer<T>(g: G<T>, e: T.Elt) -> T where T.Elt : AnElt
enum ArithmeticError : Error {
case DivByZero
func hash(into hasher: inout Hasher)
var _code: Int { get }
}
// CHECK: @_specialize(exported: false, kind: full, where T == Int)
// CHECK: public func divideNum<T>(num: T, den: T) throws -> T where T : SignedInteger, T : _ExpressibleByBuiltinIntegerLiteral
@_specialize(where T == Int)
public func divideNum<T : SignedInteger & _ExpressibleByBuiltinIntegerLiteral>(num: T, den: T) throws -> T
@inline(never) @_optimize(none) func foo<T>(t: T) -> Int64
// CHECK: @_specialize(exported: false, kind: full, where T == Int64)
// CHECK: @_specialize(exported: false, kind: full, where T == Float)
// CHECK: public func voidReturn<T>(t: T)
@_specialize(where T == Int64)
@_specialize(where T == Float)
public func voidReturn<T>(t: T)
// CHECK: @_specialize(exported: false, kind: full, where T == Int64)
// CHECK: @_specialize(exported: false, kind: full, where T == Float)
// CHECK: public func nonvoidReturn<T>(t: T) -> Int64
@_specialize(where T == Int64)
@_specialize(where T == Float)
public func nonvoidReturn<T>(t: T) -> Int64
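// Conceptually, the eager specializer turns each [_specialize] generic into a
// type-dispatching thunk: it compares the runtime metatype against the requested
// specialization and branches to the specialized body, falling back to the
// original generic code otherwise. A rough pseudo-Swift sketch of the dispatch
// that the CHECK patterns below verify (voidReturn_Int64 and voidReturn_Float are
// purely illustrative names, not symbols produced by this test):
//
//   public func voidReturn<T>(t: T) {
//     if T.self == Int64.self { return voidReturn_Int64(t: t as! Int64) }
//     if T.self == Float.self { return voidReturn_Float(t: t as! Float) }
//     _ = foo(t: t)   // original generic body
//   }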
// --- test: protocol conformance, substitution for dependent types
// non-layout dependent generic arguments, emitUncheckedBitCast (non
// address-type)
// Helper
//
// G.getContainer(A.Elt) -> A
sil @$s16eager_specialize1GV12getContaineryx3EltQzF : $@convention(method) <Container where Container : HasElt> (@in Container.Elt, G<Container>) -> @out Container {
bb0(%0 : $*Container, %1 : $*Container.Elt, %2 : $G<Container>):
%4 = witness_method $Container, #HasElt.init!allocator : $@convention(witness_method: HasElt) <τ_0_0 where τ_0_0 : HasElt> (@in τ_0_0.Elt, @thick τ_0_0.Type) -> @out τ_0_0
%5 = metatype $@thick Container.Type
%6 = apply %4<Container>(%0, %1, %5) : $@convention(witness_method: HasElt) <τ_0_0 where τ_0_0 : HasElt> (@in τ_0_0.Elt, @thick τ_0_0.Type) -> @out τ_0_0
%7 = tuple ()
return %7 : $()
}
// getGenericContainer<A where ...> (G<A>, e : A.Elt) -> A
sil [_specialize where T == S] @$s16eager_specialize19getGenericContainer_1exAA1GVyxG_3EltQztAA03HasF0RzAA02AnF0AHRQlF : $@convention(thin) <T where T : HasElt, T.Elt : AnElt> (G<T>, @in T.Elt) -> @out T {
bb0(%0 : $*T, %1 : $G<T>, %2 : $*T.Elt):
// function_ref G.getContainer(A.Elt) -> A
%5 = function_ref @$s16eager_specialize1GV12getContaineryx3EltQzF : $@convention(method) <τ_0_0 where τ_0_0 : HasElt> (@in τ_0_0.Elt, G<τ_0_0>) -> @out τ_0_0
%6 = apply %5<T>(%0, %2, %1) : $@convention(method) <τ_0_0 where τ_0_0 : HasElt> (@in τ_0_0.Elt, G<τ_0_0>) -> @out τ_0_0
%7 = tuple ()
return %7 : $()
}
// Specialization getGenericContainer<S, X>
//
// CHECK-LABEL: sil shared @$s16eager_specialize19getGenericContainer_1exAA1GVyxG_3EltQztAA03HasF0RzAA02AnF0AHRQlF4main1SV_Tg5 : $@convention(thin) (G<S>, X) -> S {
// CHECK: bb0(%0 : $G<S>, %1 : $X):
// CHECK: return %{{.*}} : $S
// Generic with specialized dispatch. No more [specialize] attribute.
//
// CHECK-LABEL: sil @$s16eager_specialize19getGenericContainer_1exAA1GVyxG_3EltQztAA03HasF0RzAA02AnF0AHRQlF : $@convention(thin) <T where T : HasElt, T.Elt : AnElt> (G<T>, @in T.Elt) -> @out T {
// CHECK: bb0(%0 : $*T, %1 : $G<T>, %2 : $*T.Elt):
// CHECK: %{{.*}} = metatype $@thick T.Type
// CHECK: %{{.*}} = metatype $@thick S.Type
// CHECK: %{{.*}} = unchecked_bitwise_cast %{{.*}} : $@thick T.Type to $Builtin.Word
// CHECK: %{{.*}} = unchecked_bitwise_cast %{{.*}} : $@thick S.Type to $Builtin.Word
// CHECK: %{{.*}} = builtin "cmp_eq_Word"(%{{.*}} : $Builtin.Word, %{{.*}} : $Builtin.Word) : $Builtin.Int1
// CHECK: cond_br %{{.*}}, bb4, bb1
// CHECK: bb2:
// CHECK: %{{.*}} = function_ref @$s16eager_specialize1GV12getContaineryx3EltQzF : $@convention(method) <τ_0_0 where τ_0_0 : HasElt> (@in τ_0_0.Elt, G<τ_0_0>) -> @out τ_0_0
// CHECK: %{{.*}} = apply %{{.*}}<T>(%0, %2, %1) : $@convention(method) <τ_0_0 where τ_0_0 : HasElt> (@in τ_0_0.Elt, G<τ_0_0>) -> @out τ_0_0
// CHECK: br bb3
// CHECK: bb3:
// CHECK: %{{.*}} = tuple ()
// CHECK: return %{{.*}} : $()
// CHECK: bb4:
// CHECK: %{{.*}} = unchecked_addr_cast %0 : $*T to $*S
// CHECK: %{{.*}} = unchecked_trivial_bit_cast %1 : $G<T> to $G<S>
// CHECK: %{{.*}} = unchecked_addr_cast %2 : $*T.Elt to $*X
// CHECK: %{{.*}} = load %{{.*}} : $*X
// function_ref specialized getGenericContainer<A where ...> (G<A>, e : A.Elt) -> A
// CHECK: %{{.*}} = function_ref @$s16eager_specialize19getGenericContainer_1exAA1GVyxG_3EltQztAA03HasF0RzAA02AnF0AHRQlF4main1SV_Tg5 : $@convention(thin) (G<S>, X) -> S
// CHECK: %{{.*}} = apply %{{.*}}(%{{.*}}, %{{.*}}) : $@convention(thin) (G<S>, X) -> S
// CHECK: store %{{.*}} to %{{.*}} : $*S
// CHECK: %{{.*}} = tuple ()
// CHECK: %{{.*}} = unchecked_trivial_bit_cast %{{.*}} : $() to $()
// CHECK: br bb3
// --- test: rethrow
// Helper
//
// static != infix<A where ...> (A, A) -> Bool
sil public_external [serialized] @$ss2neoiySbx_xts9EquatableRzlFZ : $@convention(thin) <T where T : Equatable> (@in T, @in T) -> Bool {
bb0(%0 : $*T, %1 : $*T):
%4 = witness_method $T, #Equatable."==" : $@convention(witness_method: Equatable) <τ_0_0 where τ_0_0 : Equatable> (@in τ_0_0, @in τ_0_0, @thick τ_0_0.Type) -> Bool
%5 = metatype $@thick T.Type
%6 = apply %4<T>(%0, %1, %5) : $@convention(witness_method: Equatable) <τ_0_0 where τ_0_0 : Equatable> (@in τ_0_0, @in τ_0_0, @thick τ_0_0.Type) -> Bool
%7 = struct_extract %6 : $Bool, #Bool._value
%8 = integer_literal $Builtin.Int1, -1
%9 = builtin "xor_Int1"(%7 : $Builtin.Int1, %8 : $Builtin.Int1) : $Builtin.Int1
%10 = struct $Bool (%9 : $Builtin.Int1)
return %10 : $Bool
}
// divideNum<A where ...> (A, den : A) throws -> A
sil [_specialize where T == Int] @$s16eager_specialize9divideNum_3denxx_xtKs13SignedIntegerRzlF : $@convention(thin) <T where T : SignedInteger, T : _ExpressibleByBuiltinIntegerLiteral> (@in T, @in T) -> (@out T, @error any Error) {
bb0(%0 : $*T, %1 : $*T, %2 : $*T):
// function_ref static != infix<A where ...> (A, A) -> Bool
%5 = function_ref @$ss2neoiySbx_xts9EquatableRzlFZ : $@convention(thin) <τ_0_0 where τ_0_0 : Equatable> (@in τ_0_0, @in τ_0_0) -> Bool
%6 = alloc_stack $T
copy_addr %2 to [init] %6 : $*T
%8 = witness_method $T, #_ExpressibleByBuiltinIntegerLiteral.init!allocator : $@convention(witness_method: _ExpressibleByBuiltinIntegerLiteral) <τ_0_0 where τ_0_0 : _ExpressibleByBuiltinIntegerLiteral> (Builtin.IntLiteral, @thick τ_0_0.Type) -> @out τ_0_0
%9 = metatype $@thick T.Type
%10 = integer_literal $Builtin.IntLiteral, 0
%11 = alloc_stack $T
%12 = apply %8<T>(%11, %10, %9) : $@convention(witness_method: _ExpressibleByBuiltinIntegerLiteral) <τ_0_0 where τ_0_0 : _ExpressibleByBuiltinIntegerLiteral> (Builtin.IntLiteral, @thick τ_0_0.Type) -> @out τ_0_0
%13 = apply %5<T>(%6, %11) : $@convention(thin) <τ_0_0 where τ_0_0 : Equatable> (@in τ_0_0, @in τ_0_0) -> Bool
%14 = struct_extract %13 : $Bool, #Bool._value
dealloc_stack %11 : $*T
dealloc_stack %6 : $*T
cond_br %14, bb2, bb1
bb1:
destroy_addr %2 : $*T
destroy_addr %1 : $*T
%24 = alloc_existential_box $Error, $ArithmeticError
%25 = project_existential_box $ArithmeticError in %24 : $Error
%26 = enum $ArithmeticError, #ArithmeticError.DivByZero!enumelt
store %26 to %25 : $*ArithmeticError
throw %24 : $Error
bb2:
%18 = witness_method $T, #BinaryInteger."/" : $@convention(witness_method: BinaryInteger) <τ_0_0 where τ_0_0 : BinaryInteger> (@in τ_0_0, @in τ_0_0, @thick τ_0_0.Type) -> @out τ_0_0
%19 = apply %18<T>(%0, %1, %2, %9) : $@convention(witness_method: BinaryInteger) <τ_0_0 where τ_0_0 : BinaryInteger> (@in τ_0_0, @in τ_0_0, @thick τ_0_0.Type) -> @out τ_0_0
%20 = tuple ()
return %20 : $()
}
// specialized divideNum<A where ...> (A, den : A) throws -> A
// CHECK-LABEL: sil shared @$s16eager_specialize9divideNum_3denxx_xtKSZRzlFSi_Tg5 : $@convention(thin) (Int, Int) -> (Int, @error any Error) {
// CHECK: bb0(%0 : $Int, %1 : $Int):
// CHECK: return %{{.*}}
// CHECK: throw %{{.*}}
// Generic with specialized dispatch. No more [specialize] attribute.
//
// CHECK-LABEL: sil @$s16eager_specialize9divideNum_3denxx_xtKs13SignedIntegerRzlF : $@convention(thin) <T where T : SignedInteger, T : _ExpressibleByBuiltinIntegerLiteral> (@in T, @in T) -> (@out T, @error any Error) {
// CHECK: bb0(%0 : $*T, %1 : $*T, %2 : $*T):
// CHECK: %3 = metatype $@thick T.Type
// CHECK: %4 = metatype $@thick Int.Type
// CHECK: %5 = unchecked_bitwise_cast %3 : $@thick T.Type to $Builtin.Word
// CHECK: %6 = unchecked_bitwise_cast %4 : $@thick Int.Type to $Builtin.Word
// CHECK: %7 = builtin "cmp_eq_Word"(%5 : $Builtin.Word, %6 : $Builtin.Word) : $Builtin.Int1
// CHECK: cond_br %7, bb7, bb1
// CHECK: bb2:
// CHECK: // function_ref static != infix<A>(_:_:)
// CHECK: cond_br %{{.*}}, bb5, bb3
// CHECK: bb3:
// CHECK: br bb4(%{{.*}} : $any Error)
// CHECK: bb4(%{{.*}} : $any Error):
// CHECK: throw %{{.*}} : $any Error
// CHECK: bb5:
// CHECK: %{{.*}} = witness_method $T, #BinaryInteger."/" : {{.*}} : $@convention(witness_method: BinaryInteger) <τ_0_0 where τ_0_0 : BinaryInteger> (@in τ_0_0, @in τ_0_0, @thick τ_0_0.Type) -> @out τ_0_0
// CHECK: apply %{{.*}}<T>({{.*}}) : $@convention(witness_method: BinaryInteger) <τ_0_0 where τ_0_0 : BinaryInteger> (@in τ_0_0, @in τ_0_0, @thick τ_0_0.Type) -> @out τ_0_0
// CHECK: br bb6
// CHECK: bb6:
// CHECK: %{{.*}} = tuple ()
// CHECK: return %{{.*}} : $()
// CHECK: bb7:
// CHECK: %{{.*}} = unchecked_addr_cast %0 : $*T to $*Int
// CHECK: %{{.*}} = unchecked_addr_cast %1 : $*T to $*Int
// CHECK: %{{.*}} = load %{{.*}} : $*Int
// CHECK: %{{.*}} = unchecked_addr_cast %2 : $*T to $*Int
// CHECK: %{{.*}} = load %{{.*}} : $*Int
// CHECK: // function_ref specialized divideNum<A>(_:den:)
// CHECK: %{{.*}} = function_ref @$s16eager_specialize9divideNum_3denxx_xtKSZRzlFSi_Tg5 : $@convention(thin) (Int, Int) -> (Int, @error any Error)
// CHECK: try_apply %{{.*}}(%{{.*}}, %{{.*}}) : $@convention(thin) (Int, Int) -> (Int, @error any Error), normal bb9, error bb8
// CHECK: bb8(%{{.*}} : $any Error):
// CHECK: br bb4(%{{.*}} : $any Error)
// CHECK: bb9(%{{.*}} : $Int):
// CHECK: store %{{.*}} to %{{.*}} : $*Int
// CHECK: %{{.*}} = tuple ()
// CHECK: %{{.*}} = unchecked_trivial_bit_cast %{{.*}} : $() to $()
// CHECK: br bb6
// --- test: multiple void and non-void return values
// foo<A> (A) -> Int64
sil hidden [noinline] [Onone] @$s16eager_specialize3fooys5Int64VxlF : $@convention(thin) <T> (@in T) -> Int64 {
// %0 // users: %1, %4
bb0(%0 : $*T):
%2 = integer_literal $Builtin.Int64, 3
%3 = struct $Int64 (%2 : $Builtin.Int64)
destroy_addr %0 : $*T
return %3 : $Int64
}
// voidReturn<A> (A) -> ()
sil [_specialize where T == Float] [_specialize where T == Int64] @$s16eager_specialize10voidReturnyyxlF : $@convention(thin) <T> (@in T) -> () {
bb0(%0 : $*T):
// function_ref foo<A> (A) -> Int64
%2 = function_ref @$s16eager_specialize3fooys5Int64VxlF : $@convention(thin) <τ_0_0> (@in τ_0_0) -> Int64
%3 = apply %2<T>(%0) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> Int64
%4 = tuple ()
return %4 : $()
}
// CHECK-LABEL: // specialized voidReturn<A>(_:)
// CHECK: sil shared @$s16eager_specialize10voidReturnyyxlFSf_Tg5 : $@convention(thin) (Float) -> () {
// %0 // user: %2
// CHECK: bb0(%0 : $Float):
// CHECK: return %5 : $()
// CHECK-LABEL: // specialized voidReturn<A>(_:)
// CHECK: sil shared @$s16eager_specialize10voidReturnyyxlFs5Int64V_Tg5 : $@convention(thin) (Int64) -> () {
// CHECK: bb0(%0 : $Int64):
// CHECK: return %5 : $()
// Generic with specialized dispatch. No more [specialize] attribute.
//
// CHECK-LABEL: // voidReturn<A>(_:)
// CHECK: sil @$s16eager_specialize10voidReturnyyxlF : $@convention(thin) <T> (@in T) -> () {
// CHECK: bb0(%0 : $*T):
// CHECK: builtin "cmp_eq_Word"
// CHECK: cond_br %{{.*}}, bb7, bb1
// CHECK: bb2:
// CHECK: builtin "cmp_eq_Word"
// CHECK: cond_br %{{.*}}, bb6, bb3
// CHECK: bb4:
// CHECK: function_ref @$s16eager_specialize3fooys5Int64VxlF : $@convention(thin) <τ_0_0> (@in τ_0_0) -> Int64
// CHECK: apply %{{.*}}<T>(%0) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> Int64
// CHECK: br bb5
// CHECK: bb5:
// CHECK: tuple ()
// CHECK: return
// CHECK: bb6:
// CHECK: function_ref @$s16eager_specialize10voidReturnyyxlFSf_Tg5 : $@convention(thin) (Float) -> ()
// CHECK: br bb5
// CHECK: bb7:
// CHECK: br bb5
// nonvoidReturn<A>(A) -> Int64
sil [_specialize where T == Float] [_specialize where T == Int64] @$s16eager_specialize13nonvoidReturnys5Int64VxlF : $@convention(thin) <T> (@in T) -> Int64 {
// %0 // users: %1, %3
bb0(%0 : $*T):
// function_ref foo<A>(A) -> Int64
%2 = function_ref @$s16eager_specialize3fooys5Int64VxlF : $@convention(thin) <τ_0_0> (@in τ_0_0) -> Int64
%3 = apply %2<T>(%0) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> Int64
return %3 : $Int64
}
// CHECK-LABEL: // specialized nonvoidReturn<A>(_:)
// CHECK: sil shared @$s16eager_specialize13nonvoidReturnys5Int64VxlFSf_Tg5 : $@convention(thin) (Float) -> Int64 {
// CHECK: bb0(%0 : $Float):
// CHECK: return %4 : $Int64
// CHECK-LABEL: // specialized nonvoidReturn<A>(_:)
// CHECK: sil shared @$s16eager_specialize13nonvoidReturnys5Int64VxlFAD_Tg5 : $@convention(thin) (Int64) -> Int64 {
// CHECK: bb0(%0 : $Int64):
// CHECK: return %4 : $Int64
// CHECK-LABEL: // nonvoidReturn<A>(_:)
// CHECK: sil @$s16eager_specialize13nonvoidReturnys5Int64VxlF : $@convention(thin) <T> (@in T) -> Int64 {
// CHECK: bb0(%0 : $*T):
// CHECK: builtin "cmp_eq_Word"
// CHECK: cond_br %{{.*}}, bb7, bb1
// CHECK: bb1:
// CHECK: builtin "cmp_eq_Word"
// CHECK: cond_br %{{.*}}, bb6, bb3
// CHECK: bb4:
// CHECK: // function_ref foo<A>(_:)
// CHECK: function_ref @$s16eager_specialize3fooys5Int64VxlF : $@convention(thin) <τ_0_0> (@in τ_0_0) -> Int64
// CHECK: apply %{{.*}}<T>
// CHECK: br bb5(%{{.*}} : $Int64)
// CHECK: bb5(%{{.*}} : $Int64):
// CHECK: return %{{.*}} : $Int64
// CHECK: bb6:
// CHECK: br bb5(%{{.*}} : $Int64)
// CHECK: bb7:
// CHECK: br bb5(%{{.*}} : $Int64)
////////////////////////////////////////////////////////////////////
// Check the ability to specialize for _Trivial(64) and _Trivial(32)
////////////////////////////////////////////////////////////////////
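// At the source level, the [_specialize where S : _Trivial(N)] attributes on the
// function below would roughly be spelled as shown here (illustrative sketch only;
// @_specialize and the _Trivial layout constraint are underscored, unofficial
// spellings, and the body is an approximation of the SIL that follows):
//
//   @inline(never)
//   @_specialize(where S: _Trivial(32))
//   @_specialize(where S: _Trivial(64))
//   public func copyValueAndReturn<S>(_ x: S, s: inout S) -> S { return s }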
// copyValueAndReturn<A> (A, s : inout A) -> A
sil [noinline] [_specialize where S : _Trivial(32)] [_specialize where S : _Trivial(64)] @$s16eager_specialize18copyValueAndReturn_1sxx_xztlF : $@convention(thin) <S> (@in S, @inout S) -> @out S {
bb0(%0 : $*S, %1 : $*S, %2 : $*S):
copy_addr %2 to [init] %0 : $*S
destroy_addr %1 : $*S
%7 = tuple ()
return %7 : $()
} // end sil function '$s16eager_specialize18copyValueAndReturn_1sxx_xztlF'
// Check specialized for 32 bits
// specialized copyValueAndReturn<A>(A, s : inout A) -> A
// CHECK-LABEL: sil shared [noinline] @$s16eager_specialize18copyValueAndReturn_1sxx_xztlFxxxRlze31_lIetilr_Tp5 : $@convention(thin) <S where S : _Trivial(32)> (@in S, @inout S) -> @out S
// CHECK: bb0(%0 : $*S, %1 : $*S, %2 : $*S):
// CHECK: copy_addr %2 to [init] %0 : $*S
// CHECK: destroy_addr %1 : $*S
// CHECK: %{{.*}} = tuple ()
// CHECK: return %{{.*}} : $()
// CHECK: } // end sil function '$s16eager_specialize18copyValueAndReturn_1sxx_xztlFxxxRlze31_lIetilr_Tp5'
// Check specialized for 64 bits
// specialized copyValueAndReturn<A>(A, s : inout A) -> A
// CHECK-LABEL: sil shared [noinline] @$s16eager_specialize18copyValueAndReturn_1sxx_xztlFxxxRlze63_lIetilr_Tp5 : $@convention(thin) <S where S : _Trivial(64)> (@in S, @inout S) -> @out S
// CHECK: bb0(%0 : $*S, %1 : $*S, %2 : $*S):
// CHECK: copy_addr %2 to [init] %0 : $*S
// CHECK: destroy_addr %1 : $*S
// CHECK: %{{.*}} = tuple ()
// CHECK: return %{{.*}} : $()
// CHECK: } // end sil function '$s16eager_specialize18copyValueAndReturn_1sxx_xztlFxxxRlze63_lIetilr_Tp5'
// Generic with specialized dispatch. No more [specialize] attribute.
//
// CHECK-LABEL: sil [noinline] @$s16eager_specialize18copyValueAndReturn_1sxx_xztlF : $@convention(thin) <S> (@in S, @inout S) -> @out S
// Check if size == 8 bytes, i.e. 64 bits
// CHECK: %{{.*}} = metatype $@thick S.Type
// CHECK: %{{.*}} = builtin "sizeof"<S>(%{{.*}} : $@thick S.Type) : $Builtin.Word
// CHECK: %{{.*}} = integer_literal $Builtin.Word, 8
// CHECK: %{{.*}} = builtin "cmp_eq_Word"(%{{.*}} : $Builtin.Word, %{{.*}} : $Builtin.Word) : $Builtin.Int1
// CHECK: cond_br %{{.*}}, bb10, bb1
// Check if size == 4 bytes, i.e. 32 bits
// CHECK: bb1:
// CHECK: %{{.*}} = metatype $@thick S.Type
// CHECK: %{{.*}} = builtin "sizeof"<S>(%{{.*}} : $@thick S.Type) : $Builtin.Word
// CHECK: %{{.*}} = integer_literal $Builtin.Word, 4
// CHECK: %{{.*}} = builtin "cmp_eq_Word"(%{{.*}} : $Builtin.Word, %{{.*}} : $Builtin.Word) : $Builtin.Int1
// CHECK: cond_br %{{.*}}, bb8, bb4
// None of the constraint checks was successful; perform a generic copy.
// CHECK: bb6:
// CHECK: copy_addr %{{.*}} to [init] %0 : $*S
// CHECK: destroy_addr %1 : $*S
// CHECK: br bb7
// CHECK: bb7:
// CHECK: %{{.*}} = tuple ()
// CHECK: return %{{.*}} : $()
// Check if it is a trivial type
// CHECK: bb8:
// CHECK: %{{.*}} = builtin "ispod"<S>(%{{.*}} : $@thick S.Type) : $Builtin.Int1
// CHECK: cond_br %{{.*}}, bb9, bb5
// Invoke the specialized function for 32 bits
// CHECK: bb9:
// CHECK: %{{.*}} = unchecked_addr_cast %0 : $*S to $*S
// CHECK: %{{.*}} = unchecked_addr_cast %1 : $*S to $*S
// CHECK: %{{.*}} = unchecked_addr_cast %2 : $*S to $*S
// function_ref specialized copyValueAndReturn<A> (A, s : inout A) -> A
// CHECK: %{{.*}} = function_ref @$s16eager_specialize18copyValueAndReturn_1sxx_xztlFxxxRlze31_lIetilr_Tp5 : $@convention(thin) <τ_0_0 where τ_0_0 : _Trivial(32)> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: %{{.*}} = apply %{{.*}}<S>(%{{.*}}, %{{.*}}, %{{.*}}) : $@convention(thin) <τ_0_0 where τ_0_0 : _Trivial(32)> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: %{{.*}} = tuple ()
// CHECK: %{{.*}} = unchecked_trivial_bit_cast %{{.*}} : $() to $()
// CHECK: br bb7
// Check if it is a trivial type
// CHECK: bb10:
// CHECK: %{{.*}} = builtin "ispod"<S>(%{{.*}} : $@thick S.Type) : $Builtin.Int1
// CHECK: cond_br %{{.*}}, bb11, bb2
// Invoke the specialized function for 64 bits
// CHECK: bb11:
// CHECK: %{{.*}} = unchecked_addr_cast %0 : $*S to $*S
// CHECK: %{{.*}} = unchecked_addr_cast %1 : $*S to $*S
// CHECK: %{{.*}} = unchecked_addr_cast %2 : $*S to $*S
// function_ref specialized copyValueAndReturn<A> (A, s : inout A) -> A
// CHECK: %{{.*}} = function_ref @$s16eager_specialize18copyValueAndReturn_1sxx_xztlFxxxRlze63_lIetilr_Tp5 : $@convention(thin) <τ_0_0 where τ_0_0 : _Trivial(64)> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: %{{.*}} = apply %{{.*}}<S>(%{{.*}}, %{{.*}}, %{{.*}}) : $@convention(thin) <τ_0_0 where τ_0_0 : _Trivial(64)> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: %{{.*}} = tuple ()
// CHECK: %{{.*}} = unchecked_trivial_bit_cast %{{.*}} : $() to $()
// CHECK: br bb7
// CHECK: } // end sil function '$s16eager_specialize18copyValueAndReturn_1sxx_xztlF'
////////////////////////////////////////////////////////////////////
// Check the ability to specialize for _Trivial
////////////////////////////////////////////////////////////////////
// copyValueAndReturn2<A> (A, s : inout A) -> A
sil [noinline] [_specialize where S : _Trivial] @$s16eager_specialize19copyValueAndReturn2_1sxx_xztlF : $@convention(thin) <S> (@in S, @inout S) -> @out S {
bb0(%0 : $*S, %1 : $*S, %2 : $*S):
copy_addr %2 to [init] %0 : $*S
destroy_addr %1 : $*S
%7 = tuple ()
return %7 : $()
} // end sil function '$s16eager_specialize19copyValueAndReturn2_1sxx_xztlF'
// Check the specialization for _Trivial
// specialized copyValueAndReturn2<A> (A, s : inout A) -> A
// CHECK-LABEL: sil shared [noinline] @$s16eager_specialize19copyValueAndReturn2_1sxx_xztlFxxxRlzTlIetilr_Tp5 : $@convention(thin) <S where S : _Trivial> (@in S, @inout S) -> @out S
// CHECK: bb0(%0 : $*S, %1 : $*S, %2 : $*S):
// CHECK: copy_addr %2 to [init] %0 : $*S
// CHECK: destroy_addr %1 : $*S
// CHECK: %5 = tuple ()
// CHECK: return %5 : $()
// CHECK: } // end sil function '$s16eager_specialize19copyValueAndReturn2_1sxx_xztlFxxxRlzTlIetilr_Tp5'
// Generic with specialized dispatch. No more [specialize] attribute.
// copyValueAndReturn2<A> (A, s : inout A) -> A
// CHECK-LABEL: sil [noinline] @$s16eager_specialize19copyValueAndReturn2_1sxx_xztlF : $@convention(thin) <S> (@in S, @inout S) -> @out S
// CHECK: bb0(%0 : $*S, %1 : $*S, %2 : $*S):
// CHECK: %3 = metatype $@thick S.Type
// CHECK: %4 = builtin "ispod"<S>(%3 : $@thick S.Type) : $Builtin.Int1
// CHECK: cond_br %4, bb4, bb1
// None of the constraint checks was successful; perform a generic copy.
// CHECK: bb2:
// CHECK: copy_addr %2 to [init] %0 : $*S
// CHECK: destroy_addr %1 : $*S
// CHECK: br bb3
// CHECK: bb3:
// CHECK: %{{.*}} = tuple ()
// CHECK: return %{{.*}} : $()
// Invoke the specialized function for trivial types
// CHECK: bb4:
// CHECK: %{{.*}} = unchecked_addr_cast %0 : $*S to $*S
// CHECK: %{{.*}} = unchecked_addr_cast %1 : $*S to $*S
// CHECK: %{{.*}} = unchecked_addr_cast %2 : $*S to $*S
// function_ref specialized copyValueAndReturn2<A> (A, s : inout A) -> A
// CHECK: %{{.*}} = function_ref @$s16eager_specialize19copyValueAndReturn2_1sxx_xztlFxxxRlzTlIetilr_Tp5 : $@convention(thin) <τ_0_0 where τ_0_0 : _Trivial> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: %{{.*}} = apply %{{.*}}<S>(%{{.*}}, %{{.*}}, %{{.*}}) : $@convention(thin) <τ_0_0 where τ_0_0 : _Trivial> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: %{{.*}} = tuple ()
// CHECK: %{{.*}} = unchecked_trivial_bit_cast %{{.*}} : $() to $()
// CHECK: br bb3
// CHECK: } // end sil function '$s16eager_specialize19copyValueAndReturn2_1sxx_xztlF'
////////////////////////////////////////////////////////////////////
// Check the ability to specialize for _RefCountedObject
////////////////////////////////////////////////////////////////////
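// The dispatch generated for a _RefCountedObject constraint first asks whether S
// can be a reference type at all before invoking the specialization. A conceptual
// pseudo-Swift sketch of the control flow verified below (the helper spellings and
// the copyValueAndReturn3_refcounted name are illustrative, not real Swift API):
//
//   switch Builtin.canBeClass(S.self) {
//   case 1:                                                   // definitely a Swift class
//     return copyValueAndReturn3_refcounted(x, &s)
//   case 2 where _swift_isClassOrObjCExistentialType(S.self): // "maybe": ask the runtime
//     return copyValueAndReturn3_refcounted(x, &s)
//   default:                                                  // generic fallback copy
//     break
//   }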
// copyValueAndReturn3<A> (A, s : inout A) -> A
sil [noinline] [_specialize where S : _RefCountedObject] @$s16eager_specialize19copyValueAndReturn3_1sxx_xztlF : $@convention(thin) <S> (@in S, @inout S) -> @out S {
bb0(%0 : $*S, %1 : $*S, %2 : $*S):
copy_addr %2 to [init] %0 : $*S
destroy_addr %1 : $*S
%7 = tuple ()
return %7 : $()
} // end sil function '$s16eager_specialize19copyValueAndReturn3_1sxx_xztlF'
// Check for specialized function for _RefCountedObject
// specialized copyValueAndReturn3<A> (A, s : inout A) -> A
// CHECK-LABEL: sil shared [noinline] @$s16eager_specialize19copyValueAndReturn3_1sxx_xztlFxxxRlzRlIetilr_Tp5 : $@convention(thin) <S where S : _RefCountedObject> (@in S, @inout S) -> @out S
// CHECK: bb0(%0 : $*S, %1 : $*S, %2 : $*S):
// CHECK: copy_addr %2 to [init] %0 : $*S
// CHECK: destroy_addr %1 : $*S
// CHECK: %5 = tuple ()
// CHECK: return %5 : $()
// CHECK: } // end sil function '$s16eager_specialize19copyValueAndReturn3_1sxx_xztlFxxxRlzRlIetilr_Tp5'
// Generic with specialized dispatch. No more [specialize] attribute.
// copyValueAndReturn3<A> (A, s : inout A) -> A
// CHECK-LABEL: sil [noinline] @$s16eager_specialize19copyValueAndReturn3_1sxx_xztlF : $@convention(thin) <S> (@in S, @inout S) -> @out S {
// Check if it can be a class
// CHECK: bb0(%0 : $*S, %1 : $*S, %2 : $*S):
// CHECK: %3 = metatype $@thick S.Type
// CHECK: %4 = builtin "canBeClass"<S>(%3 : $@thick S.Type) : $Builtin.Int8
// CHECK: %5 = integer_literal $Builtin.Int8, 1
// CHECK: %6 = builtin "cmp_eq_Int8"(%4 : $Builtin.Int8, %5 : $Builtin.Int8) : $Builtin.Int1
// True if it is a Swift class
// CHECK: cond_br %6, bb5, bb8
// CHECK: bb3:
// CHECK: copy_addr %2 to [init] %0 : $*S
// CHECK: destroy_addr %1 : $*S
// CHECK: br bb4
// CHECK: bb4:
// CHECK: %{{.*}} = tuple ()
// CHECK: return %{{.*}} : $()
// Invoke the specialized function for ref-counted objects
// CHECK: bb7:
// CHECK: %{{.*}} = unchecked_addr_cast %0 : $*S to $*S
// CHECK: %{{.*}} = unchecked_addr_cast %1 : $*S to $*S
// CHECK: %{{.*}} = unchecked_addr_cast %2 : $*S to $*S
// function_ref specialized copyValueAndReturn3<A> (A, s : inout A) -> A
// CHECK: %{{.*}} = function_ref @$s16eager_specialize19copyValueAndReturn3_1sxx_xztlFxxxRlzRlIetilr_Tp5 : $@convention(thin) <τ_0_0 where τ_0_0 : _RefCountedObject> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: %{{.*}} = apply %{{.*}}<S>(%{{.*}}, %{{.*}}, %{{.*}}) : $@convention(thin) <τ_0_0 where τ_0_0 : _RefCountedObject> (@in τ_0_0, @inout τ_0_0) -> @out τ_0_0
// CHECK: %{{.*}} = tuple ()
// CHECK: %{{.*}} = unchecked_trivial_bit_cast %{{.*}} : $() to $()
// CHECK: br bb4
// Check if the object could be of a class or objc existential type
// CHECK: bb8:
// CHECK: %{{.*}} = integer_literal $Builtin.Int8, 2
// CHECK: %{{.*}} = builtin "cmp_eq_Int8"(%{{.*}} : $Builtin.Int8, %{{.*}} : $Builtin.Int8) : $Builtin.Int1
// CHECK: cond_br %{{.*}}, bb9, bb1
// CHECK: bb9:
// function_ref _swift_isClassOrObjCExistentialType
// CHECK: %{{.*}} = function_ref @_swift_isClassOrObjCExistentialType : $@convention(thin) <τ_0_0> (@thick τ_0_0.Type) -> Bool
// CHECK: %{{.*}} = apply %{{.*}}<S>(%{{.*}}) : $@convention(thin) <τ_0_0> (@thick τ_0_0.Type) -> Bool
// CHECK: %{{.*}} = struct_extract %{{.*}} : $Bool, #Bool._value
// CHECK: cond_br %{{.*}}, bb6, bb2
// CHECK: } // end sil function '$s16eager_specialize19copyValueAndReturn3_1sxx_xztlF'
////////////////////////////////////////////////////////////////////
// Check the ability to produce exported specializations, which can
// be referenced from other object files.
////////////////////////////////////////////////////////////////////
// exportSpecializations<A> (A) -> ()
sil [_specialize exported: true, where T == Int64] @$s16eager_specialize21exportSpecializationsyyxlF : $@convention(thin) <T> (@in T) -> () {
bb0(%0 : $*T):
destroy_addr %0 : $*T
%3 = tuple ()
return %3 : $()
} // end sil function '$s16eager_specialize21exportSpecializationsyyxlF'
////////////////////////////////////////////////////////////////////
// Check the ability to produce explicit partial specializations.
////////////////////////////////////////////////////////////////////
// checkExplicitPartialSpecialization<A, B> (A, B) -> ()
sil [_specialize kind: partial, where T == Int64] @$s16eager_specialize34checkExplicitPartialSpecializationyyx_q_tr0_lF : $@convention(thin) <T, S> (@in T, @in S) -> () {
bb0(%0 : $*T, %1 : $*S):
destroy_addr %1 : $*S
destroy_addr %0 : $*T
%6 = tuple ()
return %6 : $()
} // end sil function '$s16eager_specialize34checkExplicitPartialSpecializationyyx_q_tr0_lF'
// Check for specialized function for τ_0_0 == Int64
// specialized checkExplicitPartialSpecialization<A, B> (A, B) -> ()
// CHECK-LABEL: sil shared @$s16eager_specialize34checkExplicitPartialSpecializationyyx_q_tr0_lFs5Int64Vq_ADRszr0_lIetyi_Tp5 : $@convention(thin) <T, S where T == Int64> (Int64, @in S) -> ()
// CHECK: bb0(%0 : $Int64, %1 : $*S):
// CHECK: %2 = alloc_stack $Int64
// CHECK: store %0 to %2 : $*Int64
// CHECK: destroy_addr %1 : $*S
// CHECK: destroy_addr %2 : $*Int64
// CHECK: %6 = tuple ()
// CHECK: dealloc_stack %2 : $*Int64
// CHECK: return %6 : $()
// CHECK: } // end sil function '$s16eager_specialize34checkExplicitPartialSpecializationyyx_q_tr0_lFs5Int64Vq_ADRszr0_lIetyi_Tp5'
// Generic with specialized dispatch. No more [specialize] attribute.
// checkExplicitPartialSpecialization<A, B> (A, B) -> ()
// CHECK-LABEL: sil @$s16eager_specialize34checkExplicitPartialSpecializationyyx_q_tr0_lF : $@convention(thin) <T, S> (@in T, @in S) -> ()
// CHECK: bb0(%0 : $*T, %1 : $*S):
// CHECK: %2 = metatype $@thick T.Type
// CHECK: %3 = metatype $@thick Int64.Type
// CHECK: %4 = unchecked_bitwise_cast %2 : $@thick T.Type to $Builtin.Word
// CHECK: %5 = unchecked_bitwise_cast %3 : $@thick Int64.Type to $Builtin.Word
// CHECK: %6 = builtin "cmp_eq_Word"(%4 : $Builtin.Word, %5 : $Builtin.Word) : $Builtin.Int1
// CHECK: cond_br %6, bb4, bb1
// Type dispatch was not successful.
// CHECK: bb2:
// CHECK: destroy_addr %1 : $*S
// CHECK: destroy_addr %0 : $*T
// CHECK: br bb3
// CHECK: bb3:
// CHECK: %{{.*}} = tuple ()
// CHECK: return %{{.*}} : $()
// Invoke a partially specialized function.
// CHECK: bb4:
// CHECK: %{{.*}} = unchecked_addr_cast %0 : $*T to $*Int64
// CHECK: %{{.*}} = load %{{.*}} : $*Int64
// CHECK: %{{.*}} = unchecked_addr_cast %1 : $*S to $*S
// function_ref specialized checkExplicitPartialSpecialization<A, B> (A, B) -> ()
// CHECK: %{{.*}} = function_ref @$s16eager_specialize34checkExplicitPartialSpecializationyyx_q_tr0_lFs5Int64Vq_ADRszr0_lIetyi_Tp5 : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 == Int64> (Int64, @in τ_0_1) -> ()
// CHECK: %{{.*}} = apply %{{.*}}<Int64, S>(%{{.*}}, %{{.*}}) : $@convention(thin) <τ_0_0, τ_0_1 where τ_0_0 == Int64> (Int64, @in τ_0_1) -> ()
// CHECK: %{{.*}} = tuple ()
// CHECK: %{{.*}} = unchecked_trivial_bit_cast %{{.*}} : $() to $()
// CHECK: br bb3
// CHECK: } // end sil function '$s16eager_specialize34checkExplicitPartialSpecializationyyx_q_tr0_lF'
/////////////////////////////////////////////////////////////////////////
// Check that functions with unreachable instructions can be specialized.
/////////////////////////////////////////////////////////////////////////
protocol P {
}
struct T : P {
init()
}
extension P {
public static func f(_ x: Self) -> Self
}
sil @error : $@convention(thin) () -> Never
// CHECK-LABEL: sil @$s16eager_specialize1PPAAE1fyxxFZ : $@convention(method) <Self where Self : P> (@in Self, @thick Self.Type) -> @out Self
// CHECK: %{{.*}} = metatype $@thick Self.Type
// CHECK: %{{.*}} = metatype $@thick T.Type
// CHECK: %{{.*}} = unchecked_bitwise_cast %{{.*}} : $@thick Self.Type to $Builtin.Word
// CHECK: %{{.*}} = unchecked_bitwise_cast %{{.*}} : $@thick T.Type to $Builtin.Word
// CHECK: %{{.*}} = builtin "cmp_eq_Word"(%{{.*}} : $Builtin.Word, %{{.*}} : $Builtin.Word) : $Builtin.Int1
// CHECK: cond_br %{{.*}}, bb3, bb1
// CHECK: bb2:
// CHECK: %{{.*}} = function_ref @error : $@convention(thin) () -> Never
// CHECK: %{{.*}} = apply %{{.*}}() : $@convention(thin) () -> Never
// CHECK: unreachable
// CHECK: bb3:
// CHECK: %{{.*}} = unchecked_addr_cast %{{.*}} : $*Self to $*T
// CHECK: %{{.*}} = unchecked_addr_cast %{{.*}} : $*Self to $*T
// CHECK: %{{.*}} = load %{{.*}} : $*T
// CHECK: %{{.*}} = unchecked_trivial_bit_cast %{{.*}} : $@thick Self.Type to $@thick T.Type
// CHECK: %{{.*}} = function_ref @$s16eager_specialize1PPAAE1fyxxFZ4main1TV_Tg5 : $@convention(method) (T, @thick T.Type) -> T
// CHECK: %{{.*}} = apply %{{.*}}(%{{.*}}, %{{.*}}) : $@convention(method) (T, @thick T.Type) -> T
// CHECK: store %{{.*}} to %{{.*}} : $*T
// CHECK: %{{.*}} = tuple ()
// CHECK: unreachable
// CHECK: } // end sil function '$s16eager_specialize1PPAAE1fyxxFZ'
sil [_specialize exported: false, kind: full, where Self == T] @$s16eager_specialize1PPAAE1fyxxFZ : $@convention(method) <Self where Self : P> (@in Self, @thick Self.Type) -> @out Self {
bb0(%0 : $*Self, %1 : $*Self, %2 : $@thick Self.Type):
// function_ref error
%5 = function_ref @error : $@convention(thin) () -> Never
%6 = apply %5() : $@convention(thin) () -> Never
unreachable
} // end sil function '$s16eager_specialize1PPAAE1fyxxFZ'
////////////////////////////////////////////////////////////////////
// Check that IRGen generates efficient code for fixed-size Trivial
// constraints.
////////////////////////////////////////////////////////////////////
// Check that a specialization for _Trivial(32) uses direct loads and stores
// instead of value witness functions to load and store the value of a generic type.
// CHECK-IRGEN-LABEL: define linkonce_odr hidden swiftcc void @"$s16eager_specialize18copyValueAndReturn_1sxx_xztlFxxxRlze31_lIetilr_Tp5"(ptr noalias{{( nocapture)?}} sret(i32){{( captures\(none\))?}} %0, ptr noalias {{(nocapture|captures\(none\))}} dereferenceable(4) %1, ptr {{(nocapture|captures\(none\))}} dereferenceable(4) %2, ptr %S
// CHECK-IRGEN: entry:
// CHECK-IRGEN: %3 = load i32, ptr %2
// CHECK-IRGEN-NEXT: store i32 %3, ptr %0
// CHECK-IRGEN-NEXT: ret void
// CHECK-IRGEN-NEXT:}
// Check that a specialization for _Trivial(64) uses direct loads and stores
// instead of value witness functions to load and store the value of a generic type.
// CHECK-IRGEN-LABEL: define linkonce_odr hidden swiftcc void @"$s16eager_specialize18copyValueAndReturn_1sxx_xztlFxxxRlze63_lIetilr_Tp5"(ptr noalias{{( nocapture)?}} sret(i64){{( captures\(none\))?}} %0, ptr noalias {{(nocapture|captures\(none\))}} dereferenceable(8) %1, ptr {{(nocapture|captures\(none\))}} dereferenceable(8) %2, ptr %S
// CHECK-IRGEN: entry:
// CHECK-IRGEN: %3 = load i64, ptr %2
// CHECK-IRGEN-NEXT: store i64 %3, ptr %0
// CHECK-IRGEN-NEXT: ret void
// CHECK-IRGEN-NEXT: }
// Check that a specialization for _Trivial does not call the 'destroy' value witness,
// because it is known that the object is Trivial, i.e. contains no references.
// CHECK-IRGEN-LABEL: define linkonce_odr hidden swiftcc void @"$s16eager_specialize19copyValueAndReturn2_1sxx_xztlFxxxRlzTlIetilr_Tp5"(ptr noalias sret(%swift.opaque) %0, ptr noalias %1, ptr %2, ptr %S
// CHECK-IRGEN-NEXT: entry:
// CHECK-IRGEN: %3 = getelementptr inbounds ptr, ptr %S, i{{.*}} -1
// CHECK-IRGEN-NEXT: %S.valueWitnesses = load ptr, ptr %3
// CHECK-IRGEN-NEXT: %4 = getelementptr inbounds ptr, ptr %S.valueWitnesses
// CHECK-IRGEN-NEXT: %InitializeWithCopy = load ptr, ptr %4
// CHECK-IRGEN-arm64e-NEXT: ptrtoint ptr %4 to i64
// CHECK-IRGEN-arm64e-NEXT: call i64 @llvm.ptrauth.blend.i64
// CHECK-IRGEN-NEXT: call {{.*}} %InitializeWithCopy
// CHECK-IRGEN-NEXT: ret void
// CHECK-IRGEN-NEXT: }
// Check that a specialization for _RefCountedObject just copies the fixed-size reference
// and calls retain/release directly, instead of calling the value witness functions.
// The matching patterns in this test are intentionally imprecise so that they cover both ObjC and non-ObjC platforms.
// CHECK-IRGEN-LABEL: define{{.*}}@"$s16eager_specialize19copyValueAndReturn3_1sxx_xztlFxxxRlzRlIetilr_Tp5"
// CHECK-IRGEN: entry:
// CHECK-IRGEN-NOT: ret void
// CHECK-IRGEN: call {{.*}}etain
// CHECK-IRGEN-NOT: ret void
// CHECK-IRGEN: call {{.*}}elease
// CHECK-IRGEN: ret void
////////////////////////////////////////////////////////////////////
// Check that try_apply instructions are handled correctly by the
// eager specializer.
////////////////////////////////////////////////////////////////////
protocol ThrowingP {
func action() throws -> Int64
}
extension Int64 : ThrowingP {
public func action() throws -> Int64
}
class ClassUsingThrowingP {
required init()
@_specialize(exported: false, kind: full, where T == Int64)
public static func f<T>(_: T) throws -> Self where T : ThrowingP
@_specialize(exported: false, kind: full, where T == Int64)
public static func g<T>(_ t: T) throws -> Int64 where T : ThrowingP
deinit
}
// Int64.action()
sil @$ss5Int64V34eager_specialize_throwing_functionE6actionAByKF : $@convention(method) (Int64) -> (Int64, @error any Error)
// protocol witness for ThrowingP.action() in conformance Int64
sil @$ss5Int64V34eager_specialize_throwing_function9ThrowingPA2cDP6actionAByKFTW : $@convention(witness_method: ThrowingP) (@in_guaranteed Int64) -> (Int64, @error any Error)
sil @$s34eager_specialize_throwing_function19ClassUsingThrowingPCACycfc : $@convention(method) (@owned ClassUsingThrowingP) -> @owned ClassUsingThrowingP
sil @$s34eager_specialize_throwing_function19ClassUsingThrowingPCfd : $@convention(method) (@guaranteed ClassUsingThrowingP) -> @owned Builtin.NativeObject
// ClassUsingThrowingP.__allocating_init()
sil @$s34eager_specialize_throwing_function19ClassUsingThrowingPCACycfC : $@convention(method) (@thick ClassUsingThrowingP.Type) -> @owned ClassUsingThrowingP
// ClassUsingThrowingP.__deallocating_deinit
sil @$s34eager_specialize_throwing_function19ClassUsingThrowingPCfD : $@convention(method) (@owned ClassUsingThrowingP) -> ()
// f is a function that may throw according to its type, but never actually throws.
// Check that this function is properly specialized by the eager specializer.
// It should dispatch to its specialized version, using apply [nothrow] to invoke
// the specialized version.
// CHECK-LABEL: sil @$s34eager_specialize_throwing_function19ClassUsingThrowingPC1fyACXDxKAA0G1PRzlFZ : $@convention(method) <T where T : ThrowingP> (@in T, @thick ClassUsingThrowingP.Type) -> (@owned ClassUsingThrowingP, @error any Error)
// CHECK: [[SPECIALIZED:%.*]] = function_ref @$s34eager_specialize_throwing_function19ClassUsingThrowingPC1fyACXDxKAA0G1PRzlFZs5Int64V_Tg5 : $@convention(method) (Int64, @thick ClassUsingThrowingP.Type) -> (@owned ClassUsingThrowingP, @error any Error)
// CHECK: apply [nothrow] [[SPECIALIZED]]
// CHECK: // end sil function '$s34eager_specialize_throwing_function19ClassUsingThrowingPC1fyACXDxKAA0G1PRzlFZ'
// static ClassUsingThrowingP.f<A>(_:)
sil [_specialize exported: false, kind: full, where T == Int64] @$s34eager_specialize_throwing_function19ClassUsingThrowingPC1fyACXDxKAA0G1PRzlFZ : $@convention(method) <T where T : ThrowingP> (@in T, @thick ClassUsingThrowingP.Type) -> (@owned ClassUsingThrowingP, @error any Error) {
bb0(%0 : $*T, %1 : $@thick ClassUsingThrowingP.Type):
destroy_addr %0 : $*T
%4 = unchecked_trivial_bit_cast %1 : $@thick ClassUsingThrowingP.Type to $@thick @dynamic_self ClassUsingThrowingP.Type
// function_ref ClassUsingThrowingP.__allocating_init()
%7 = function_ref @$s34eager_specialize_throwing_function19ClassUsingThrowingPCACycfC : $@convention(method) (@thick ClassUsingThrowingP.Type) -> @owned ClassUsingThrowingP
%8 = upcast %4 : $@thick @dynamic_self ClassUsingThrowingP.Type to $@thick ClassUsingThrowingP.Type
%9 = apply %7(%8) : $@convention(method) (@thick ClassUsingThrowingP.Type) -> @owned ClassUsingThrowingP
%10 = unchecked_ref_cast %9 : $ClassUsingThrowingP to $ClassUsingThrowingP
return %10 : $ClassUsingThrowingP
} // end sil function '$s34eager_specialize_throwing_function19ClassUsingThrowingPC1fyACXDxKAA0G1PRzlFZ'
// g is a function that may throw according to its type and has a try_apply inside
// its body.
// Check that this function is properly specialized by the eager specializer.
// It should dispatch to its specialized version and use try_apply to invoke
// the specialized version.
// CHECK-LABEL: sil @$s34eager_specialize_throwing_function19ClassUsingThrowingPC1gys5Int64VxKAA0G1PRzlFZ : $@convention(method) <T where T : ThrowingP> (@in T, @thick ClassUsingThrowingP.Type) -> (Int64, @error any Error)
// CHECK: [[SPECIALIZED:%.*]] = function_ref @$s34eager_specialize_throwing_function19ClassUsingThrowingPC1gys5Int64VxKAA0G1PRzlFZAF_Tg5 : $@convention(method) (Int64, @thick ClassUsingThrowingP.Type) -> (Int64, @error any Error)
// CHECK: try_apply [[SPECIALIZED]]
// CHECK: // end sil function '$s34eager_specialize_throwing_function19ClassUsingThrowingPC1gys5Int64VxKAA0G1PRzlFZ'
// static ClassUsingThrowingP.g<A>(_:)
sil [_specialize exported: false, kind: full, where T == Int64] @$s34eager_specialize_throwing_function19ClassUsingThrowingPC1gys5Int64VxKAA0G1PRzlFZ : $@convention(method) <T where T : ThrowingP> (@in T, @thick ClassUsingThrowingP.Type) -> (Int64, @error any Error) {
bb0(%0 : $*T, %1 : $@thick ClassUsingThrowingP.Type):
%5 = witness_method $T, #ThrowingP.action : <Self where Self : ThrowingP> (Self) -> () throws -> Int64 : $@convention(witness_method: ThrowingP) <τ_0_0 where τ_0_0 : ThrowingP> (@in_guaranteed τ_0_0) -> (Int64, @error any Error)
try_apply %5<T>(%0) : $@convention(witness_method: ThrowingP) <τ_0_0 where τ_0_0 : ThrowingP> (@in_guaranteed τ_0_0) -> (Int64, @error any Error), normal bb1, error bb2
bb1(%7 : $Int64): // Preds: bb0
destroy_addr %0 : $*T
return %7 : $Int64
bb2(%10 : $Error): // Preds: bb0
destroy_addr %0 : $*T
throw %10 : $Error
} // end sil function '$s34eager_specialize_throwing_function19ClassUsingThrowingPC1gys5Int64VxKAA0G1PRzlFZ'
// Check that a specialization was produced and it is not inlined.
// CHECK-EAGER-SPECIALIZE-AND-GENERICS-INLINE-LABEL: sil{{.*}}@{{.*}}testSimpleGeneric{{.*}}where T : _Trivial(64, 64)
// CHECK-EAGER-SPECIALIZE-AND-GENERICS-INLINE-LABEL: sil{{.*}}@testSimpleGeneric :
// CHECK-EAGER-SPECIALIZE-AND-GENERICS-INLINE: [[METATYPE:%.*]] = metatype $@thick T.Type
// CHECK-EAGER-SPECIALIZE-AND-GENERICS-INLINE: [[SIZEOF:%.*]] = builtin "sizeof"<T>([[METATYPE]] : $@thick T.Type)
// CHECK-EAGER-SPECIALIZE-AND-GENERICS-INLINE: [[SIZE:%.*]] = integer_literal $Builtin.Word, 8
// CHECK-EAGER-SPECIALIZE-AND-GENERICS-INLINE: builtin "cmp_eq_Word"([[SIZEOF]] : $Builtin.Word, [[SIZE]] : $Builtin.Word)
// Invoke the specialization, but do not inline it!
// CHECK-EAGER-SPECIALIZE-AND-GENERICS-INLINE: function_ref @{{.*}}testSimpleGeneric{{.*}}
// CHECK-EAGER-SPECIALIZE-AND-GENERICS-INLINE: apply
// CHECK-EAGER-SPECIALIZE-AND-GENERICS-INLINE: // end sil function 'testSimpleGeneric'
sil [_specialize exported: false, kind: full, where T: _Trivial(64, 64)] @testSimpleGeneric : $@convention(thin) <T>(@in T) -> Builtin.Int64 {
bb0(%0 : $*T):
%1 = metatype $@thick T.Type
%2 = builtin "sizeof"<T>(%1 : $@thick T.Type) : $Builtin.Word
%8 = builtin "zextOrBitCast_Word_Int64"(%2 : $Builtin.Word) : $Builtin.Int64
destroy_addr %0 : $*T
return %8 : $Builtin.Int64
}
// Don't specialize `shared` definitions; they are imported from another module.
// CHECK-NOT: sil @$s24testDontSpecializeSharedSi_Ts5
sil shared [_specialize exported: true, kind: full, where T == Int] @testDontSpecializeShared : $@convention(thin) <T>(@in T) -> () {
bb(%0: $*T):
destroy_addr %0 : $*T
%t = tuple()
return %t : $()
}
// But do specialize `shared` definitions when they are targeted from another function.
// CHECK: sil @$s24testDontSpecializeSharedSd_Ts5
sil shared [_specialize exported: true, kind: full, target: "testDontSpecializeShared" ,where T == Double] @butSpecializeWhenTargetIsPresent : $@convention(thin) <T>(@in T) -> () {
bb(%0: $*T):
destroy_addr %0 : $*T
%t = tuple()
return %t : $()
}
// CHECK-LABEL: sil [ossa] @testReturnBlockWithArgument
// CHECK: [[BB1:bb[0-9]+]](%{{.*}} : $Optional<()>):
// CHECK-NEXT: br [[BBRET:bb[0-9]+]]
// CHECK: [[BBRET]]:
// CHECK-NEXT: tuple ()
// CHECK-NEXT: return
// CHECK: } // end sil function 'testReturnBlockWithArgument'
sil [_specialize exported: false, kind: full, where T == S] [ossa] @testReturnBlockWithArgument : $@convention(thin) <T> (@thick T.Type) -> () {
bb0(%0 : $@thick T.Type):
%41 = enum $Optional<()>, #Optional.none!enumelt
br bb4(%41 : $Optional<()>)
bb4(%37 : $Optional<()>):
%38 = tuple ()
return %38 : $()
}
sil_vtable ClassUsingThrowingP {
#ClassUsingThrowingP.init!allocator: (ClassUsingThrowingP.Type) -> () -> ClassUsingThrowingP : @$s34eager_specialize_throwing_function19ClassUsingThrowingPCACycfC // ClassUsingThrowingP.__allocating_init()
#ClassUsingThrowingP.init!initializer: (ClassUsingThrowingP.Type) -> () -> ClassUsingThrowingP : @$s34eager_specialize_throwing_function19ClassUsingThrowingPCACycfc // ClassUsingThrowingP.init()
#ClassUsingThrowingP.deinit!deallocator: @$s34eager_specialize_throwing_function19ClassUsingThrowingPCfD // ClassUsingThrowingP.__deallocating_deinit
}
sil_witness_table hidden Int64: ThrowingP module eager_specialize_throwing_function {
method #ThrowingP.action: <Self where Self : ThrowingP> (Self) -> () throws -> Int64 : @$ss5Int64V34eager_specialize_throwing_function9ThrowingPA2cDP6actionAByKFTW // protocol witness for ThrowingP.action() in conformance Int64
}
sil_default_witness_table hidden ThrowingP {
no_default
}