// This file tests TempRValueElimination and the new pass
// MandatoryTempRValueElimination, which works like the original
// TempRValueElimination except that it does not remove alloc_stack
// instructions that are associated with source variables. Running the
// mandatory pass at Onone helps to reduce copies of large structs, e.g.
// InlineArrays or structs containing InlineArrays. Copying large structs
// can be a performance problem, even at Onone. rdar://151629149

// RUN: %target-sil-opt -sil-print-types -enable-sil-verify-all %s -update-borrowed-from -temp-rvalue-elimination | %FileCheck %s --check-prefix=CHECK --check-prefix=CHECK-OPT
// RUN: %target-sil-opt -sil-print-types -enable-sil-verify-all %s -update-borrowed-from -mandatory-temp-rvalue-elimination | %FileCheck %s --check-prefix=CHECK --check-prefix=CHECK-ONONE

// REQUIRES: swift_in_compiler

sil_stage canonical

import Builtin
import Swift

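// Both RUN configurations exercise temp-rvalue elimination; the mandatory
// variant additionally keeps alloc_stacks that are tied to source variables.
// The canonical pattern being removed looks roughly like this at the source
// level (a hypothetical sketch, not part of this test):
//
//   func f(lhs: GS<Int>, rhs: inout GS<Int>) {
//     let tmp = rhs                    // alloc_stack + copy_addr into a temporary
//     _ = lhs._value < tmp._value      // reads go to the temporary
//   }                                  // destroy_addr + dealloc_stack
//
// When nothing can modify the copy source between the copy and the last use
// of the temporary, the reads are forwarded to the source and the temporary
// is deleted.
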
/////////////
// Utility //
/////////////

struct GS<Base> {
var _base: Base
var _value: Builtin.Int64
}

class Klass {
@_hasStorage var i: Int
init()
}

class OtherClass {
var klass: Klass
}

struct Two {
var a: Klass
var b: Klass
}

struct NonTrivialStruct {
var val: Klass
}

public enum FakeOptional<T> {
case none
case some(T)
}

struct MOS: ~Copyable {}

protocol P {
func foo()
}

sil [ossa] @getKlass : $@convention(thin) () -> @owned Klass
sil [ossa] @getNonTrivialStruct : $@convention(thin) () -> @owned NonTrivialStruct
sil [ossa] @getMOS : $@convention(thin) () -> @owned MOS

sil @unknown : $@convention(thin) () -> ()
sil [readonly] [ossa] @read_only_coroutine : $@yield_once @convention(thin) () -> @yields @in_guaranteed P

sil [ossa] @guaranteed_user : $@convention(thin) (@guaranteed Klass) -> ()
sil [ossa] @guaranteed_user_with_result : $@convention(thin) (@guaranteed Klass) -> @out Klass
sil [ossa] @inguaranteed_user_without_result_NTS : $@convention(thin) (@in_guaranteed NonTrivialStruct) -> ()
sil [ossa] @inguaranteed_user_without_result_MOS : $@convention(thin) (@in_guaranteed MOS) -> ()

sil [ossa] @inguaranteed_user_without_result : $@convention(thin) (@in_guaranteed Klass) -> () {
bb0(%0 : $*Klass):
%9999 = tuple()
return %9999 : $()
}

sil [ossa] @inguaranteed_user_with_result : $@convention(thin) (@in_guaranteed Klass) -> @out Klass {
bb0(%0 : $*Klass, %1 : $*Klass):
copy_addr %1 to [init] %0 : $*Klass
%9999 = tuple()
return %9999 : $()
}

sil @throwing_function : $@convention(thin) (@in_guaranteed Klass) -> ((), @error Error)
sil @use_gsbase_builtinnativeobject : $@convention(thin) (@guaranteed GS<Builtin.NativeObject>) -> ()
sil [readonly] @readonly_throwing_func : $@convention(thin) (@in_guaranteed Int) -> @error Error

sil_global @globalString : $String

///////////
// Tests //
///////////

// CHECK-LABEL: sil [ossa] @rvalue_simple
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>):
// CHECK: [[A1:%.*]] = struct_element_addr %0 : $*GS<B>, #GS._value
// CHECK: [[V1:%.*]] = load [trivial] [[A1]] : $*Builtin.Int64
// CHECK-NOT: alloc_stack
// CHECK-NOT: copy_addr
// CHECK: [[A2:%.*]] = struct_element_addr %1 : $*GS<B>, #GS._value
// CHECK: [[V2:%.*]] = load [trivial] [[A2]] : $*Builtin.Int64
// CHECK: %{{.*}} = builtin "cmp_slt_Int64"([[V1]] : $Builtin.Int64, [[V2]] : $Builtin.Int64) : $Builtin.Int1
// CHECK-NOT: destroy_addr
// CHECK-NOT: dealloc_stack
// CHECK: return %{{.*}} : $()
// CHECK-LABEL: } // end sil function 'rvalue_simple'
sil [ossa] @rvalue_simple : $@convention(thin) <B> (@in_guaranteed GS<B>, @inout GS<B>) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>):
%2 = struct_element_addr %0 : $*GS<B>, #GS._value
%3 = load [trivial] %2 : $*Builtin.Int64
%4 = alloc_stack $GS<B>
copy_addr %1 to [init] %4 : $*GS<B>
%6 = struct_element_addr %4 : $*GS<B>, #GS._value
%7 = load [trivial] %6 : $*Builtin.Int64
%8 = builtin "cmp_slt_Int64"(%3 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}

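// In 'rvalue_simple' above, all uses of the temporary %4 are rewritten to use
// the copy source %1 directly, so the alloc_stack, copy_addr, destroy_addr and
// dealloc_stack all disappear (see the CHECK-NOT lines).
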
// CHECK-LABEL: sil [ossa] @multi_block :
// CHECK-NOT: alloc_stack
// CHECK-LABEL: } // end sil function 'multi_block'
sil [ossa] @multi_block : $@convention(thin) <B> (@in_guaranteed GS<B>, @inout GS<B>) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>):
%2 = struct_element_addr %0 : $*GS<B>, #GS._value
%3 = load [trivial] %2 : $*Builtin.Int64
%4 = alloc_stack $GS<B>
br bb1

bb1:
copy_addr %1 to [init] %4 : $*GS<B>
%6 = struct_element_addr %4 : $*GS<B>, #GS._value
%7 = load [trivial] %6 : $*Builtin.Int64
%8 = builtin "cmp_slt_Int64"(%3 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
br bb2

bb2:
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}

// CHECK-LABEL: sil [ossa] @copy_from_temp
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
// CHECK-ONONE-NEXT: debug_step
// CHECK-NEXT: builtin
// CHECK-NEXT: copy_addr %1 to [init] %0 : $*GS<B>
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil [ossa] @copy_from_temp : $@convention(thin) <B> (@inout GS<B>, Builtin.Int64) -> @out GS<B> {
bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
%4 = alloc_stack $GS<B>
copy_addr %1 to [init] %4 : $*GS<B>
%8 = builtin "cmp_slt_Int64"(%2 : $Builtin.Int64, %2 : $Builtin.Int64) : $Builtin.Int1
copy_addr %4 to [init] %0 : $*GS<B>
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}

// CHECK-LABEL: sil [ossa] @copy_back_to_src
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>):
// CHECK-ONONE-NEXT: debug_step
// CHECK-NEXT: struct_element_addr %1
// CHECK-NEXT: load
// CHECK-NEXT: builtin
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil [ossa] @copy_back_to_src : $@convention(thin) <B> (@in_guaranteed GS<B>, @inout GS<B>) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>):
%4 = alloc_stack $GS<B>
copy_addr %1 to [init] %4 : $*GS<B>
%6 = struct_element_addr %4 : $*GS<B>, #GS._value
%7 = load [trivial] %6 : $*Builtin.Int64
%8 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
copy_addr %4 to %1 : $*GS<B>
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}

// CHECK-LABEL: sil [ossa] @take_from_temp
// CHECK: bb0(%0 : $*B, %1 : $*GS<B>):
// CHECK-NEXT: [[STK:%.*]] = alloc_stack
// CHECK-NEXT: copy_addr %1 to [init] [[STK]]
// CHECK-NEXT: [[INNER:%.*]] = struct_element_addr
// CHECK-NEXT: copy_addr [take] [[INNER]]
// CHECK-NEXT: dealloc_stack
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil [ossa] @take_from_temp : $@convention(thin) <B> (@inout B, @inout GS<B>) -> () {
bb0(%0 : $*B, %1 : $*GS<B>):
%4 = alloc_stack $GS<B>
copy_addr %1 to [init] %4 : $*GS<B>
%7 = struct_element_addr %4 : $*GS<B>, #GS._base
copy_addr [take] %7 to %0 : $*B
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}

// CHECK-LABEL: sil [ossa] @store_before_load_take
// CHECK: [[STK:%[0-9]+]] = alloc_stack
// CHECK: copy_addr [take] %0 to [init] [[STK]]
// CHECK: store
// CHECK: load [take] [[STK]]
// CHECK: return
// CHECK: } // end sil function 'store_before_load_take'
sil [ossa] @store_before_load_take : $@convention(thin) (@inout Builtin.NativeObject, @owned Builtin.NativeObject) -> @owned Builtin.NativeObject {
bb0(%0 : $*Builtin.NativeObject, %1 : @owned $Builtin.NativeObject):
%stk = alloc_stack $Builtin.NativeObject
copy_addr [take] %0 to [init] %stk : $*Builtin.NativeObject
store %1 to [init] %0 : $*Builtin.NativeObject
%obj = load [take] %stk : $*Builtin.NativeObject
dealloc_stack %stk : $*Builtin.NativeObject
return %obj : $Builtin.NativeObject
}

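// Note: in 'store_before_load_take' the temporary must stay. The copy takes
// the old value out of %0 and %0 is re-initialized before the final load;
// forwarding the load to %0 would incorrectly observe the new value.
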
// CHECK-LABEL: sil [ossa] @copy_with_take_and_copy_from_src
// CHECK: bb0({{.*}}):
// CHECK-ONONE-NEXT: debug_step
// CHECK-NEXT: destroy_addr %0
// CHECK-NEXT: copy_addr [take] %1 to [init] %0
// CHECK-NEXT: tuple
// CHECK-NEXT: return
// CHECK: } // end sil function 'copy_with_take_and_copy_from_src'
sil [ossa] @copy_with_take_and_copy_from_src : $@convention(thin) (@inout Builtin.NativeObject, @in Builtin.NativeObject) -> () {
bb0(%0 : $*Builtin.NativeObject, %1 : $*Builtin.NativeObject):
%stk = alloc_stack $Builtin.NativeObject
copy_addr [take] %0 to [init] %stk : $*Builtin.NativeObject
copy_addr [take] %1 to [init] %0 : $*Builtin.NativeObject
destroy_addr %stk : $*Builtin.NativeObject
dealloc_stack %stk : $*Builtin.NativeObject
%v = tuple ()
return %v : $()
}

// CHECK-LABEL: sil [ossa] @copy_take_and_try_apply
// CHECK-NOT: copy_addr
// CHECK: try_apply {{%[0-9]+}}(%0)
// CHECK: bb1({{.*}}):
// CHECK: destroy_addr %0
// CHECK: bb2({{.*}}):
// CHECK: destroy_addr %0
// CHECK: bb3:
// CHECK: store %1 to [init] %0
// CHECK: } // end sil function 'copy_take_and_try_apply'
sil [ossa] @copy_take_and_try_apply : $@convention(thin) (@inout Klass, @owned Klass) -> () {
bb0(%0 : $*Klass, %1 : @owned $Klass):
%2 = alloc_stack $Klass
copy_addr [take] %0 to [init] %2 : $*Klass
%5 = function_ref @throwing_function : $@convention(thin) (@in_guaranteed Klass) -> ((), @error Error)
try_apply %5(%2) : $@convention(thin) (@in_guaranteed Klass) -> ((), @error Error), normal bb1, error bb2
bb1(%r : $()):
br bb3
bb2(%e : $Error):
br bb3
bb3:
store %1 to [init] %0 : $*Klass
destroy_addr %2 : $*Klass
dealloc_stack %2 : $*Klass
%9 = tuple ()
return %9 : $()
}

// Currently we cannot optimize this. But in theory it's possible to eliminate
// both copies: the value merely round-trips through the temporary and ends up
// back in %0 unchanged.
//
// CHECK-LABEL: sil [ossa] @copy_and_copy_back
// CHECK: [[STK:%[0-9]+]] = alloc_stack
// CHECK: copy_addr [take] %0 to [init] [[STK]]
// CHECK: copy_addr [take] [[STK]] to [init] %0
// CHECK: } // end sil function 'copy_and_copy_back'
sil [ossa] @copy_and_copy_back : $@convention(thin) (@inout Builtin.NativeObject) -> () {
bb0(%0 : $*Builtin.NativeObject):
%stk = alloc_stack $Builtin.NativeObject
copy_addr [take] %0 to [init] %stk : $*Builtin.NativeObject
copy_addr [take] %stk to [init] %0 : $*Builtin.NativeObject
dealloc_stack %stk : $*Builtin.NativeObject
%v = tuple ()
return %v : $()
}

// CHECK-LABEL: sil [ossa] @dont_allow_copy_take_from_projection
// CHECK: [[STK:%[0-9]+]] = alloc_stack
// CHECK: copy_addr [take] %1 to [init] [[STK]]
// CHECK: } // end sil function 'dont_allow_copy_take_from_projection'
sil [ossa] @dont_allow_copy_take_from_projection : $@convention(thin) (@in Two) -> @out Two {
bb0(%0 : $*Two, %1 : $*Two):
%a0 = struct_element_addr %0 : $*Two, #Two.a
%b0 = struct_element_addr %0 : $*Two, #Two.b
%s = alloc_stack $Two
copy_addr [take] %1 to [init] %s : $*Two
%as = struct_element_addr %s : $*Two, #Two.a
%bs = struct_element_addr %s : $*Two, #Two.b
copy_addr [take] %as to [init] %a0 : $*Klass
copy_addr [take] %bs to [init] %b0 : $*Klass
dealloc_stack %s : $*Two
%r = tuple ()
return %r : $()
}

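// The next two tests are not optimized: a use of the temporary that sits in a
// different basic block than the copy_addr keeps the temporary alive.
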
// CHECK-LABEL: sil [ossa] @load_in_wrong_block
// CHECK: bb0(%0 : $*GS<B>):
// CHECK-NEXT: alloc_stack
// CHECK-NEXT: copy_addr
// CHECK-NEXT: struct_element_addr
// CHECK-NEXT: br bb1
// CHECK: return
sil [ossa] @load_in_wrong_block : $@convention(thin) <B> (@in_guaranteed GS<B>) -> () {
bb0(%0 : $*GS<B>):
%4 = alloc_stack $GS<B>
copy_addr %0 to [init] %4 : $*GS<B>
%6 = struct_element_addr %4 : $*GS<B>, #GS._value
br bb1

bb1:
%7 = load [trivial] %6 : $*Builtin.Int64
%8 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}

// CHECK-LABEL: sil [ossa] @projection_in_wrong_block
// CHECK: bb0(%0 : $*GS<B>):
// CHECK-NEXT: alloc_stack
// CHECK-NEXT: copy_addr
// CHECK-NEXT: br bb1
// CHECK: return
sil [ossa] @projection_in_wrong_block : $@convention(thin) <B> (@in_guaranteed GS<B>) -> () {
bb0(%0 : $*GS<B>):
%4 = alloc_stack $GS<B>
copy_addr %0 to [init] %4 : $*GS<B>
br bb1

bb1:
%6 = struct_element_addr %4 : $*GS<B>, #GS._value
%7 = load [trivial] %6 : $*Builtin.Int64
%8 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}

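// A store to the copy source after the last load from the temporary does not
// block the optimization: the loads are simply redirected to the source, as
// the next two tests verify.
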
// CHECK-LABEL: sil [ossa] @store_after_load
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
// CHECK-NEXT: [[A1:%.*]] = struct_element_addr %1
// CHECK-ONONE-NEXT: debug_step
// CHECK-NEXT: [[A2:%.*]] = struct_element_addr %1
// CHECK-NEXT: load [trivial] [[A2]]
// CHECK-NEXT: store %2 to [trivial] [[A1]]
// CHECK-NEXT: builtin
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil [ossa] @store_after_load : $@convention(thin) <B> (@in_guaranteed GS<B>, @inout GS<B>, Builtin.Int64) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
%3 = struct_element_addr %1 : $*GS<B>, #GS._value
%4 = alloc_stack $GS<B>
copy_addr %1 to [init] %4 : $*GS<B>
%6 = struct_element_addr %4 : $*GS<B>, #GS._value
%7 = load [trivial] %6 : $*Builtin.Int64
store %2 to [trivial] %3 : $*Builtin.Int64
%8 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}

// CHECK-LABEL: sil [ossa] @store_after_two_loads
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
// CHECK-NEXT: [[A1:%.*]] = struct_element_addr %1
// CHECK-ONONE-NEXT: debug_step
// CHECK-NEXT: [[A2:%.*]] = struct_element_addr %1
// CHECK-NEXT: load [trivial] [[A2]]
// CHECK-NEXT: load [trivial] [[A2]]
// CHECK-NEXT: store %2 to [trivial] [[A1]]
// CHECK-NEXT: builtin
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil [ossa] @store_after_two_loads : $@convention(thin) <B> (@in_guaranteed GS<B>, @inout GS<B>, Builtin.Int64) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
%3 = struct_element_addr %1 : $*GS<B>, #GS._value
%4 = alloc_stack $GS<B>
copy_addr %1 to [init] %4 : $*GS<B>
%6 = struct_element_addr %4 : $*GS<B>, #GS._value
%7 = load [trivial] %6 : $*Builtin.Int64
%8 = load [trivial] %6 : $*Builtin.Int64
store %2 to [trivial] %3 : $*Builtin.Int64
%9 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %8 : $Builtin.Int64) : $Builtin.Int1
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}

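// Conversely, a (potential) store to the copy source before or between loads
// from the temporary forces the temporary to stay, as the following three
// tests check.
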
// CHECK-LABEL: sil [ossa] @store_before_load
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
// CHECK-NEXT: struct_element_addr %1
// CHECK-NEXT: [[T:%.*]] = alloc_stack
// CHECK-NEXT: copy_addr %1 to [init] [[T]]
// CHECK-NEXT: [[A:%.*]] = struct_element_addr [[T]]
// CHECK-NEXT: store
// CHECK-NEXT: load [trivial] [[A]]
// CHECK-NEXT: builtin
// CHECK-NEXT: destroy_addr [[T]]
// CHECK-NEXT: dealloc_stack [[T]]
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil [ossa] @store_before_load : $@convention(thin) <B> (@in_guaranteed GS<B>, @inout GS<B>, Builtin.Int64) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
%3 = struct_element_addr %1 : $*GS<B>, #GS._value
%4 = alloc_stack $GS<B>
copy_addr %1 to [init] %4 : $*GS<B>
%6 = struct_element_addr %4 : $*GS<B>, #GS._value
store %2 to [trivial] %3 : $*Builtin.Int64
%7 = load [trivial] %6 : $*Builtin.Int64
%8 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}

// CHECK-LABEL: sil [ossa] @store_between_loads
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
// CHECK-NEXT: struct_element_addr %1
// CHECK-NEXT: [[T:%.*]] = alloc_stack
// CHECK-NEXT: copy_addr %1 to [init] [[T]]
// CHECK-NEXT: [[A:%.*]] = struct_element_addr [[T]]
// CHECK-NEXT: load [trivial] [[A]]
// CHECK-NEXT: store
// CHECK-NEXT: load [trivial] [[A]]
// CHECK-NEXT: builtin
// CHECK-NEXT: destroy_addr [[T]]
// CHECK-NEXT: dealloc_stack [[T]]
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil [ossa] @store_between_loads : $@convention(thin) <B> (@in_guaranteed GS<B>, @inout GS<B>, Builtin.Int64) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
%3 = struct_element_addr %1 : $*GS<B>, #GS._value
%4 = alloc_stack $GS<B>
copy_addr %1 to [init] %4 : $*GS<B>
%6 = struct_element_addr %4 : $*GS<B>, #GS._value
%7 = load [trivial] %6 : $*Builtin.Int64
store %2 to [trivial] %3 : $*Builtin.Int64
%8 = load [trivial] %6 : $*Builtin.Int64
%9 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %8 : $Builtin.Int64) : $Builtin.Int1
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}

// CHECK-LABEL: sil [ossa] @potential_store_before_load
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
// CHECK-NEXT: struct_element_addr %1
// CHECK-NEXT: [[T:%.*]] = alloc_stack
// CHECK-NEXT: copy_addr %1 to [init] [[T]]
// CHECK-NEXT: [[A:%.*]] = struct_element_addr [[T]]
// CHECK: apply
// CHECK-NEXT: load [trivial] [[A]]
// CHECK-NEXT: builtin
// CHECK-NEXT: destroy_addr [[T]]
// CHECK-NEXT: dealloc_stack [[T]]
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil [ossa] @potential_store_before_load : $@convention(thin) <B> (@in_guaranteed GS<B>, @inout_aliasable GS<B>, Builtin.Int64) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
%3 = struct_element_addr %1 : $*GS<B>, #GS._value
%4 = alloc_stack $GS<B>
copy_addr %1 to [init] %4 : $*GS<B>
%6 = struct_element_addr %4 : $*GS<B>, #GS._value
%f = function_ref @unknown : $@convention(thin) () -> ()
%a = apply %f() : $@convention(thin) () -> ()
%7 = load [trivial] %6 : $*Builtin.Int64
%8 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
destroy_addr %4 : $*GS<B>
dealloc_stack %4 : $*GS<B>
%9999 = tuple()
return %9999 : $()
}

// Test temp RValue elimination on switches.
// CHECK-LABEL: sil [ossa] @rvalueSwitch
// CHECK: bb1:
// CHECK-ONONE-NEXT: debug_step
// CHECK-NEXT: struct_element_addr %1
// CHECK-NEXT: load
// CHECK-NOT: alloc_stack $UnfoldSequence
// CHECK: return
sil [ossa] @rvalueSwitch : $@convention(method) <Element, State> (@inout UnfoldSequence<Element, State>) -> @out Optional<Element> {
bb0(%0 : $*Optional<Element>, %1 : $*UnfoldSequence<Element, State>):
%2 = struct_element_addr %1 : $*UnfoldSequence<Element, State>, #UnfoldSequence._done
%3 = struct_element_addr %2 : $*Bool, #Bool._value
%4 = load [trivial] %3 : $*Builtin.Int1
cond_br %4, bb4, bb1

bb1:
%6 = alloc_stack $UnfoldSequence<Element, State>
copy_addr %1 to [init] %6 : $*UnfoldSequence<Element, State>
%8 = struct_element_addr %6 : $*UnfoldSequence<Element, State>, #UnfoldSequence._next
%9 = load [copy] %8 : $*@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@inout τ_0_0) -> @out Optional<τ_0_1> for <State, Element>
%10 = alloc_stack $Optional<Element>
%11 = struct_element_addr %1 : $*UnfoldSequence<Element, State>, #UnfoldSequence._state
%13 = apply %9(%10, %11) : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@inout τ_0_0) -> @out Optional<τ_0_1> for <State, Element>
destroy_value %9 : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@inout τ_0_0) -> @out Optional<τ_0_1> for <State, Element>
switch_enum_addr %10 : $*Optional<Element>, case #Optional.some!enumelt: bb3, case #Optional.none!enumelt: bb2

bb2:
destroy_addr %10 : $*Optional<Element>
dealloc_stack %10 : $*Optional<Element>
destroy_addr %6 : $*UnfoldSequence<Element, State>
dealloc_stack %6 : $*UnfoldSequence<Element, State>
%19 = integer_literal $Builtin.Int1, -1
%20 = struct $Bool (%19 : $Builtin.Int1)
store %20 to [trivial] %2 : $*Bool
%22 = alloc_stack $Optional<Element>
inject_enum_addr %22 : $*Optional<Element>, #Optional.none!enumelt
copy_addr [take] %22 to [init] %0 : $*Optional<Element>
dealloc_stack %22 : $*Optional<Element>
br bb5

bb3:
%27 = unchecked_take_enum_data_addr %10 : $*Optional<Element>, #Optional.some!enumelt
%28 = init_enum_data_addr %0 : $*Optional<Element>, #Optional.some!enumelt
copy_addr [take] %27 to [init] %28 : $*Element
dealloc_stack %10 : $*Optional<Element>
destroy_addr %6 : $*UnfoldSequence<Element, State>
dealloc_stack %6 : $*UnfoldSequence<Element, State>
inject_enum_addr %0 : $*Optional<Element>, #Optional.some!enumelt
br bb5

bb4:
%35 = alloc_stack $Optional<Element>
inject_enum_addr %35 : $*Optional<Element>, #Optional.none!enumelt
copy_addr [take] %35 to [init] %0 : $*Optional<Element>
dealloc_stack %35 : $*Optional<Element>
br bb5

bb5:
%40 = tuple ()
return %40 : $()
}

// Make sure that we can eliminate temporaries passed via a temporary rvalue to
// an @in_guaranteed function.
//
// CHECK-LABEL: sil [ossa] @inguaranteed_no_result : $@convention(thin) (@inout Klass) -> () {
// CHECK: bb0([[ARG:%.*]] : $*Klass):
// CHECK-NOT: copy_addr
// CHECK: apply {{%.*}}([[ARG]])
// CHECK-NOT: destroy_addr
// CHECK: } // end sil function 'inguaranteed_no_result'
sil [ossa] @inguaranteed_no_result : $@convention(thin) (@inout Klass) -> () {
bb0(%0 : $*Klass):
%1 = alloc_stack $Klass
copy_addr %0 to [init] %1 : $*Klass
%5 = function_ref @inguaranteed_user_without_result : $@convention(thin) (@in_guaranteed Klass) -> ()
%6 = apply %5(%1) : $@convention(thin) (@in_guaranteed Klass) -> ()
destroy_addr %1 : $*Klass
dealloc_stack %1 : $*Klass
%9 = tuple ()
return %9 : $()
}

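// At the source level 'inguaranteed_no_result' corresponds roughly to
// (a hypothetical sketch; `borrows` is illustrative only):
//
//   func caller(k: inout Klass) {
//     borrows(k)   // SILGen copies `k` into a temporary for the
//   }              // @in_guaranteed argument; the pass passes `k` directly
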
// CHECK-LABEL: sil [ossa] @try_apply_argument : $@convention(thin) (@inout Klass) -> () {
// CHECK-NOT: copy_addr
// CHECK: try_apply {{%[0-9]+}}(%0)
// CHECK: } // end sil function 'try_apply_argument'
sil [ossa] @try_apply_argument : $@convention(thin) (@inout Klass) -> () {
bb0(%0 : $*Klass):
%1 = alloc_stack $Klass
copy_addr %0 to [init] %1 : $*Klass
%5 = function_ref @throwing_function : $@convention(thin) (@in_guaranteed Klass) -> ((), @error Error)
try_apply %5(%1) : $@convention(thin) (@in_guaranteed Klass) -> ((), @error Error), normal bb1, error bb2
bb1(%r : $()):
br bb3
bb2(%e : $Error):
br bb3
bb3:
destroy_addr %1 : $*Klass
dealloc_stack %1 : $*Klass
%9 = tuple ()
return %9 : $()
}

// Make sure that we can eliminate temporaries passed via a temporary rvalue to
// an @in_guaranteed function.
//
// CHECK-LABEL: sil [ossa] @inguaranteed_with_result : $@convention(thin) (@inout Klass) -> () {
// CHECK: bb0([[ARG:%.*]] : $*Klass):
// dead temp
// CHECK: [[TMP_OUT:%.*]] = alloc_stack $Klass
// CHECK-NOT: copy_addr
// CHECK: apply {{%.*}}([[TMP_OUT]], [[ARG]])
// CHECK-NOT: copy_addr
// CHECK: destroy_addr [[TMP_OUT]]
// CHECK-NOT: destroy_addr
// CHECK: } // end sil function 'inguaranteed_with_result'
sil [ossa] @inguaranteed_with_result : $@convention(thin) (@inout Klass) -> () {
bb0(%0 : $*Klass):
%1 = alloc_stack $Klass
%1a = alloc_stack $Klass
copy_addr %0 to [init] %1 : $*Klass
%5 = function_ref @inguaranteed_user_with_result : $@convention(thin) (@in_guaranteed Klass) -> @out Klass
%6 = apply %5(%1a, %1) : $@convention(thin) (@in_guaranteed Klass) -> @out Klass
destroy_addr %1a : $*Klass
destroy_addr %1 : $*Klass
dealloc_stack %1a : $*Klass
dealloc_stack %1 : $*Klass
%9 = tuple ()
return %9 : $()
}

// CHECK-LABEL: sil [ossa] @non_overlapping_lifetime : $@convention(thin) (@in Klass) -> () {
// CHECK: bb0([[ARG:%.*]] : $*Klass):
// CHECK-NEXT: [[TMP:%.*]] = alloc_stack $Klass
// CHECK-ONONE-NEXT: debug_step
// CHECK-ONONE-NEXT: debug_step
// CHECK-NEXT: copy_addr [[ARG]] to [init] [[TMP]]
// CHECK-NEXT: destroy_addr [[ARG]]
// CHECK: apply %{{[0-9]*}}([[TMP]])
// CHECK-NEXT: destroy_addr [[TMP]]
// CHECK-NEXT: dealloc_stack [[TMP]]
// CHECK-NEXT: tuple
// CHECK-NEXT: return
// CHECK-NEXT: } // end sil function 'non_overlapping_lifetime'
sil [ossa] @non_overlapping_lifetime : $@convention(thin) (@in Klass) -> () {
bb0(%0 : $*Klass):
%1a = alloc_stack $Klass

%1 = alloc_stack $Klass
%2 = alloc_stack $Klass
copy_addr %0 to [init] %2 : $*Klass
copy_addr [take] %2 to [init] %1 : $*Klass
dealloc_stack %2 : $*Klass
copy_addr %1 to [init] %1a : $*Klass
destroy_addr %0 : $*Klass
destroy_addr %1 : $*Klass
dealloc_stack %1 : $*Klass

%3 = function_ref @inguaranteed_user_without_result : $@convention(thin) (@in_guaranteed Klass) -> ()
apply %3(%1a) : $@convention(thin) (@in_guaranteed Klass) -> ()
destroy_addr %1a : $*Klass
dealloc_stack %1a : $*Klass
%9999 = tuple()
return %9999 : $()
}

sil [ossa] @$createKlass : $@convention(thin) () -> @out Klass
sil [ossa] @$appendKlass : $@convention(method) (@in_guaranteed Klass, @inout Klass) -> ()

// CHECK-LABEL: sil [ossa] @$overlapping_lifetime_in_function_all : $@convention(thin) () -> @out Klass {
// CHECK: [[S1:%.*]] = alloc_stack $Klass
// CHECK: [[S2:%.*]] = alloc_stack $Klass
// CHECK: copy_addr [[S1]] to [init] [[S2]]
// CHECK: apply {{%.*}}([[S2]], [[S1]])
// CHECK: }
sil [ossa] @$overlapping_lifetime_in_function_all : $@convention(thin) () -> @out Klass {
bb0(%0 : $*Klass):
%1 = alloc_stack $Klass
%2 = function_ref @$createKlass : $@convention(thin) () -> @out Klass
%3 = apply %2(%1) : $@convention(thin) () -> @out Klass
%4 = alloc_stack $Klass
copy_addr %1 to [init] %4 : $*Klass
%6 = function_ref @$appendKlass : $@convention(method) (@in_guaranteed Klass, @inout Klass) -> ()
%7 = apply %6(%4, %1) : $@convention(method) (@in_guaranteed Klass, @inout Klass) -> ()
destroy_addr %4 : $*Klass
dealloc_stack %4 : $*Klass
copy_addr [take] %1 to [init] %0 : $*Klass
dealloc_stack %1 : $*Klass
%12 = tuple ()
return %12 : $()
}

sil [ossa] @getP : $@convention(thin) () -> @out Optional<P>

// CHECK-LABEL: sil [ossa] @handle_open_existential_addr : $@convention(thin) () -> () {
// CHECK: [[P:%.*]] = unchecked_take_enum_data_addr
// CHECK-NOT: copy_addr
// CHECK: open_existential_addr immutable_access [[P]]
// CHECK: }
sil [ossa] @handle_open_existential_addr : $@convention(thin) () -> () {
bb0:
%2 = alloc_stack $Optional<P>
%3 = function_ref @getP : $@convention(thin) () -> @out Optional<P>
%4 = apply %3(%2) : $@convention(thin) () -> @out Optional<P>
cond_br undef, bb1, bb3

bb1:
%9 = unchecked_take_enum_data_addr %2 : $*Optional<P>, #Optional.some!enumelt
%10 = alloc_stack $P
copy_addr %9 to [init] %10 : $*P
%13 = open_existential_addr immutable_access %10 : $*P to $*@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637", P) Self
%14 = witness_method $@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637", P) Self, #P.foo : <Self where Self : P> (Self) -> () -> (), %13 : $*@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637", P) Self : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
%15 = apply %14<@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637", P) Self>(%13) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
destroy_addr %2 : $*Optional<P>
destroy_addr %10 : $*P
dealloc_stack %10 : $*P
dealloc_stack %2 : $*Optional<P>
br bb2

bb2:
%23 = tuple ()
return %23 : $()

bb3:
destroy_addr %2 : $*Optional<P>
dealloc_stack %2 : $*Optional<P>
br bb2
}

// CHECK-LABEL: sil [ossa] @open_existential_addr_blocks_optimization : $@convention(thin) () -> () {
// CHECK: [[P:%.*]] = alloc_stack $any P
// CHECK: copy_addr {{.*}} to [init] [[P]]
// CHECK: }
sil [ossa] @open_existential_addr_blocks_optimization : $@convention(thin) () -> () {
bb0:
%2 = alloc_stack $Optional<P>
%3 = function_ref @getP : $@convention(thin) () -> @out Optional<P>
%4 = apply %3(%2) : $@convention(thin) () -> @out Optional<P>
cond_br undef, bb1, bb3

bb1:
%9 = unchecked_take_enum_data_addr %2 : $*Optional<P>, #Optional.some!enumelt
%10 = alloc_stack $P
copy_addr %9 to [init] %10 : $*P
destroy_addr %2 : $*Optional<P>
%13 = open_existential_addr immutable_access %10 : $*P to $*@opened("6E7A6328-EF75-11E9-A383-D0817AD3F637", P) Self
%14 = witness_method $@opened("6E7A6328-EF75-11E9-A383-D0817AD3F637", P) Self, #P.foo : <Self where Self : P> (Self) -> () -> (), %13 : $*@opened("6E7A6328-EF75-11E9-A383-D0817AD3F637", P) Self : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
%15 = apply %14<@opened("6E7A6328-EF75-11E9-A383-D0817AD3F637", P) Self>(%13) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
destroy_addr %10 : $*P
dealloc_stack %10 : $*P
dealloc_stack %2 : $*Optional<P>
br bb2

bb2:
%23 = tuple ()
return %23 : $()

bb3:
destroy_addr %2 : $*Optional<P>
dealloc_stack %2 : $*Optional<P>
br bb2
}

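// Unlike 'handle_open_existential_addr' above, here the source %2 is
// destroyed before the opened existential value is used, so the temporary
// cannot simply be replaced by the source.
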
// CHECK-LABEL: sil [ossa] @witness_method_blocks_optimization : $@convention(thin) () -> () {
// CHECK: [[P:%.*]] = alloc_stack $any P
// CHECK: copy_addr {{.*}} to [init] [[P]]
// CHECK: }
sil [ossa] @witness_method_blocks_optimization : $@convention(thin) () -> () {
bb0:
%2 = alloc_stack $Optional<P>
%3 = function_ref @getP : $@convention(thin) () -> @out Optional<P>
%4 = apply %3(%2) : $@convention(thin) () -> @out Optional<P>
cond_br undef, bb1, bb3

bb1:
%9 = unchecked_take_enum_data_addr %2 : $*Optional<P>, #Optional.some!enumelt
%10 = alloc_stack $P
copy_addr %9 to [init] %10 : $*P
%13 = open_existential_addr immutable_access %10 : $*P to $*@opened("7E7A6328-EF75-11E9-A383-D0817AD3F637", P) Self
destroy_addr %2 : $*Optional<P>
%14 = witness_method $@opened("7E7A6328-EF75-11E9-A383-D0817AD3F637", P) Self, #P.foo : <Self where Self : P> (Self) -> () -> (), %13 : $*@opened("7E7A6328-EF75-11E9-A383-D0817AD3F637", P) Self : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
%15 = apply %14<@opened("7E7A6328-EF75-11E9-A383-D0817AD3F637", P) Self>(%13) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
destroy_addr %10 : $*P
dealloc_stack %10 : $*P
dealloc_stack %2 : $*Optional<P>
br bb2

bb2:
%23 = tuple ()
return %23 : $()

bb3:
destroy_addr %2 : $*Optional<P>
dealloc_stack %2 : $*Optional<P>
br bb2
}

sil [ossa] @takeGuaranteedObj : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()

// Now that we support ossa, eliminate the alloc_stack and change the load
// [take] to a load [copy] in the process.
//
// CHECK-LABEL: sil [ossa] @copyWithLoadRelease : $@convention(thin) (@in_guaranteed Builtin.NativeObject) -> () {
// CHECK: bb0(%0 : $*Builtin.NativeObject):
// CHECK-NOT: alloc_stack
// CHECK: [[VAL:%.*]] = load [copy] %0 : $*Builtin.NativeObject
// CHECK: apply %{{.*}}([[VAL]]) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
// CHECK: destroy_value [[VAL]] : $Builtin.NativeObject
// CHECK-LABEL: } // end sil function 'copyWithLoadRelease'
sil [ossa] @copyWithLoadRelease : $@convention(thin) (@in_guaranteed Builtin.NativeObject) -> () {
bb0(%0 : $*Builtin.NativeObject):
%stk = alloc_stack $Builtin.NativeObject
copy_addr %0 to [init] %stk : $*Builtin.NativeObject
%obj = load [take] %stk : $*Builtin.NativeObject
%f = function_ref @takeGuaranteedObj : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
%call = apply %f(%obj) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
destroy_value %obj : $Builtin.NativeObject
dealloc_stack %stk : $*Builtin.NativeObject
%v = tuple ()
return %v : $()
}

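// Note on the rewrite above: the temporary held its own copy, so the
// load [take] from it becomes a load [copy] from %0, whose value the caller
// still owns (@in_guaranteed).
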
// Remove a copy that is released via a load. Leave the load [take] alone since
// our copy_addr is taking from source.
//
// CHECK-LABEL: sil [ossa] @takeWithLoadRelease : $@convention(thin) (@in Builtin.NativeObject) -> () {
// CHECK: bb0(%0 : $*Builtin.NativeObject):
// CHECK: [[V:%.*]] = load [take] %0 : $*Builtin.NativeObject
// CHECK: apply %{{.*}}([[V]]) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
// CHECK: destroy_value [[V]] : $Builtin.NativeObject
// CHECK-LABEL: } // end sil function 'takeWithLoadRelease'
sil [ossa] @takeWithLoadRelease : $@convention(thin) (@in Builtin.NativeObject) -> () {
bb0(%0 : $*Builtin.NativeObject):
%stk = alloc_stack $Builtin.NativeObject
copy_addr [take] %0 to [init] %stk : $*Builtin.NativeObject
%obj = load [take] %stk : $*Builtin.NativeObject
%f = function_ref @takeGuaranteedObj : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
%call = apply %f(%obj) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
destroy_value %obj : $Builtin.NativeObject
dealloc_stack %stk : $*Builtin.NativeObject
%v = tuple ()
return %v : $()
}

// Do not remove a copy that is released via a load of a projection. This is not
// the pattern from SILGen that we are targeting, so we reduce the state space by banning the pattern.
//
// CHECK-LABEL: sil [ossa] @takeWithLoadReleaseOfProjection : $@convention(thin) (@in GS<Builtin.NativeObject>) -> () {
// CHECK: alloc_stack
// CHECK: } // end sil function 'takeWithLoadReleaseOfProjection'
sil [ossa] @takeWithLoadReleaseOfProjection : $@convention(thin) (@in GS<Builtin.NativeObject>) -> () {
bb0(%0 : $*GS<Builtin.NativeObject>):
%stk = alloc_stack $GS<Builtin.NativeObject>
copy_addr [take] %0 to [init] %stk : $*GS<Builtin.NativeObject>
%proj = struct_element_addr %stk : $*GS<Builtin.NativeObject>, #GS._base
%obj = load [take] %proj : $*Builtin.NativeObject
%f = function_ref @takeGuaranteedObj : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
%call = apply %f(%obj) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
destroy_value %obj : $Builtin.NativeObject
dealloc_stack %stk : $*GS<Builtin.NativeObject>
%v = tuple ()
return %v : $()
}

// CHECK-LABEL: sil [ossa] @dont_optimize_with_load_in_different_block
// CHECK: [[STK:%[0-9]+]] = alloc_stack
// CHECK: copy_addr %0 to [init] [[STK]]
// CHECK: bb1:
// CHECK: load [take] [[STK]]
// CHECK: bb2:
// CHECK: copy_addr %1 to %0
// CHECK: load [take] [[STK]]
// CHECK: } // end sil function 'dont_optimize_with_load_in_different_block'
sil [ossa] @dont_optimize_with_load_in_different_block : $@convention(thin) (@inout GS<Builtin.NativeObject>, @in_guaranteed GS<Builtin.NativeObject>) -> @owned GS<Builtin.NativeObject> {
bb0(%0 : $*GS<Builtin.NativeObject>, %1 : $*GS<Builtin.NativeObject>):
%f = function_ref @use_gsbase_builtinnativeobject : $@convention(thin) (@guaranteed GS<Builtin.NativeObject>) -> ()
%stk = alloc_stack $GS<Builtin.NativeObject>
copy_addr %0 to [init] %stk : $*GS<Builtin.NativeObject>
cond_br undef, bb1, bb2

bb1:
%obj = load [take] %stk : $*GS<Builtin.NativeObject>
br bb3(%obj : $GS<Builtin.NativeObject>)

bb2:
copy_addr %1 to %0 : $*GS<Builtin.NativeObject>
%obj2 = load [take] %stk : $*GS<Builtin.NativeObject>
br bb3(%obj2 : $GS<Builtin.NativeObject>)

bb3(%obj3 : @owned $GS<Builtin.NativeObject>):
dealloc_stack %stk : $*GS<Builtin.NativeObject>
apply %f(%obj3) : $@convention(thin) (@guaranteed GS<Builtin.NativeObject>) -> ()
return %obj3 : $GS<Builtin.NativeObject>
}

// CHECK-LABEL: sil [ossa] @extendAccessScopeOverLoad
// CHECK: [[GLOBAL:%.*]] = global_addr @globalString
// CHECK: [[ACCESS:%.*]] = begin_access [read] [dynamic] [[GLOBAL]]
// CHECK: [[LOAD:%.*]] = load [copy] [[ACCESS]]
// CHECK: end_access [[ACCESS]]
// CHECK: return [[LOAD]]
// CHECK-LABEL: } // end sil function 'extendAccessScopeOverLoad'
sil [ossa] @extendAccessScopeOverLoad : $@convention(thin) () -> @owned String {
bb0:
%1 = global_addr @globalString : $*String
%3 = begin_access [read] [dynamic] %1 : $*String
%4 = alloc_stack $String
copy_addr %3 to [init] %4 : $*String
end_access %3 : $*String
%6 = load [copy] %4 : $*String
destroy_addr %4 : $*String
dealloc_stack %4 : $*String
return %6 : $String
}

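// To remove the temporary here, the pass extends the [read] access scope so
// that it still covers the load (the end_access moves below the load in the
// CHECK lines). The following tests pin down when that extension is legal.
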
sil [ossa] @loadString : $@convention(thin) (@in_guaranteed String) -> @owned String {
[%0: read v**]
[global: ]
bb0(%0 : $*String):
%1 = load [copy] %0 : $*String
return %1 : $String
}

// CHECK-LABEL: sil [ossa] @extendAccessScopeOverApply
// CHECK: [[GLOBAL:%.*]] = global_addr @globalString
// CHECK: [[ACCESS:%.*]] = begin_access [read] [dynamic] [[GLOBAL]]
// CHECK: [[RESULT:%.*]] = apply {{%[0-9]+}}([[ACCESS]])
// CHECK: end_access [[ACCESS]]
// CHECK: return [[RESULT]]
// CHECK-LABEL: } // end sil function 'extendAccessScopeOverApply'
sil [ossa] @extendAccessScopeOverApply : $@convention(thin) () -> @owned String {
bb0:
%1 = global_addr @globalString : $*String
%3 = begin_access [read] [dynamic] %1 : $*String
%4 = alloc_stack $String
copy_addr %3 to [init] %4 : $*String
end_access %3 : $*String
%f = function_ref @loadString : $@convention(thin) (@in_guaranteed String) -> @owned String
%a = apply %f(%4) : $@convention(thin) (@in_guaranteed String) -> @owned String
destroy_addr %4 : $*String
dealloc_stack %4 : $*String
return %a : $String
}

// CHECK-LABEL: sil [ossa] @dontExtendModifyAccess
// CHECK: begin_access
// CHECK-NEXT: alloc_stack
// CHECK-NEXT: copy_addr
// CHECK-NEXT: end_access
// CHECK-NEXT: load
// CHECK-LABEL: } // end sil function 'dontExtendModifyAccess'
sil [ossa] @dontExtendModifyAccess : $@convention(thin) () -> @owned String {
bb0:
%1 = global_addr @globalString : $*String
%3 = begin_access [modify] [dynamic] %1 : $*String
%4 = alloc_stack $String
copy_addr %3 to [init] %4 : $*String
end_access %3 : $*String
%6 = load [copy] %4 : $*String
destroy_addr %4 : $*String
dealloc_stack %4 : $*String
return %6 : $String
}

// CHECK-LABEL: sil [ossa] @dontExtendAccessScopeOverEndAccess
// CHECK: begin_access [read] [dynamic] %0 : $*Int
// CHECK-NEXT: begin_access [read] [dynamic] %{{[0-9]+}} : $*String
// CHECK-NEXT: alloc_stack
// CHECK-NEXT: copy_addr
// CHECK-NEXT: end_access %{{[0-9]+}} : $*String
// CHECK-NEXT: end_access %{{[0-9]+}} : $*Int
// CHECK-NEXT: load
// CHECK-LABEL: } // end sil function 'dontExtendAccessScopeOverEndAccess'
sil [ossa] @dontExtendAccessScopeOverEndAccess : $@convention(thin) (@in_guaranteed Int) -> @owned String {
bb0(%0 : $*Int):
%1 = global_addr @globalString : $*String
%2 = begin_access [read] [dynamic] %0 : $*Int
%3 = begin_access [read] [dynamic] %1 : $*String
%4 = alloc_stack $String
copy_addr %3 to [init] %4 : $*String
end_access %3 : $*String
end_access %2 : $*Int
%6 = load [copy] %4 : $*String
destroy_addr %4 : $*String
dealloc_stack %4 : $*String
return %6 : $String
}

// CHECK-LABEL: sil [ossa] @dontExtendAccessScopeOverBeginAccess : $@convention(thin) (@in Klass) -> () {
// CHECK: bb0(%0 : $*Klass):
// CHECK: [[STACK:%.*]] = alloc_stack $Klass
// CHECK: [[ACCESS:%.*]] = begin_access [read] [static] [[STACK]] : $*Klass
// CHECK: apply %{{.*}}([[ACCESS]]) : $@convention(thin) (@in_guaranteed Klass) -> ()
// CHECK: destroy_addr [[STACK]] : $*Klass
// CHECK-LABEL: } // end sil function 'dontExtendAccessScopeOverBeginAccess'
sil [ossa] @dontExtendAccessScopeOverBeginAccess : $@convention(thin) (@in Klass) -> () {
bb0(%0 : $*Klass):
%stack = alloc_stack $Klass
%access = begin_access [read] [static] %0 : $*Klass
copy_addr [take] %access to [init] %stack : $*Klass
end_access %access : $*Klass
%f = function_ref @inguaranteed_user_without_result : $@convention(thin) (@in_guaranteed Klass) -> ()
%access2 = begin_access [read] [static] %stack : $*Klass
%call = apply %f(%access2) : $@convention(thin) (@in_guaranteed Klass) -> ()
end_access %access2 : $*Klass
destroy_addr %stack : $*Klass
dealloc_stack %stack : $*Klass
%9999 = tuple()
return %9999 : $()
}

// CHECK-LABEL: sil [ossa] @dont_optimize_with_modify_inside_access
// CHECK: [[STK:%[0-9]+]] = alloc_stack $Klass
// CHECK: copy_addr %0 to [init] [[STK]]
// CHECK: begin_access [read] [static] [[STK]]
// CHECK-LABEL: } // end sil function 'dont_optimize_with_modify_inside_access'
sil [ossa] @dont_optimize_with_modify_inside_access : $@convention(thin) (@inout Klass, @owned Klass) -> () {
bb0(%0 : $*Klass, %1 : @owned $Klass):
%stack = alloc_stack $Klass
copy_addr %0 to [init] %stack : $*Klass
%f = function_ref @inguaranteed_user_without_result : $@convention(thin) (@in_guaranteed Klass) -> ()
%access = begin_access [read] [static] %stack : $*Klass
store %1 to [assign] %0 : $*Klass // This store prevents the optimization
%call = apply %f(%access) : $@convention(thin) (@in_guaranteed Klass) -> ()
end_access %access : $*Klass
destroy_addr %stack : $*Klass
dealloc_stack %stack : $*Klass
%9999 = tuple()
return %9999 : $()
}

// Just check that we don't crash here.
// Currently this pattern is not optimized, but we might optimize it in the future.
sil [ossa] @dont_extend_access_scope_over_term_inst : $@convention(thin) (@guaranteed Klass) -> () {
bb0(%0 : @guaranteed $Klass):
%1 = ref_element_addr %0 : $Klass, #Klass.i
%2 = begin_access [read] [dynamic] %1 : $*Int
%3 = alloc_stack $Int
copy_addr %2 to [init] %3 : $*Int
end_access %2 : $*Int
%6 = function_ref @readonly_throwing_func : $@convention(thin) (@in_guaranteed Int) -> @error Error
try_apply %6(%3) : $@convention(thin) (@in_guaranteed Int) -> @error Error, normal bb1, error bb2
bb1(%8 : $()):
destroy_addr %3 : $*Int
dealloc_stack %3 : $*Int
br bb3
bb2(%12 : @owned $Error):
destroy_addr %3 : $*Int
dealloc_stack %3 : $*Int
destroy_value %12 : $Error
br bb3
bb3:
%17 = tuple ()
return %17 : $()
}

/////////////////
// Store Tests //
/////////////////

////////////////////////////////////////
// Unchecked Take Enum Data Addr Inst //
////////////////////////////////////////

// Make sure we only handle this in the copy_addr case. With time, we should
// also handle the store case.
//
// CHECK-LABEL: sil [ossa] @unchecked_take_enum_data_addr_rvalue_simple : $@convention(thin) <B> (@in_guaranteed Optional<GS<B>>, @inout Optional<GS<B>>) -> () {
// CHECK-NOT: alloc_stack
// CHECK: } // end sil function 'unchecked_take_enum_data_addr_rvalue_simple'
sil [ossa] @unchecked_take_enum_data_addr_rvalue_simple : $@convention(thin) <B> (@in_guaranteed Optional<GS<B>>, @inout Optional<GS<B>>) -> () {
bb0(%0 : $*Optional<GS<B>>, %1 : $*Optional<GS<B>>):
%0a = unchecked_take_enum_data_addr %0 : $*Optional<GS<B>>, #Optional.some!enumelt
%2 = struct_element_addr %0a : $*GS<B>, #GS._value
%3 = load [trivial] %2 : $*Builtin.Int64
%4 = alloc_stack $Optional<GS<B>>
copy_addr %1 to [init] %4 : $*Optional<GS<B>>
%4a = unchecked_take_enum_data_addr %4 : $*Optional<GS<B>>, #Optional.some!enumelt
%6 = struct_element_addr %4a : $*GS<B>, #GS._value
%7 = load [trivial] %6 : $*Builtin.Int64
%8 = builtin "cmp_slt_Int64"(%3 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
destroy_addr %4 : $*Optional<GS<B>>
dealloc_stack %4 : $*Optional<GS<B>>
%9999 = tuple()
return %9999 : $()
}

// We do not support this today, since I am still bringing up store support.
//
// CHECK-LABEL: sil [ossa] @unchecked_take_enum_data_addr_store_rvalue_simple : $@convention(thin) (@in_guaranteed Optional<GS<Klass>>, @owned Optional<GS<Klass>>) -> () {
// CHECK: alloc_stack
// CHECK: } // end sil function 'unchecked_take_enum_data_addr_store_rvalue_simple'
sil [ossa] @unchecked_take_enum_data_addr_store_rvalue_simple : $@convention(thin) (@in_guaranteed Optional<GS<Klass>>, @owned Optional<GS<Klass>>) -> () {
bb0(%0 : $*Optional<GS<Klass>>, %1 : @owned $Optional<GS<Klass>>):
%0a = unchecked_take_enum_data_addr %0 : $*Optional<GS<Klass>>, #Optional.some!enumelt
%2 = struct_element_addr %0a : $*GS<Klass>, #GS._value
%3 = load [trivial] %2 : $*Builtin.Int64
%4 = alloc_stack $Optional<GS<Klass>>
store %1 to [init] %4 : $*Optional<GS<Klass>>
%4a = unchecked_take_enum_data_addr %4 : $*Optional<GS<Klass>>, #Optional.some!enumelt
%6 = struct_element_addr %4a : $*GS<Klass>, #GS._value
%7 = load [trivial] %6 : $*Builtin.Int64
%8 = builtin "cmp_slt_Int64"(%3 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
destroy_addr %4 : $*Optional<GS<Klass>>
dealloc_stack %4 : $*Optional<GS<Klass>>
%9999 = tuple()
return %9999 : $()
}

// CHECK-LABEL: sil [ossa] @eliminate_fix_lifetime_on_dest_copyaddr : $@convention(thin) (@inout Klass) -> () {
// CHECK-NOT: alloc_stack
// CHECK: fix_lifetime %0
// CHECK-NOT: alloc_stack
// CHECK: } // end sil function 'eliminate_fix_lifetime_on_dest_copyaddr'
sil [ossa] @eliminate_fix_lifetime_on_dest_copyaddr : $@convention(thin) (@inout Klass) -> () {
bb0(%0 : $*Klass):
%3 = alloc_stack $Klass
copy_addr %0 to [init] %3 : $*Klass
fix_lifetime %3 : $*Klass
destroy_addr %3 : $*Klass
dealloc_stack %3 : $*Klass
%9999 = tuple()
return %9999 : $()
}

// CHECK-LABEL: sil [ossa] @test_yield
// CHECK: [[TA:%[0-9]+]] = ref_tail_addr
// CHECK-NOT: copy_addr
// CHECK: yield [[TA]]
// CHECK: } // end sil function 'test_yield'
sil [ossa] @test_yield : $@yield_once @convention(thin) (@guaranteed Klass) -> @yields @in_guaranteed Two {
bb0(%0 : @guaranteed $Klass):
%1 = alloc_stack $Two
%2 = ref_tail_addr [immutable] %0 : $Klass, $Two
copy_addr %2 to [init] %1 : $*Two
yield %1 : $*Two, resume bb1, unwind bb2

bb1:
destroy_addr %1 : $*Two
dealloc_stack %1 : $*Two
%90 = tuple ()
return %90 : $()

bb2:
destroy_addr %1 : $*Two
dealloc_stack %1 : $*Two
unwind
}

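// The 'take_from_original_copy_addr' family below checks that when the
// temporary is initialized by taking from the source, the final use of the
// temporary can be retargeted to operate directly on the source.
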
// CHECK-LABEL: sil [ossa] @take_from_original_copy_addr__final_use_load_take : {{.*}} {
// CHECK: [[GET:%[^,]+]] = function_ref @getKlass
// CHECK: [[USER:%[^,]+]] = function_ref @inguaranteed_user_without_result
// CHECK: [[INSTANCE_1:%[^,]+]] = apply [[GET]]()
// CHECK: [[ADDR:%[^,]+]] = alloc_stack $Klass
// CHECK: store [[INSTANCE_1]] to [init] [[ADDR]]
// CHECK: apply [[USER]]([[ADDR]])
// CHECK: [[INSTANCE_2:%[^,]+]] = load [take] [[ADDR]]
// CHECK: dealloc_stack [[ADDR]]
// CHECK: return [[INSTANCE_2]]
// CHECK-LABEL: } // end sil function 'take_from_original_copy_addr__final_use_load_take'
sil [ossa] @take_from_original_copy_addr__final_use_load_take : $() -> @owned Klass {
%getKlass = function_ref @getKlass : $@convention(thin) () -> @owned Klass
%user = function_ref @inguaranteed_user_without_result : $@convention(thin) (@in_guaranteed Klass) -> ()
%instance_1 = apply %getKlass() : $@convention(thin) () -> @owned Klass
%src = alloc_stack $Klass
store %instance_1 to [init] %src : $*Klass
apply %user(%src) : $@convention(thin) (@in_guaranteed Klass) -> ()
%tmp = alloc_stack $Klass
copy_addr [take] %src to [init] %tmp : $*Klass
%instance_2 = load [take] %tmp : $*Klass
dealloc_stack %tmp : $*Klass
dealloc_stack %src : $*Klass
return %instance_2 : $Klass
}

// CHECK-LABEL: sil [ossa] @take_from_original_copy_addr__final_use_copy_addr_take : {{.*}} {
// CHECK: {{bb[0-9]+}}([[OUT:%[^,]+]] :
// CHECK: [[GET:%[^,]+]] = function_ref @getKlass
// CHECK: [[USER:%[^,]+]] = function_ref @inguaranteed_user_without_result
// CHECK: [[INSTANCE_1:%[^,]+]] = apply [[GET]]()
// CHECK: [[SRC:%[^,]+]] = alloc_stack $Klass
// CHECK: store [[INSTANCE_1]] to [init] [[SRC]]
// CHECK: apply [[USER]]([[SRC]])
// CHECK: copy_addr [take] [[SRC]] to [init] [[OUT]]
// CHECK: dealloc_stack [[SRC]]
// CHECK-LABEL: } // end sil function 'take_from_original_copy_addr__final_use_copy_addr_take'
sil [ossa] @take_from_original_copy_addr__final_use_copy_addr_take : $() -> @out Klass {
entry(%out : $*Klass):
%getKlass = function_ref @getKlass : $@convention(thin) () -> @owned Klass
%user = function_ref @inguaranteed_user_without_result : $@convention(thin) (@in_guaranteed Klass) -> ()
%instance_1 = apply %getKlass() : $@convention(thin) () -> @owned Klass
%src = alloc_stack $Klass
store %instance_1 to [init] %src : $*Klass
apply %user(%src) : $@convention(thin) (@in_guaranteed Klass) -> ()
%tmp = alloc_stack $Klass
copy_addr [take] %src to [init] %tmp : $*Klass
copy_addr [take] %tmp to [init] %out : $*Klass
dealloc_stack %tmp : $*Klass
dealloc_stack %src : $*Klass
%retval = tuple ()
return %retval : $()
}

// CHECK-LABEL: sil [ossa] @take_from_original_copy_addr__final_use_field_load_copy : {{.*}} {
// CHECK: [[GET:%[^,]+]] = function_ref @getNonTrivialStruct
// CHECK: [[USER:%[^,]+]] = function_ref @inguaranteed_user_without_result_NTS
// CHECK: [[INSTANCE:%[^,]+]] = apply [[GET]]()
// CHECK: [[SRC:%[^,]+]] = alloc_stack $NonTrivialStruct
// CHECK: store [[INSTANCE]] to [init] [[SRC]]
// CHECK: apply [[USER]]([[SRC]])
// CHECK: [[FIELD_ADDR:%[^,]+]] = struct_element_addr [[SRC]]
// CHECK: [[FIELD:%[^,]+]] = load [copy] [[FIELD_ADDR]]
// CHECK: destroy_addr [[SRC]]
// CHECK: dealloc_stack [[SRC]]
// CHECK: return [[FIELD]]
// CHECK-LABEL: } // end sil function 'take_from_original_copy_addr__final_use_field_load_copy'
sil [ossa] @take_from_original_copy_addr__final_use_field_load_copy : $() -> @owned Klass {
%get = function_ref @getNonTrivialStruct : $@convention(thin) () -> @owned NonTrivialStruct
%user = function_ref @inguaranteed_user_without_result_NTS : $@convention(thin) (@in_guaranteed NonTrivialStruct) -> ()
%instance_1 = apply %get() : $@convention(thin) () -> @owned NonTrivialStruct
%src = alloc_stack $NonTrivialStruct
store %instance_1 to [init] %src : $*NonTrivialStruct
apply %user(%src) : $@convention(thin) (@in_guaranteed NonTrivialStruct) -> ()
%tmp = alloc_stack $NonTrivialStruct
copy_addr [take] %src to [init] %tmp : $*NonTrivialStruct
%field_addr = struct_element_addr %tmp : $*NonTrivialStruct, #NonTrivialStruct.val
%field = load [copy] %field_addr : $*Klass
destroy_addr %tmp : $*NonTrivialStruct
dealloc_stack %tmp : $*NonTrivialStruct
dealloc_stack %src : $*NonTrivialStruct
return %field : $Klass
}

// CHECK-LABEL: sil [ossa] @take_from_original_copy_addr__final_use_field_copy_addr_take : {{.*}} {
// CHECK: {{bb[0-9]+}}([[OUT:%[^,]+]] :
// CHECK: [[GET:%[^,]+]] = function_ref @getNonTrivialStruct
// CHECK: [[USER:%[^,]+]] = function_ref @inguaranteed_user_without_result_NTS
// CHECK: [[INSTANCE:%[^,]+]] = apply [[GET]]()
// CHECK: [[SRC:%[^,]+]] = alloc_stack $NonTrivialStruct
// CHECK: store [[INSTANCE]] to [init] [[SRC]]
// CHECK: apply [[USER]]([[SRC]])
// CHECK: [[FIELD_ADDR:%[^,]+]] = struct_element_addr [[SRC]]
// CHECK: copy_addr [[FIELD_ADDR]] to [init] [[OUT]]
// CHECK: destroy_addr [[SRC]]
// CHECK: dealloc_stack [[SRC]]
// CHECK-LABEL: } // end sil function 'take_from_original_copy_addr__final_use_field_copy_addr_take'
sil [ossa] @take_from_original_copy_addr__final_use_field_copy_addr_take : $() -> @out Klass {
entry(%out : $*Klass):
%getNonTrivialStruct = function_ref @getNonTrivialStruct : $@convention(thin) () -> @owned NonTrivialStruct
%user = function_ref @inguaranteed_user_without_result_NTS : $@convention(thin) (@in_guaranteed NonTrivialStruct) -> ()
%instance_1 = apply %getNonTrivialStruct() : $@convention(thin) () -> @owned NonTrivialStruct
%src = alloc_stack $NonTrivialStruct
store %instance_1 to [init] %src : $*NonTrivialStruct
apply %user(%src) : $@convention(thin) (@in_guaranteed NonTrivialStruct) -> ()
%tmp = alloc_stack $NonTrivialStruct
copy_addr [take] %src to [init] %tmp : $*NonTrivialStruct
%field_addr = struct_element_addr %tmp : $*NonTrivialStruct, #NonTrivialStruct.val
copy_addr %field_addr to [init] %out : $*Klass
destroy_addr %tmp : $*NonTrivialStruct
dealloc_stack %tmp : $*NonTrivialStruct
dealloc_stack %src : $*NonTrivialStruct
%retval = tuple ()
return %retval : $()
}

// CHECK-LABEL: sil [ossa] @take_from_original_copy_addr__final_use_load_copy : {{.*}} {
// CHECK: [[GET:%[^,]+]] = function_ref @getKlass
// CHECK: [[USER:%[^,]+]] = function_ref @inguaranteed_user_without_result
// CHECK: [[INSTANCE_1:%[^,]+]] = apply [[GET]]()
// CHECK: [[SRC:%[^,]+]] = alloc_stack $Klass
// CHECK: store [[INSTANCE_1]] to [init] [[SRC]]
// CHECK: apply [[USER]]([[SRC]])
// CHECK: [[INSTANCE_2:%[^,]+]] = load [copy] [[SRC]]
// CHECK: destroy_addr [[SRC]]
// CHECK: dealloc_stack [[SRC]]
// CHECK: return [[INSTANCE_2]]
// CHECK-LABEL: } // end sil function 'take_from_original_copy_addr__final_use_load_copy'
sil [ossa] @take_from_original_copy_addr__final_use_load_copy : $() -> @owned Klass {
%getKlass = function_ref @getKlass : $@convention(thin) () -> @owned Klass
%user = function_ref @inguaranteed_user_without_result : $@convention(thin) (@in_guaranteed Klass) -> ()
%instance_1 = apply %getKlass() : $@convention(thin) () -> @owned Klass
%src = alloc_stack $Klass
store %instance_1 to [init] %src : $*Klass
apply %user(%src) : $@convention(thin) (@in_guaranteed Klass) -> ()
%tmp = alloc_stack $Klass
copy_addr [take] %src to [init] %tmp : $*Klass
%instance_2 = load [copy] %tmp : $*Klass
destroy_addr %tmp : $*Klass
dealloc_stack %tmp : $*Klass
dealloc_stack %src : $*Klass
return %instance_2 : $Klass
}

// CHECK-LABEL: sil [ossa] @take_from_original_copy_addr__final_use_copy_addr_copy : {{.*}} {
|
|
// CHECK: {{bb[0-9]+}}([[OUT:%[^,]+]] : $*Klass):
|
|
// CHECK: [[GET:%[^,]+]] = function_ref @getKlass
|
|
// CHECK: [[USER:%[^,]+]] = function_ref @inguaranteed_user_without_result
|
|
// CHECK: [[INSTANCE:%[^,]+]] = apply [[GET]]()
|
|
// CHECK: [[SRC:%[^,]+]] = alloc_stack $Klass
|
|
// CHECK: store [[INSTANCE]] to [init] [[SRC]]
|
|
// CHECK: apply [[USER]]([[SRC]])
|
|
// CHECK: copy_addr [[SRC]] to [init] [[OUT]]
|
|
// CHECK: destroy_addr [[SRC]]
|
|
// CHECK: dealloc_stack [[SRC]]
|
|
// CHECK-LABEL: } // end sil function 'take_from_original_copy_addr__final_use_copy_addr_copy'
|
|
sil [ossa] @take_from_original_copy_addr__final_use_copy_addr_copy : $() -> @out Klass {
|
|
entry(%out : $*Klass):
|
|
%getKlass = function_ref @getKlass : $@convention(thin) () -> @owned Klass
|
|
%user = function_ref @inguaranteed_user_without_result : $@convention(thin) (@in_guaranteed Klass) -> ()
|
|
%instance_1 = apply %getKlass() : $@convention(thin) () -> @owned Klass
|
|
%src = alloc_stack $Klass
|
|
store %instance_1 to [init] %src : $*Klass
|
|
apply %user(%src) : $@convention(thin) (@in_guaranteed Klass) -> ()
|
|
%tmp = alloc_stack $Klass
|
|
copy_addr [take] %src to [init] %tmp : $*Klass
|
|
copy_addr %tmp to [init] %out : $*Klass
|
|
destroy_addr %tmp : $*Klass
|
|
dealloc_stack %tmp : $*Klass
|
|
dealloc_stack %src : $*Klass
|
|
%retval = tuple ()
|
|
return %retval : $()
|
|
}
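
// The same optimization applies to the move-only type MOS: the temporary is
// removed and both applies take the source allocation, so no copy of the
// move-only value is introduced.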
// CHECK-LABEL: sil [ossa] @take_from_original_copy_addr__final_use_apply__move_only : {{.*}} {
// CHECK: [[GET:%[^,]+]] = function_ref @getMOS
// CHECK: [[USER:%[^,]+]] = function_ref @inguaranteed_user_without_result_MOS
// CHECK: [[INSTANCE:%[^,]+]] = apply [[GET]]()
// CHECK: [[SRC:%[^,]+]] = alloc_stack $MOS
// CHECK: store [[INSTANCE]] to [init] [[SRC]]
// CHECK: apply [[USER]]([[SRC]])
// CHECK: apply [[USER]]([[SRC]])
// CHECK: destroy_addr [[SRC]]
// CHECK: dealloc_stack [[SRC]]
// CHECK-LABEL: } // end sil function 'take_from_original_copy_addr__final_use_apply__move_only'
sil [ossa] @take_from_original_copy_addr__final_use_apply__move_only : $() -> () {
  %getMOS = function_ref @getMOS : $@convention(thin) () -> @owned MOS
  %user = function_ref @inguaranteed_user_without_result_MOS : $@convention(thin) (@in_guaranteed MOS) -> ()
  %instance_1 = apply %getMOS() : $@convention(thin) () -> @owned MOS
  %src = alloc_stack $MOS
  store %instance_1 to [init] %src : $*MOS
  apply %user(%src) : $@convention(thin) (@in_guaranteed MOS) -> ()
  %tmp = alloc_stack $MOS
  copy_addr [take] %src to [init] %tmp : $*MOS
  apply %user(%tmp) : $@convention(thin) (@in_guaranteed MOS) -> ()
  destroy_addr %tmp : $*MOS
  dealloc_stack %tmp : $*MOS
  dealloc_stack %src : $*MOS
  %tuple = tuple ()
  return %tuple : $()
}
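
// The temporary holding the taken enum payload is eliminated: the projection
// and load_borrow operate directly on the payload address, and the
// destroy_addr is moved there as well.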
// CHECK-LABEL: sil [ossa] @test_temprvoborrowboundary1 :
// CHECK-NOT: alloc_stack
// CHECK-NOT: copy_addr
// CHECK: [[ADDR:%.*]] = unchecked_take_enum_data_addr %0 : $*FakeOptional<NonTrivialStruct>, #FakeOptional.some!enumelt
// CHECK: [[ELE:%.*]] = struct_element_addr [[ADDR]] : $*NonTrivialStruct, #NonTrivialStruct.val
// CHECK: [[LD:%.*]] = load_borrow [[ELE]] : $*Klass
// CHECK: end_borrow [[LD]] : $Klass
// CHECK: destroy_addr [[ADDR]]
// CHECK: } // end sil function 'test_temprvoborrowboundary1'
sil [ossa] @test_temprvoborrowboundary1 : $@convention(thin) (@in FakeOptional<NonTrivialStruct>) -> () {
bb0(%0 : $*FakeOptional<NonTrivialStruct>):
  %1 = alloc_stack $NonTrivialStruct
  %2 = unchecked_take_enum_data_addr %0 : $*FakeOptional<NonTrivialStruct>, #FakeOptional.some!enumelt
  copy_addr [take] %2 to [init] %1 : $*NonTrivialStruct
  %4 = struct_element_addr %1 : $*NonTrivialStruct, #NonTrivialStruct.val
  %5 = load_borrow %4 : $*Klass
  end_borrow %5 : $Klass
  destroy_addr %1 : $*NonTrivialStruct
  dealloc_stack %1 : $*NonTrivialStruct
  %res = tuple ()
  return %res : $()
}

// This is not optimized because the end_borrow is in a different block than
// the load_borrow.
// CHECK-LABEL: sil [ossa] @test_temprvoborrowboundary2 :
// CHECK: copy_addr
// CHECK: } // end sil function 'test_temprvoborrowboundary2'
sil [ossa] @test_temprvoborrowboundary2 : $@convention(thin) (@in FakeOptional<NonTrivialStruct>) -> () {
bb0(%0 : $*FakeOptional<NonTrivialStruct>):
  %1 = alloc_stack $NonTrivialStruct
  %2 = unchecked_take_enum_data_addr %0 : $*FakeOptional<NonTrivialStruct>, #FakeOptional.some!enumelt
  copy_addr [take] %2 to [init] %1 : $*NonTrivialStruct
  %4 = struct_element_addr %1 : $*NonTrivialStruct, #NonTrivialStruct.val
  %5 = load_borrow %4 : $*Klass
  br bb1(%5 : $Klass)

bb1(%6 : @guaranteed $Klass):
  end_borrow %6 : $Klass
  destroy_addr %1 : $*NonTrivialStruct
  dealloc_stack %1 : $*NonTrivialStruct
  %res = tuple ()
  return %res : $()
}

// Lexical alloc_stacks can still be eliminated if their sources are guaranteed
// function arguments (because those are lexical and have a live range that
// contains the live range of the value stored into the alloc stack).
// CHECK-LABEL: sil [ossa] @copy_addr__lexical_alloc_stack_temporary__guaranteed_function_arg : {{.*}} {
// CHECK-OPT-NOT: alloc_stack
// CHECK-ONONE: alloc_stack
// CHECK-LABEL: } // end sil function 'copy_addr__lexical_alloc_stack_temporary__guaranteed_function_arg'
sil [ossa] @copy_addr__lexical_alloc_stack_temporary__guaranteed_function_arg : $@convention(thin) (@guaranteed OtherClass) -> () {
entry(%instance : @guaranteed $OtherClass):
  %field_ptr = ref_element_addr %instance : $OtherClass, #OtherClass.klass
  %addr = alloc_stack [lexical] $Klass
  copy_addr %field_ptr to [init] %addr : $*Klass
  destroy_addr %addr : $*Klass
  dealloc_stack %addr : $*Klass
  %retval = tuple ()
  return %retval : $()
}
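
// The same holds if the source of the copy is an @in_guaranteed indirect
// argument.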
// CHECK-LABEL: sil [ossa] @copy_addr__lexical_alloc_stack_temporary_in_guaranteed_function_arg : {{.*}} {
// CHECK-OPT-NOT: alloc_stack
// CHECK-ONONE: alloc_stack
// CHECK-LABEL: } // end sil function 'copy_addr__lexical_alloc_stack_temporary_in_guaranteed_function_arg'
sil [ossa] @copy_addr__lexical_alloc_stack_temporary_in_guaranteed_function_arg : $@convention(thin) (@in_guaranteed Klass) -> () {
entry(%instance : $*Klass):
  %addr = alloc_stack [lexical] $Klass
  copy_addr %instance to [init] %addr : $*Klass
  destroy_addr %addr : $*Klass
  dealloc_stack %addr : $*Klass
  %retval = tuple ()
  return %retval : $()
}
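
// Like the lexical case above, a [var_decl] allocation corresponds to a
// source variable and is therefore kept by the mandatory pass.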
// CHECK-LABEL: sil [ossa] @copy_addr__vardecl_alloc_stack_temporary_in_guaranteed_function_arg : {{.*}} {
// CHECK-OPT-NOT: alloc_stack
// CHECK-ONONE: alloc_stack
// CHECK-LABEL: } // end sil function 'copy_addr__vardecl_alloc_stack_temporary_in_guaranteed_function_arg'
sil [ossa] @copy_addr__vardecl_alloc_stack_temporary_in_guaranteed_function_arg : $@convention(thin) (@in_guaranteed Klass) -> () {
entry(%instance : $*Klass):
  %addr = alloc_stack [var_decl] $Klass
  copy_addr %instance to [init] %addr : $*Klass
  destroy_addr %addr : $*Klass
  dealloc_stack %addr : $*Klass
  %retval = tuple ()
  return %retval : $()
}
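
// The source may also be a projection out of an @inout argument, as long as
// there is no conflicting access between the copy and the last use of the
// temporary.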
// CHECK-LABEL: sil [ossa] @copy_addr__lexical_alloc_stack_temporary__inout_function_arg : {{.*}} {
// CHECK-OPT-NOT: alloc_stack
// CHECK-ONONE: alloc_stack
// CHECK-LABEL: } // end sil function 'copy_addr__lexical_alloc_stack_temporary__inout_function_arg'
sil [ossa] @copy_addr__lexical_alloc_stack_temporary__inout_function_arg : $@convention(thin) (@inout NonTrivialStruct) -> () {
entry(%instance : $*NonTrivialStruct):
  %field_ptr = struct_element_addr %instance : $*NonTrivialStruct, #NonTrivialStruct.val
  %addr = alloc_stack [lexical] $Klass
  copy_addr %field_ptr to [init] %addr : $*Klass
  destroy_addr %addr : $*Klass
  dealloc_stack %addr : $*Klass
  %retval = tuple ()
  return %retval : $()
}

// Verify that no copy of an instance of the move-only type MOS is introduced.
// CHECK-LABEL: sil hidden [ossa] @dont_copy_move_only_struct : {{.*}} {
// CHECK: [[SRC:%[^,]+]] = alloc_stack $MOS
// CHECK: [[GET:%[^,]+]] = function_ref @getMOS
// CHECK: [[INSTANCE_1:%[^,]+]] = apply [[GET]]()
// CHECK: store [[INSTANCE_1]] to [init] [[SRC]]
// CHECK: [[INSTANCE_2:%[^,]+]] = load [take] [[SRC]]
// CHECK: store [[INSTANCE_2]] to [init] [[SRC]]
// CHECK: [[INSTANCE_3:%[^,]+]] = load [take] [[SRC]]
// CHECK: dealloc_stack [[SRC]]
// CHECK: return [[INSTANCE_3]]
// CHECK-LABEL: } // end sil function 'dont_copy_move_only_struct'
sil hidden [ossa] @dont_copy_move_only_struct : $@convention(thin) () -> @owned MOS {
bb0:
  %src = alloc_stack $MOS
  %getMOS = function_ref @getMOS : $@convention(thin) () -> @owned MOS
  %instance_1 = apply %getMOS() : $@convention(thin) () -> @owned MOS
  store %instance_1 to [init] %src : $*MOS
  %tmp = alloc_stack $MOS
  copy_addr [take] %src to [init] %tmp : $*MOS
  %instance_2 = load [take] %tmp : $*MOS
  store %instance_2 to [init] %src : $*MOS
  %instance_3 = load [take] %src : $*MOS
  dealloc_stack %tmp : $*MOS
  dealloc_stack %src : $*MOS
  return %instance_3 : $MOS
}
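
// A use of the temporary (the struct_element_addr) is created before the
// initializing copy_addr, so the temporary cannot be eliminated.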
// CHECK-LABEL: sil [ossa] @dont_optimize_use_before_copy :
// CHECK: copy_addr
// CHECK-LABEL: } // end sil function 'dont_optimize_use_before_copy'
sil [ossa] @dont_optimize_use_before_copy : $@convention(thin) <B> (@in_guaranteed GS<B>, @inout GS<B>) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>):
  %2 = alloc_stack $GS<B>
  %3 = struct_element_addr %2 : $*GS<B>, #GS._value
  copy_addr %1 to [init] %2 : $*GS<B>
  %5 = load [trivial] %3 : $*Builtin.Int64
  %6 = builtin "cmp_slt_Int64"(%5 : $Builtin.Int64, %5 : $Builtin.Int64) : $Builtin.Int1
  copy_addr %2 to %1 : $*GS<B>
  destroy_addr %2 : $*GS<B>
  dealloc_stack %2 : $*GS<B>
  %10 = tuple ()
  return %10 : $()
}
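
// The temporary is read after end_apply, when the yielded address is no
// longer valid, so neither copy_addr can be removed.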
// CHECK-LABEL: sil [ossa] @test_read_only_coroutine :
// CHECK: copy_addr
// CHECK: copy_addr
// CHECK-LABEL: } // end sil function 'test_read_only_coroutine'
sil [ossa] @test_read_only_coroutine : $@convention(thin) () -> @out P {
bb0(%0 : $*P):
  %5 = function_ref @read_only_coroutine : $@yield_once @convention(thin) () -> @yields @in_guaranteed P
  (%6, %7) = begin_apply %5() : $@yield_once @convention(thin) () -> @yields @in_guaranteed P
  %8 = alloc_stack $P
  copy_addr %6 to [init] %8
  %10 = end_apply %7 as $()
  copy_addr [take] %8 to [init] %0
  dealloc_stack %8
  %15 = tuple ()
  return %15
}
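
// A value that is loaded, stored into a temporary and loaded back collapses
// to a single load [copy] of the original address. At Onone a debug_step
// additionally remains in the output.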
// CHECK-LABEL: sil [ossa] @store_to_temp :
// CHECK: bb0(%0 : $*Klass):
// CHECK-ONONE-NEXT: debug_step
// CHECK-NEXT: [[L:%.*]] = load [copy] %0
// CHECK-NEXT: return [[L]]
// CHECK-LABEL: } // end sil function 'store_to_temp'
sil [ossa] @store_to_temp : $@convention(thin) (@in_guaranteed Klass) -> @owned Klass {
bb0(%0 : $*Klass):
  %1 = load [copy] %0
  %2 = alloc_stack $Klass
  store %1 to [init] %2
  %4 = load [copy] %2
  destroy_addr %2
  dealloc_stack %2
  return %4
}
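
// The same, with a load [take] of the source: the result is a single
// load [copy] followed by a destroy_addr of the source.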
// CHECK-LABEL: sil [ossa] @store_to_temp_take :
// CHECK: bb0(%0 : $*Klass):
// CHECK-ONONE-NEXT: debug_step
// CHECK-NEXT: [[L:%.*]] = load [copy] %0
// CHECK-NEXT: destroy_addr %0
// CHECK-NEXT: return [[L]]
// CHECK-LABEL: } // end sil function 'store_to_temp_take'
sil [ossa] @store_to_temp_take : $@convention(thin) (@in Klass) -> @owned Klass {
bb0(%0 : $*Klass):
  %1 = load [take] %0
  %2 = alloc_stack $Klass
  store %1 to [init] %2
  %4 = load [copy] %2
  destroy_addr %2
  dealloc_stack %2
  return %4
}
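
// The trivial variant collapses to a single load [trivial].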
// CHECK-LABEL: sil [ossa] @store_to_temp_trivial :
// CHECK: bb0(%0 : $*Int):
// CHECK-ONONE-NEXT: debug_step
// CHECK-NEXT: [[L:%.*]] = load [trivial] %0
// CHECK-NEXT: return [[L]]
// CHECK-LABEL: } // end sil function 'store_to_temp_trivial'
sil [ossa] @store_to_temp_trivial : $@convention(thin) (@in_guaranteed Int) -> @owned Int {
bb0(%0 : $*Int):
  %1 = load [trivial] %0
  %2 = alloc_stack $Int
  store %1 to [trivial] %2
  %4 = load [trivial] %2
  destroy_addr %2
  dealloc_stack %2
  return %4
}
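
// Not optimized: the source address is destroyed between the load and the
// store to the temporary.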
// CHECK-LABEL: sil [ossa] @store_with_early_destroy_of_src :
// CHECK: alloc_stack
// CHECK-NEXT: store %1
// CHECK-LABEL: } // end sil function 'store_with_early_destroy_of_src'
sil [ossa] @store_with_early_destroy_of_src : $@convention(thin) (@in Klass) -> @owned Klass {
bb0(%0 : $*Klass):
  %1 = load [copy] %0
  fix_lifetime %0
  destroy_addr %0
  %2 = alloc_stack $Klass
  store %1 to [init] %2
  %4 = load [copy] %2
  destroy_addr %2
  dealloc_stack %2
  return %4
}
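
// Not optimized for the same reason, with the destroy of the source in a
// different block than the store.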
// CHECK-LABEL: sil [ossa] @store_with_load_in_different_block :
// CHECK: alloc_stack
// CHECK-NEXT: store %1
// CHECK-LABEL: } // end sil function 'store_with_load_in_different_block'
sil [ossa] @store_with_load_in_different_block : $@convention(thin) (@in Klass) -> @owned Klass {
bb0(%0 : $*Klass):
  %1 = load [copy] %0
  br bb1

bb1:
  fix_lifetime %0
  destroy_addr %0
  br bb2

bb2:
  %2 = alloc_stack $Klass
  store %1 to [init] %2
  %4 = load [copy] %2
  destroy_addr %2
  dealloc_stack %2
  return %4
}
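
// Not optimized: the loaded value has another use (the fix_lifetime) besides
// the store to the temporary.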
// CHECK-LABEL: sil [ossa] @store_with_load_with_multiple_uses :
// CHECK: fix_lifetime
// CHECK: alloc_stack
// CHECK-NEXT: store %1
// CHECK-LABEL: } // end sil function 'store_with_load_with_multiple_uses'
sil [ossa] @store_with_load_with_multiple_uses : $@convention(thin) (@in_guaranteed Klass) -> @owned Klass {
bb0(%0 : $*Klass):
  %1 = load [copy] %0
  fix_lifetime %1
  %2 = alloc_stack $Klass
  store %1 to [init] %2
  %4 = load [copy] %2
  destroy_addr %2
  dealloc_stack %2
  return %4
}
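
// Not optimized: the stored value is a function argument rather than a loaded
// value, so there is no source address to load from instead.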
// CHECK-LABEL: sil [ossa] @store_without_load :
// CHECK: alloc_stack
// CHECK-NEXT: store %0
// CHECK-LABEL: } // end sil function 'store_without_load'
sil [ossa] @store_without_load : $@convention(thin) (@owned Klass) -> @owned Klass {
bb0(%0 : @owned $Klass):
  %2 = alloc_stack $Klass
  store %0 to [init] %2
  %4 = load [copy] %2
  destroy_addr %2
  dealloc_stack %2
  return %4
}