// RUN: %target-sil-opt -enable-sil-verify-all %s -temp-rvalue-opt | %FileCheck %s

sil_stage canonical

import Builtin
import Swift

/////////////
// Utility //
/////////////

struct GS<Base> {
  var _base: Base
  var _value: Builtin.Int64
}

class Klass {}

sil @unknown : $@convention(thin) () -> ()

sil @inguaranteed_user_without_result : $@convention(thin) (@in_guaranteed Klass) -> () {
bb0(%0 : $*Klass):
  %9999 = tuple()
  return %9999 : $()
}

sil @inguaranteed_user_with_result : $@convention(thin) (@in_guaranteed Klass) -> @out Klass {
bb0(%0 : $*Klass, %1 : $*Klass):
  copy_addr %1 to [initialization] %0 : $*Klass
  %9999 = tuple()
  return %9999 : $()
}

sil @throwing_function : $@convention(thin) (@in_guaranteed Klass) -> ((), @error Error)

///////////
// Tests //
///////////
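
// For orientation: TempRValueOpt removes short-lived stack temporaries that
// are merely read from. A minimal sketch of the pattern (illustrative only,
// not one of the tests below; %src and %t are placeholder names):
//
//   %t = alloc_stack $T
//   copy_addr %src to [initialization] %t : $*T
//   ...                      // reads from %t, no writes to %src
//   destroy_addr %t : $*T
//   dealloc_stack %t : $*T
//
// When nothing can modify %src while %t is alive, all uses of %t are
// rewritten to use %src and the four temporary-management instructions are
// deleted.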

// CHECK-LABEL: sil @rvalue_simple
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>):
// CHECK: [[A1:%.*]] = struct_element_addr %0 : $*GS<B>, #GS._value
// CHECK: [[V1:%.*]] = load [[A1]] : $*Builtin.Int64
// CHECK-NOT: alloc_stack
// CHECK-NOT: copy_addr
// CHECK: [[A2:%.*]] = struct_element_addr %1 : $*GS<B>, #GS._value
// CHECK: [[V2:%.*]] = load [[A2]] : $*Builtin.Int64
// CHECK: %{{.*}} = builtin "cmp_slt_Int64"([[V1]] : $Builtin.Int64, [[V2]] : $Builtin.Int64) : $Builtin.Int1
// CHECK-NOT: destroy_addr
// CHECK-NOT: dealloc_stack
// CHECK: return %{{.*}} : $()
// CHECK-LABEL: } // end sil function 'rvalue_simple'
sil @rvalue_simple : $@convention(thin) <B> (@in GS<B>, @inout GS<B>) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>):
  %2 = struct_element_addr %0 : $*GS<B>, #GS._value
  %3 = load %2 : $*Builtin.Int64
  %4 = alloc_stack $GS<B>
  copy_addr %1 to [initialization] %4 : $*GS<B>
  %6 = struct_element_addr %4 : $*GS<B>, #GS._value
  %7 = load %6 : $*Builtin.Int64
  %8 = builtin "cmp_slt_Int64"(%3 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
  destroy_addr %4 : $*GS<B>
  dealloc_stack %4 : $*GS<B>
  %9999 = tuple()
  return %9999 : $()
}

// CHECK-LABEL: sil @copy_from_temp
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
// CHECK-NEXT: builtin
// CHECK-NEXT: copy_addr %1 to [initialization] %0 : $*GS<B>
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil @copy_from_temp : $@convention(thin) <B> (@inout GS<B>, @inout GS<B>, Builtin.Int64) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
  %4 = alloc_stack $GS<B>
  copy_addr %1 to [initialization] %4 : $*GS<B>
  %8 = builtin "cmp_slt_Int64"(%2 : $Builtin.Int64, %2 : $Builtin.Int64) : $Builtin.Int1
  copy_addr %4 to [initialization] %0 : $*GS<B>
  destroy_addr %4 : $*GS<B>
  dealloc_stack %4 : $*GS<B>
  %9999 = tuple()
  return %9999 : $()
}

// CHECK-LABEL: sil @copy_back_to_src
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>):
// CHECK-NEXT: struct_element_addr %1
// CHECK-NEXT: load
// CHECK-NEXT: builtin
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil @copy_back_to_src : $@convention(thin) <B> (@in GS<B>, @inout GS<B>) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>):
  %4 = alloc_stack $GS<B>
  copy_addr %1 to [initialization] %4 : $*GS<B>
  %6 = struct_element_addr %4 : $*GS<B>, #GS._value
  %7 = load %6 : $*Builtin.Int64
  %8 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
  copy_addr %4 to %1 : $*GS<B>
  destroy_addr %4 : $*GS<B>
  dealloc_stack %4 : $*GS<B>
  %9999 = tuple()
  return %9999 : $()
}

// CHECK-LABEL: sil @take_from_temp
// CHECK: bb0(%0 : $*B, %1 : $*GS<B>):
// CHECK-NEXT: [[STK:%.*]] = alloc_stack
// CHECK-NEXT: copy_addr %1 to [initialization] [[STK]]
// CHECK-NEXT: [[INNER:%.*]] = struct_element_addr
// CHECK-NEXT: copy_addr [take] [[INNER]]
// CHECK-NEXT: dealloc_stack
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil @take_from_temp : $@convention(thin) <B> (@inout B, @inout GS<B>) -> () {
bb0(%0 : $*B, %1 : $*GS<B>):
  %4 = alloc_stack $GS<B>
  copy_addr %1 to [initialization] %4 : $*GS<B>
  %7 = struct_element_addr %4 : $*GS<B>, #GS._base
  copy_addr [take] %7 to %0 : $*B
  dealloc_stack %4 : $*GS<B>
  %9999 = tuple()
  return %9999 : $()
}

// CHECK-LABEL: sil @load_in_wrong_block
// CHECK: bb0(%0 : $*GS<B>):
// CHECK-NEXT: alloc_stack
// CHECK-NEXT: copy_addr
// CHECK-NEXT: struct_element_addr
// CHECK-NEXT: br bb1
// CHECK: return
sil @load_in_wrong_block : $@convention(thin) <B> (@in GS<B>) -> () {
bb0(%0 : $*GS<B>):
  %4 = alloc_stack $GS<B>
  copy_addr %0 to [initialization] %4 : $*GS<B>
  %6 = struct_element_addr %4 : $*GS<B>, #GS._value
  br bb1

bb1:
  %7 = load %6 : $*Builtin.Int64
  %8 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
  destroy_addr %4 : $*GS<B>
  dealloc_stack %4 : $*GS<B>
  %9999 = tuple()
  return %9999 : $()
}

// CHECK-LABEL: sil @projection_in_wrong_block
// CHECK: bb0(%0 : $*GS<B>):
// CHECK-NEXT: alloc_stack
// CHECK-NEXT: copy_addr
// CHECK-NEXT: br bb1
// CHECK: return
sil @projection_in_wrong_block : $@convention(thin) <B> (@in GS<B>) -> () {
bb0(%0 : $*GS<B>):
  %4 = alloc_stack $GS<B>
  copy_addr %0 to [initialization] %4 : $*GS<B>
  br bb1

bb1:
  %6 = struct_element_addr %4 : $*GS<B>, #GS._value
  %7 = load %6 : $*Builtin.Int64
  %8 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
  destroy_addr %4 : $*GS<B>
  dealloc_stack %4 : $*GS<B>
  %9999 = tuple()
  return %9999 : $()
}

// CHECK-LABEL: sil @store_after_load
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
// CHECK-NEXT: [[A1:%.*]] = struct_element_addr %1
// CHECK-NEXT: [[A2:%.*]] = struct_element_addr %1
// CHECK-NEXT: load [[A2]]
// CHECK-NEXT: store %2 to [[A1]]
// CHECK-NEXT: builtin
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil @store_after_load : $@convention(thin) <B> (@in GS<B>, @inout GS<B>, Builtin.Int64) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
  %3 = struct_element_addr %1 : $*GS<B>, #GS._value
  %4 = alloc_stack $GS<B>
  copy_addr %1 to [initialization] %4 : $*GS<B>
  %6 = struct_element_addr %4 : $*GS<B>, #GS._value
  %7 = load %6 : $*Builtin.Int64
  store %2 to %3 : $*Builtin.Int64
  %8 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
  destroy_addr %4 : $*GS<B>
  dealloc_stack %4 : $*GS<B>
  %9999 = tuple()
  return %9999 : $()
}

// CHECK-LABEL: sil @store_after_two_loads
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
// CHECK-NEXT: [[A1:%.*]] = struct_element_addr %1
// CHECK-NEXT: [[A2:%.*]] = struct_element_addr %1
// CHECK-NEXT: load [[A2]]
// CHECK-NEXT: load [[A2]]
// CHECK-NEXT: store %2 to [[A1]]
// CHECK-NEXT: builtin
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil @store_after_two_loads : $@convention(thin) <B> (@in GS<B>, @inout GS<B>, Builtin.Int64) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
  %3 = struct_element_addr %1 : $*GS<B>, #GS._value
  %4 = alloc_stack $GS<B>
  copy_addr %1 to [initialization] %4 : $*GS<B>
  %6 = struct_element_addr %4 : $*GS<B>, #GS._value
  %7 = load %6 : $*Builtin.Int64
  %8 = load %6 : $*Builtin.Int64
  store %2 to %3 : $*Builtin.Int64
  %9 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %8 : $Builtin.Int64) : $Builtin.Int1
  destroy_addr %4 : $*GS<B>
  dealloc_stack %4 : $*GS<B>
  %9999 = tuple()
  return %9999 : $()
}
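
// The next two tests pin down the ordering requirement: a write to the copy's
// source between the copy and a read of the temporary must keep the temporary
// alive, because the temporary preserves the value from before the write.
// Illustrative sketch (placeholder names, not a test case):
//
//   copy_addr %src to [initialization] %t   // %t snapshots %src
//   store %new to %src                      // %src changes...
//   %v = load %t                            // ...but %v must be the old value
//
// Rewriting such a load to read from %src would change program behavior.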

// CHECK-LABEL: sil @store_before_load
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
// CHECK-NEXT: struct_element_addr %1
// CHECK-NEXT: [[T:%.*]] = alloc_stack
// CHECK-NEXT: copy_addr %1 to [initialization] [[T]]
// CHECK-NEXT: [[A:%.*]] = struct_element_addr [[T]]
// CHECK-NEXT: store
// CHECK-NEXT: load [[A]]
// CHECK-NEXT: builtin
// CHECK-NEXT: destroy_addr [[T]]
// CHECK-NEXT: dealloc_stack [[T]]
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil @store_before_load : $@convention(thin) <B> (@in GS<B>, @inout GS<B>, Builtin.Int64) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
  %3 = struct_element_addr %1 : $*GS<B>, #GS._value
  %4 = alloc_stack $GS<B>
  copy_addr %1 to [initialization] %4 : $*GS<B>
  %6 = struct_element_addr %4 : $*GS<B>, #GS._value
  store %2 to %3 : $*Builtin.Int64
  %7 = load %6 : $*Builtin.Int64
  %8 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
  destroy_addr %4 : $*GS<B>
  dealloc_stack %4 : $*GS<B>
  %9999 = tuple()
  return %9999 : $()
}

// CHECK-LABEL: sil @store_between_loads
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
// CHECK-NEXT: struct_element_addr %1
// CHECK-NEXT: [[T:%.*]] = alloc_stack
// CHECK-NEXT: copy_addr %1 to [initialization] [[T]]
// CHECK-NEXT: [[A:%.*]] = struct_element_addr [[T]]
// CHECK-NEXT: load [[A]]
// CHECK-NEXT: store
// CHECK-NEXT: load [[A]]
// CHECK-NEXT: builtin
// CHECK-NEXT: destroy_addr [[T]]
// CHECK-NEXT: dealloc_stack [[T]]
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil @store_between_loads : $@convention(thin) <B> (@in GS<B>, @inout GS<B>, Builtin.Int64) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
  %3 = struct_element_addr %1 : $*GS<B>, #GS._value
  %4 = alloc_stack $GS<B>
  copy_addr %1 to [initialization] %4 : $*GS<B>
  %6 = struct_element_addr %4 : $*GS<B>, #GS._value
  %7 = load %6 : $*Builtin.Int64
  store %2 to %3 : $*Builtin.Int64
  %8 = load %6 : $*Builtin.Int64
  %9 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %8 : $Builtin.Int64) : $Builtin.Int1
  destroy_addr %4 : $*GS<B>
  dealloc_stack %4 : $*GS<B>
  %9999 = tuple()
  return %9999 : $()
}
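
// Writes do not have to be visible as stores: an opaque call can mutate the
// source through an aliasing reference. Illustrative sketch (placeholder
// names; %src stands for an @inout_aliasable argument):
//
//   copy_addr %src to [initialization] %t
//   apply %fn()       // opaque, like @unknown: may write %src via an alias
//   %v = load %t      // still needs the pre-call snapshot
//
// The next test checks that such a potential store also blocks the rewrite.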

// CHECK-LABEL: sil @potential_store_before_load
// CHECK: bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
// CHECK-NEXT: struct_element_addr %1
// CHECK-NEXT: [[T:%.*]] = alloc_stack
// CHECK-NEXT: copy_addr %1 to [initialization] [[T]]
// CHECK-NEXT: [[A:%.*]] = struct_element_addr [[T]]
// CHECK: apply
// CHECK-NEXT: load [[A]]
// CHECK-NEXT: builtin
// CHECK-NEXT: destroy_addr [[T]]
// CHECK-NEXT: dealloc_stack [[T]]
// CHECK-NEXT: tuple
// CHECK-NEXT: return
sil @potential_store_before_load : $@convention(thin) <B> (@in GS<B>, @inout_aliasable GS<B>, Builtin.Int64) -> () {
bb0(%0 : $*GS<B>, %1 : $*GS<B>, %2 : $Builtin.Int64):
  %3 = struct_element_addr %1 : $*GS<B>, #GS._value
  %4 = alloc_stack $GS<B>
  copy_addr %1 to [initialization] %4 : $*GS<B>
  %6 = struct_element_addr %4 : $*GS<B>, #GS._value
  %f = function_ref @unknown : $@convention(thin) () -> ()
  %a = apply %f() : $@convention(thin) () -> ()
  %7 = load %6 : $*Builtin.Int64
  %8 = builtin "cmp_slt_Int64"(%7 : $Builtin.Int64, %7 : $Builtin.Int64) : $Builtin.Int1
  destroy_addr %4 : $*GS<B>
  dealloc_stack %4 : $*GS<B>
  %9999 = tuple()
  return %9999 : $()
}

// Test temp RValue elimination on switches.
// CHECK-LABEL: sil @rvalueSwitch
// CHECK: bb1:
// CHECK-NEXT: struct_element_addr %1
// CHECK-NEXT: load
// CHECK-NOT: alloc_stack $UnfoldSequence
// CHECK: return
sil @rvalueSwitch : $@convention(method) <Element, State> (@inout UnfoldSequence<Element, State>) -> @out Optional<Element> {
bb0(%0 : $*Optional<Element>, %1 : $*UnfoldSequence<Element, State>):
  %2 = struct_element_addr %1 : $*UnfoldSequence<Element, State>, #UnfoldSequence._done
  %3 = struct_element_addr %2 : $*Bool, #Bool._value
  %4 = load %3 : $*Builtin.Int1
  cond_br %4, bb4, bb1

bb1:
  %6 = alloc_stack $UnfoldSequence<Element, State>
  copy_addr %1 to [initialization] %6 : $*UnfoldSequence<Element, State>
  %8 = struct_element_addr %6 : $*UnfoldSequence<Element, State>, #UnfoldSequence._next
  %9 = load %8 : $*@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@inout τ_0_0) -> @out Optional<τ_0_1> for <State, Element>
  %10 = alloc_stack $Optional<Element>
  %11 = struct_element_addr %1 : $*UnfoldSequence<Element, State>, #UnfoldSequence._state
  strong_retain %9 : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@inout τ_0_0) -> @out Optional<τ_0_1> for <State, Element>
  %13 = apply %9(%10, %11) : $@callee_guaranteed @substituted <τ_0_0, τ_0_1> (@inout τ_0_0) -> @out Optional<τ_0_1> for <State, Element>
  switch_enum_addr %10 : $*Optional<Element>, case #Optional.some!enumelt: bb3, case #Optional.none!enumelt: bb2

bb2:
  destroy_addr %10 : $*Optional<Element>
  dealloc_stack %10 : $*Optional<Element>
  destroy_addr %6 : $*UnfoldSequence<Element, State>
  dealloc_stack %6 : $*UnfoldSequence<Element, State>
  %19 = integer_literal $Builtin.Int1, -1
  %20 = struct $Bool (%19 : $Builtin.Int1)
  store %20 to %2 : $*Bool
  %22 = alloc_stack $Optional<Element>
  inject_enum_addr %22 : $*Optional<Element>, #Optional.none!enumelt
  copy_addr [take] %22 to [initialization] %0 : $*Optional<Element>
  dealloc_stack %22 : $*Optional<Element>
  br bb5

bb3:
  %27 = unchecked_take_enum_data_addr %10 : $*Optional<Element>, #Optional.some!enumelt
  %28 = init_enum_data_addr %0 : $*Optional<Element>, #Optional.some!enumelt
  copy_addr [take] %27 to [initialization] %28 : $*Element
  dealloc_stack %10 : $*Optional<Element>
  destroy_addr %6 : $*UnfoldSequence<Element, State>
  dealloc_stack %6 : $*UnfoldSequence<Element, State>
  inject_enum_addr %0 : $*Optional<Element>, #Optional.some!enumelt
  br bb5

bb4:
  %35 = alloc_stack $Optional<Element>
  inject_enum_addr %35 : $*Optional<Element>, #Optional.none!enumelt
  copy_addr [take] %35 to [initialization] %0 : $*Optional<Element>
  dealloc_stack %35 : $*Optional<Element>
  br bb5

bb5:
  %40 = tuple ()
  return %40 : $()
}
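
// The following group of tests exercises temporaries whose only use is as an
// @in_guaranteed call argument. Illustrative sketch (placeholder names, not a
// test case):
//
//   copy_addr %arg to [initialization] %t
//   apply %f(%t)      // %f : $@convention(thin) (@in_guaranteed T) -> ()
//   destroy_addr %t
//
// An @in_guaranteed callee neither consumes nor mutates its argument, so in
// this pattern the call can use %arg directly and the temporary goes away.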

// Make sure that we can eliminate temporaries passed via a temporary rvalue to
// an @in_guaranteed function.
//
// CHECK-LABEL: sil @inguaranteed_no_result : $@convention(thin) (@inout Klass) -> () {
// CHECK: bb0([[ARG:%.*]] : $*Klass):
// CHECK-NOT: copy_addr
// CHECK: apply {{%.*}}([[ARG]])
// CHECK-NOT: destroy_addr
// CHECK: } // end sil function 'inguaranteed_no_result'
sil @inguaranteed_no_result : $@convention(thin) (@inout Klass) -> () {
bb0(%0 : $*Klass):
  %1 = alloc_stack $Klass
  copy_addr %0 to [initialization] %1 : $*Klass
  %5 = function_ref @inguaranteed_user_without_result : $@convention(thin) (@in_guaranteed Klass) -> ()
  %6 = apply %5(%1) : $@convention(thin) (@in_guaranteed Klass) -> ()
  destroy_addr %1 : $*Klass
  dealloc_stack %1 : $*Klass
  %9 = tuple ()
  return %9 : $()
}

// CHECK-LABEL: sil @try_apply_argument : $@convention(thin) (@inout Klass) -> () {
// CHECK-NOT: copy_addr
// CHECK: try_apply {{%[0-9]+}}(%0)
// CHECK: } // end sil function 'try_apply_argument'
sil @try_apply_argument : $@convention(thin) (@inout Klass) -> () {
bb0(%0 : $*Klass):
  %1 = alloc_stack $Klass
  copy_addr %0 to [initialization] %1 : $*Klass
  %5 = function_ref @throwing_function : $@convention(thin) (@in_guaranteed Klass) -> ((), @error Error)
  try_apply %5(%1) : $@convention(thin) (@in_guaranteed Klass) -> ((), @error Error), normal bb1, error bb2

bb1(%r : $()):
  br bb3

bb2(%e : $Error):
  br bb3

bb3:
  destroy_addr %1 : $*Klass
  dealloc_stack %1 : $*Klass
  %9 = tuple ()
  return %9 : $()
}

// Make sure that we can eliminate temporaries passed via a temporary rvalue to
// an @in_guaranteed function.
//
// CHECK-LABEL: sil @inguaranteed_with_result : $@convention(thin) (@inout Klass) -> () {
// CHECK: bb0([[ARG:%.*]] : $*Klass):
// dead temp
// CHECK: [[TMP_OUT:%.*]] = alloc_stack $Klass
// CHECK-NOT: copy_addr
// CHECK: apply {{%.*}}([[TMP_OUT]], [[ARG]])
// CHECK-NOT: copy_addr
// CHECK: destroy_addr [[TMP_OUT]]
// CHECK-NOT: destroy_addr
// CHECK: } // end sil function 'inguaranteed_with_result'
sil @inguaranteed_with_result : $@convention(thin) (@inout Klass) -> () {
bb0(%0 : $*Klass):
  %1 = alloc_stack $Klass
  %1a = alloc_stack $Klass
  copy_addr %0 to [initialization] %1 : $*Klass
  %5 = function_ref @inguaranteed_user_with_result : $@convention(thin) (@in_guaranteed Klass) -> @out Klass
  %6 = apply %5(%1a, %1) : $@convention(thin) (@in_guaranteed Klass) -> @out Klass
  destroy_addr %1a : $*Klass
  destroy_addr %1 : $*Klass
  dealloc_stack %1a : $*Klass
  dealloc_stack %1 : $*Klass
  %9 = tuple ()
  return %9 : $()
}

// CHECK-LABEL: sil @non_overlapping_lifetime : $@convention(thin) (@in Klass) -> () {
// CHECK: bb0([[ARG:%.*]] : $*Klass):
// CHECK-NEXT: [[TMP:%.*]] = alloc_stack $Klass
// CHECK-NEXT: copy_addr [[ARG]] to [initialization] [[TMP]]
// CHECK-NEXT: destroy_addr [[ARG]]
// CHECK: apply %{{[0-9]*}}([[TMP]])
// CHECK-NEXT: destroy_addr [[TMP]]
// CHECK-NEXT: dealloc_stack [[TMP]]
// CHECK-NEXT: tuple
// CHECK-NEXT: return
// CHECK-NEXT: } // end sil function 'non_overlapping_lifetime'
sil @non_overlapping_lifetime : $@convention(thin) (@in Klass) -> () {
bb0(%0 : $*Klass):
  %1a = alloc_stack $Klass
  %1 = alloc_stack $Klass
  %2 = alloc_stack $Klass
  copy_addr %0 to [initialization] %2 : $*Klass
  copy_addr [take] %2 to [initialization] %1 : $*Klass
  dealloc_stack %2 : $*Klass
  copy_addr %1 to [initialization] %1a : $*Klass
  destroy_addr %0 : $*Klass
  destroy_addr %1 : $*Klass
  dealloc_stack %1 : $*Klass
  %3 = function_ref @inguaranteed_user_without_result : $@convention(thin) (@in_guaranteed Klass) -> ()
  apply %3(%1a) : $@convention(thin) (@in_guaranteed Klass) -> ()
  destroy_addr %1a : $*Klass
  dealloc_stack %1a : $*Klass
  %9999 = tuple()
  return %9999 : $()
}
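
// In the next test the source is mutated while the temporary is still in use:
// the callee takes the copy as @in_guaranteed and the original as @inout at
// the same time, roughly the shape SILGen would emit for self-referential
// source like 'a.append(a)'. Illustrative sketch (placeholder names):
//
//   copy_addr %src to [initialization] %t
//   apply %f(%t, %src)   // %f : (@in_guaranteed Klass, @inout Klass) -> ()
//
// The callee may observe %t while writing %src, so the copy must be kept.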

sil @$createKlass : $@convention(thin) () -> @out Klass
sil @$appendKlass : $@convention(method) (@in_guaranteed Klass, @inout Klass) -> ()

// CHECK-LABEL: sil @$overlapping_lifetime_in_function_all : $@convention(thin) () -> @out Klass {
// CHECK: [[S1:%.*]] = alloc_stack $Klass
// CHECK: [[S2:%.*]] = alloc_stack $Klass
// CHECK: copy_addr [[S1]] to [initialization] [[S2]]
// CHECK: apply {{%.*}}([[S2]], [[S1]])
// CHECK: }
sil @$overlapping_lifetime_in_function_all : $@convention(thin) () -> @out Klass {
bb0(%0 : $*Klass):
  %1 = alloc_stack $Klass
  %2 = function_ref @$createKlass : $@convention(thin) () -> @out Klass
  %3 = apply %2(%1) : $@convention(thin) () -> @out Klass
  %4 = alloc_stack $Klass
  copy_addr %1 to [initialization] %4 : $*Klass
  %6 = function_ref @$appendKlass : $@convention(method) (@in_guaranteed Klass, @inout Klass) -> ()
  %7 = apply %6(%4, %1) : $@convention(method) (@in_guaranteed Klass, @inout Klass) -> ()
  destroy_addr %4 : $*Klass
  dealloc_stack %4 : $*Klass
  copy_addr [take] %1 to [initialization] %0 : $*Klass
  dealloc_stack %1 : $*Klass
  %12 = tuple ()
  return %12 : $()
}

protocol P {
  func foo()
}

sil @getP : $@convention(thin) () -> @out Optional<P>

// CHECK-LABEL: sil @handle_open_existential_addr : $@convention(thin) () -> () {
// CHECK: [[P:%.*]] = unchecked_take_enum_data_addr
// CHECK-NOT: copy_addr
// CHECK: open_existential_addr immutable_access [[P]]
// CHECK: }
sil @handle_open_existential_addr : $@convention(thin) () -> () {
bb0:
  %2 = alloc_stack $Optional<P>
  %3 = function_ref @getP : $@convention(thin) () -> @out Optional<P>
  %4 = apply %3(%2) : $@convention(thin) () -> @out Optional<P>
  cond_br undef, bb1, bb3

bb1:
  %9 = unchecked_take_enum_data_addr %2 : $*Optional<P>, #Optional.some!enumelt
  %10 = alloc_stack $P
  copy_addr %9 to [initialization] %10 : $*P
  %13 = open_existential_addr immutable_access %10 : $*P to $*@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P
  %14 = witness_method $@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P, #P.foo : <Self where Self : P> (Self) -> () -> (), %13 : $*@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
  %15 = apply %14<@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P>(%13) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
  destroy_addr %2 : $*Optional<P>
  destroy_addr %10 : $*P
  dealloc_stack %10 : $*P
  dealloc_stack %2 : $*Optional<P>
  br bb2

bb2:
  %23 = tuple ()
  return %23 : $()

bb3:
  destroy_addr %2 : $*Optional<P>
  dealloc_stack %2 : $*Optional<P>
  br bb2
}

// CHECK-LABEL: sil @open_existential_addr_blocks_optimization : $@convention(thin) () -> () {
// CHECK: [[P:%.*]] = alloc_stack $P
// CHECK: copy_addr {{.*}} to [initialization] [[P]]
// CHECK: }
sil @open_existential_addr_blocks_optimization : $@convention(thin) () -> () {
bb0:
  %2 = alloc_stack $Optional<P>
  %3 = function_ref @getP : $@convention(thin) () -> @out Optional<P>
  %4 = apply %3(%2) : $@convention(thin) () -> @out Optional<P>
  cond_br undef, bb1, bb3

bb1:
  %9 = unchecked_take_enum_data_addr %2 : $*Optional<P>, #Optional.some!enumelt
  %10 = alloc_stack $P
  copy_addr %9 to [initialization] %10 : $*P
  destroy_addr %2 : $*Optional<P>
  %13 = open_existential_addr immutable_access %10 : $*P to $*@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P
  %14 = witness_method $@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P, #P.foo : <Self where Self : P> (Self) -> () -> (), %13 : $*@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
  %15 = apply %14<@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P>(%13) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
  destroy_addr %10 : $*P
  dealloc_stack %10 : $*P
  dealloc_stack %2 : $*Optional<P>
  br bb2

bb2:
  %23 = tuple ()
  return %23 : $()

bb3:
  destroy_addr %2 : $*Optional<P>
  dealloc_stack %2 : $*Optional<P>
  br bb2
}

// CHECK-LABEL: sil @witness_method_blocks_optimization : $@convention(thin) () -> () {
// CHECK: [[P:%.*]] = alloc_stack $P
// CHECK: copy_addr {{.*}} to [initialization] [[P]]
// CHECK: }
sil @witness_method_blocks_optimization : $@convention(thin) () -> () {
bb0:
  %2 = alloc_stack $Optional<P>
  %3 = function_ref @getP : $@convention(thin) () -> @out Optional<P>
  %4 = apply %3(%2) : $@convention(thin) () -> @out Optional<P>
  cond_br undef, bb1, bb3

bb1:
  %9 = unchecked_take_enum_data_addr %2 : $*Optional<P>, #Optional.some!enumelt
  %10 = alloc_stack $P
  copy_addr %9 to [initialization] %10 : $*P
  %13 = open_existential_addr immutable_access %10 : $*P to $*@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P
  destroy_addr %2 : $*Optional<P>
  %14 = witness_method $@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P, #P.foo : <Self where Self : P> (Self) -> () -> (), %13 : $*@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
  %15 = apply %14<@opened("5E7A6328-EF75-11E9-A383-D0817AD3F637") P>(%13) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@in_guaranteed τ_0_0) -> ()
  destroy_addr %10 : $*P
  dealloc_stack %10 : $*P
  dealloc_stack %2 : $*Optional<P>
  br bb2

bb2:
  %23 = tuple ()
  return %23 : $()

bb3:
  destroy_addr %2 : $*Optional<P>
  dealloc_stack %2 : $*Optional<P>
  br bb2
}

///////////////////////////////////////////////////////////////////////////////
// Test checkTempObjectDestroy
// Use-after-free crashing an XCTest.

sil @takeGuaranteedObj : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()

// Do not remove a copy that is released via a load (because TempRValueOpt is
// an address-based optimization that does not know how to remove releases;
// trying to do that would reduce to ARC optimization).
// CHECK-LABEL: sil @copyWithLoadRelease : $@convention(thin) (@in_guaranteed Builtin.NativeObject) -> () {
// CHECK: bb0(%0 : $*Builtin.NativeObject):
// CHECK: [[STK:%.*]] = alloc_stack $Builtin.NativeObject
// CHECK: copy_addr %0 to [initialization] [[STK]] : $*Builtin.NativeObject
// CHECK: [[VAL:%.*]] = load [[STK]] : $*Builtin.NativeObject
// CHECK: apply %{{.*}}([[VAL]]) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
// CHECK: release_value [[VAL]] : $Builtin.NativeObject
// CHECK: dealloc_stack [[STK]] : $*Builtin.NativeObject
// CHECK-LABEL: } // end sil function 'copyWithLoadRelease'
sil @copyWithLoadRelease : $@convention(thin) (@in_guaranteed Builtin.NativeObject) -> () {
bb0(%0 : $*Builtin.NativeObject):
  %stk = alloc_stack $Builtin.NativeObject
  copy_addr %0 to [initialization] %stk : $*Builtin.NativeObject
  %obj = load %stk : $*Builtin.NativeObject
  %f = function_ref @takeGuaranteedObj : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
  %call = apply %f(%obj) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
  release_value %obj : $Builtin.NativeObject
  dealloc_stack %stk : $*Builtin.NativeObject
  %v = tuple ()
  return %v : $()
}

// Remove a copy that is released via a load, as long as it was a copy [take].
// CHECK-LABEL: sil @takeWithLoadRelease : $@convention(thin) (@in Builtin.NativeObject) -> () {
// CHECK: bb0(%0 : $*Builtin.NativeObject):
// CHECK: [[V:%.*]] = load %0 : $*Builtin.NativeObject
// CHECK: apply %{{.*}}([[V]]) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
// CHECK: release_value [[V]] : $Builtin.NativeObject
// CHECK-LABEL: } // end sil function 'takeWithLoadRelease'
sil @takeWithLoadRelease : $@convention(thin) (@in Builtin.NativeObject) -> () {
bb0(%0 : $*Builtin.NativeObject):
  %stk = alloc_stack $Builtin.NativeObject
  copy_addr [take] %0 to [initialization] %stk : $*Builtin.NativeObject
  %obj = load %stk : $*Builtin.NativeObject
  %f = function_ref @takeGuaranteedObj : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
  %call = apply %f(%obj) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
  release_value %obj : $Builtin.NativeObject
  dealloc_stack %stk : $*Builtin.NativeObject
  %v = tuple ()
  return %v : $()
}

// CHECK-LABEL: sil @eliminate_fix_lifetime_on_dest_copyaddr : $@convention(thin) (@inout Klass) -> () {
// CHECK-NOT: alloc_stack
// CHECK: fix_lifetime %0
// CHECK-NOT: alloc_stack
// CHECK: } // end sil function 'eliminate_fix_lifetime_on_dest_copyaddr'
sil @eliminate_fix_lifetime_on_dest_copyaddr : $@convention(thin) (@inout Klass) -> () {
bb0(%0 : $*Klass):
  %3 = alloc_stack $Klass
  copy_addr %0 to [initialization] %3 : $*Klass
  fix_lifetime %3 : $*Klass
  destroy_addr %3 : $*Klass
  dealloc_stack %3 : $*Klass
  %9999 = tuple()
  return %9999 : $()
}
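
// Recap of the two load-release tests above (illustrative sketch, placeholder
// names, not a test case):
//
//   copy_addr %src to [initialization] %t    // plain copy: retains the value
//   %v = load %t                             // %v takes over that reference
//   release_value %v                         // balanced by this release
//
// Redirecting the load to %src would require deleting the release, which this
// address-based pass does not do. With 'copy_addr [take]' no extra reference
// is created; the temporary only moves the value, so the load and release can
// be redirected to the source and the temporary removed.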