// RUN: %target-sil-opt -enforce-exclusivity=none -enable-sil-verify-all %s -copy-forwarding -enable-copyforwarding -enable-destroyhoisting | %FileCheck %s

// Declare this SIL to be canonical because some tests break raw SIL
// conventions. e.g. address-type block args. -enforce-exclusivity=none is also
// required to allow address-type block args in canonical SIL.
sil_stage canonical

import Builtin
import Swift

class AClass {}

// NOTE(review): the generic parameter lists (<T>, apply substitutions, and
// type arguments such as Optional<T>) had been stripped from this file,
// leaving it unparsable; they are restored below to match the surviving
// <τ_0_0> function types and the way each value is used.
sil @f_in : $@convention(thin) <T> (@in T) -> ()
sil @f_in_guaranteed : $@convention(thin) <T> (@in_guaranteed T) -> ()
sil @f_out : $@convention(thin) <T> () -> @out T
sil @f_owned : $@convention(thin) <T> (@owned T) -> ()

protocol P {
  init(_ i : Int32)
  mutating func poke()
}

// CHECK-LABEL: nrvo
// CHECK-NOT: copy_addr
// CHECK: return
sil hidden @nrvo : $@convention(thin) <T where T : P> (Bool) -> @out T {
bb0(%0 : $*T, %1 : $Bool):
  %2 = alloc_stack $T, var, name "ro"             // users: %9, %15, %17, %19
  debug_value_addr %0 : $*T
  debug_value_addr %2 : $*T
  %3 = struct_extract %1 : $Bool, #Bool._value    // user: %4
  cond_br %3, bb1, bb2                            // id: %4

bb1:                                              // Preds: bb0
  %5 = metatype $@thick T.Type                    // user: %9
  %6 = witness_method $T, #P.init!allocator : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (Int32, @thick τ_0_0.Type) -> @out τ_0_0 // user: %9
  %7 = integer_literal $Builtin.Int32, 10         // user: %8
  %8 = struct $Int32 (%7 : $Builtin.Int32)        // user: %9
  %9 = apply %6<T>(%2, %8, %5) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (Int32, @thick τ_0_0.Type) -> @out τ_0_0
  br bb3                                          // id: %10

bb2:                                              // Preds: bb0
  %11 = metatype $@thick T.Type                   // user: %15
  %12 = witness_method $T, #P.init!allocator : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (Int32, @thick τ_0_0.Type) -> @out τ_0_0 // user: %15
  %13 = integer_literal $Builtin.Int32, 1         // user: %14
  %14 = struct $Int32 (%13 : $Builtin.Int32)      // user: %15
  %15 = apply %12<T>(%2, %14, %11) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (Int32, @thick τ_0_0.Type) -> @out τ_0_0
  br bb3                                          // id: %16

bb3:                                              // Preds: bb1 bb2
  copy_addr [take] %2 to [initialization] %0 : $*T // id: %17
  %18 = tuple ()                                  // user: %20
  debug_value_addr %0 : $*T
  debug_value_addr %2 : $*T
  dealloc_stack %2 : $*T                          // id: %19
  return %18 : $()                                // id: %20
}

// CHECK-LABEL: dont_assert_nrvo
// CHECK: return
sil hidden @dont_assert_nrvo : $@convention(thin) <T where T : P> (Bool) -> @out T {
bb0(%0 : $*T, %1 : $Bool):
  %2 = alloc_stack $T, var, name "ro"             // users: %9, %15, %17, %19
  debug_value_addr %0 : $*T
  debug_value_addr %2 : $*T
  %3 = struct_extract %1 : $Bool, #Bool._value    // user: %4
  %5 = metatype $@thick T.Type                    // user: %9
  %6 = witness_method $T, #P.init!allocator : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (Int32, @thick τ_0_0.Type) -> @out τ_0_0 // user: %9
  %7 = integer_literal $Builtin.Int32, 10         // user: %8
  %8 = struct $Int32 (%7 : $Builtin.Int32)        // user: %9
  %9 = apply %6<T>(%2, %8, %5) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (Int32, @thick τ_0_0.Type) -> @out τ_0_0
  br bb3(%0 : $*T)

bb3(%10 : $*T):                                   // Preds: bb1 bb2
  copy_addr [take] %2 to [initialization] %10 : $*T // id: %17
  %18 = tuple ()                                  // user: %20
  debug_value_addr %0 : $*T
  debug_value_addr %2 : $*T
  dealloc_stack %2 : $*T                          // id: %19
  return %18 : $()                                // id: %20
}

//CHECK-LABEL: forward_init
//CHECK-NOT: copy_addr
//CHECK-NOT: destroy_addr
//CHECK: return
sil hidden @forward_init : $@convention(thin) <T> (@in T) -> () {
bb0(%0 : $*T):
  debug_value_addr %0 : $*T
  %l1 = alloc_stack $T
  copy_addr %0 to [initialization] %l1 : $*T
  %f1 = function_ref @f_in : $@convention(thin) <τ_0_0> (@in τ_0_0) -> ()
  %c1 = apply %f1<T>(%l1) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> ()
  debug_value_addr %l1 : $*T
  dealloc_stack %l1 : $*T
  debug_value_addr %0 : $*T
  destroy_addr %0 : $*T
  %r1 = tuple ()
  return %r1 : $()
}

//CHECK-LABEL: dont_assert_on_basic_block_arg
//CHECK: return
sil hidden @dont_assert_on_basic_block_arg : $@convention(thin) <T> (@in T) -> () {
bb0(%0 : $*T):
  debug_value_addr %0 : $*T
  br bb1(%0 : $*T)

bb1(%1 : $*T):
  %l1 = alloc_stack $T
  copy_addr %1 to [initialization] %l1 : $*T
  %f1 = function_ref @f_in : $@convention(thin) <τ_0_0> (@in τ_0_0) -> ()
  %c1 = apply %f1<T>(%l1) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> ()
  debug_value_addr %l1 : $*T
  dealloc_stack %l1 : $*T
  debug_value_addr %0 : $*T
  destroy_addr %0 : $*T
  %r1 = tuple ()
  return %r1 : $()
}

//CHECK-LABEL: forward_noinit
//CHECK-NOT: copy_addr
//CHECK: destroy_addr
//CHECK: return
sil hidden @forward_noinit : $@convention(thin) <T> (@in T) -> () {
bb0(%0 : $*T):
  debug_value_addr %0 : $*T
  %l1 = alloc_stack $T
  %f1 = function_ref @f_out : $@convention(thin) <τ_0_0> () -> @out τ_0_0
  %c1 = apply %f1<T>(%l1) : $@convention(thin) <τ_0_0> () -> @out τ_0_0
  copy_addr %0 to %l1 : $*T
  debug_value_addr %l1 : $*T
  debug_value_addr %0 : $*T
  %f2 = function_ref @f_in : $@convention(thin) <τ_0_0> (@in τ_0_0) -> ()
  %c2 = apply %f2<T>(%l1) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> ()
  dealloc_stack %l1 : $*T
  destroy_addr %0 : $*T
  %r1 = tuple ()
  return %r1 : $()
}

//CHECK-LABEL: forward_takeinit
//CHECK-NOT: copy_addr
//CHECK-NOT: destroy_addr
//CHECK: return
sil hidden @forward_takeinit : $@convention(thin) <T> (@in T) -> () {
bb0(%0 : $*T):
  debug_value_addr %0 : $*T
  %l1 = alloc_stack $T
  copy_addr [take] %0 to [initialization] %l1 : $*T
  %f1 = function_ref @f_in : $@convention(thin) <τ_0_0> (@in τ_0_0) -> ()
  %c1 = apply %f1<T>(%l1) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> ()
  debug_value_addr %0 : $*T
  debug_value_addr %l1 : $*T
  dealloc_stack %l1 : $*T
  %r1 = tuple ()
  return %r1 : $()
}

//CHECK-LABEL: forward_takenoinit
//CHECK-NOT: copy_addr
//CHECK: destroy_addr
//CHECK: return
sil hidden @forward_takenoinit : $@convention(thin) <T> (@in T) -> () {
bb0(%0 : $*T):
  debug_value_addr %0 : $*T
  %l1 = alloc_stack $T
  %f1 = function_ref @f_out : $@convention(thin) <τ_0_0> () -> @out τ_0_0
  %c1 = apply %f1<T>(%l1) : $@convention(thin) <τ_0_0> () -> @out τ_0_0
  copy_addr [take] %0 to %l1 : $*T
  %f2 = function_ref @f_in : $@convention(thin) <τ_0_0> (@in τ_0_0) -> ()
  %c2 = apply %f2<T>(%l1) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> ()
  debug_value_addr %0 : $*T
  debug_value_addr %l1 : $*T
  dealloc_stack %l1 : $*T
  %r1 = tuple ()
  return %r1 : $()
}

//CHECK-LABEL: backward_init
//CHECK-NOT: copy_addr
//CHECK-NOT: destroy_addr
//CHECK: return
sil hidden @backward_init : $@convention(thin) <T> () -> @out T {
bb0(%0 : $*T):
  %l1 = alloc_stack $T
  %f1 = function_ref @f_out : $@convention(thin) <τ_0_0> () -> @out τ_0_0
  %c1 = apply %f1<T>(%l1) : $@convention(thin) <τ_0_0> () -> @out τ_0_0
  debug_value_addr %0 : $*T
  debug_value_addr %l1 : $*T
  copy_addr %l1 to [initialization] %0 : $*T
  debug_value_addr %0 : $*T
  debug_value_addr %l1 : $*T
  destroy_addr %l1 : $*T
  dealloc_stack %l1 : $*T
  %t = tuple ()
  return %t : $()
}

//CHECK-LABEL: dont_assert_backward_init
//CHECK: return
sil hidden @dont_assert_backward_init : $@convention(thin) <T> () -> @out T {
bb0(%0 : $*T):
  %l1 = alloc_stack $T
  br bb1(%0 : $*T)

bb1(%1 : $*T):
  %f1 = function_ref @f_out : $@convention(thin) <τ_0_0> () -> @out τ_0_0
  %c1 = apply %f1<T>(%l1) : $@convention(thin) <τ_0_0> () -> @out τ_0_0
  debug_value_addr %0 : $*T
  debug_value_addr %l1 : $*T
  copy_addr %l1 to [initialization] %0 : $*T
  debug_value_addr %0 : $*T
  debug_value_addr %l1 : $*T
  destroy_addr %l1 : $*T
  dealloc_stack %l1 : $*T
  %t = tuple ()
  return %t : $()
}

//CHECK-LABEL: backward_noinit
//CHECK: copy_addr
//CHECK: destroy_addr
//CHECK: return
sil hidden @backward_noinit : $@convention(thin) <T> () -> @out T {
bb0(%0 : $*T):
  %l1 = alloc_stack $T
  %f1 = function_ref @f_out : $@convention(thin) <τ_0_0> () -> @out τ_0_0
  %c1 = apply %f1<T>(%0) : $@convention(thin) <τ_0_0> () -> @out τ_0_0
  %c2 = apply %f1<T>(%l1) : $@convention(thin) <τ_0_0> () -> @out τ_0_0
  copy_addr %l1 to %0 : $*T
  destroy_addr %l1 : $*T
  dealloc_stack %l1 : $*T
  %t = tuple ()
  return %t : $()
}

//CHECK-LABEL: backward_takeinit
//CHECK-NOT: copy_addr
//CHECK-NOT: destroy_addr
//CHECK: return
sil hidden @backward_takeinit : $@convention(thin) <T> () -> @out T {
bb0(%0 : $*T):
  %l1 = alloc_stack $T
  %f1 = function_ref @f_out : $@convention(thin) <τ_0_0> () -> @out τ_0_0
  %c1 = apply %f1<T>(%l1) : $@convention(thin) <τ_0_0> () -> @out τ_0_0
  debug_value_addr %0 : $*T
  debug_value_addr %l1 : $*T
  copy_addr [take] %l1 to [initialization] %0 : $*T
  debug_value_addr %0 : $*T
  debug_value_addr %l1 : $*T
  dealloc_stack %l1 : $*T
  %t = tuple ()
  return %t : $()
}

//CHECK-LABEL: backward_takenoinit
//CHECK: copy_addr
//CHECK-NOT: destroy_addr
//CHECK: return
sil hidden @backward_takenoinit : $@convention(thin) <T> () -> @out T {
bb0(%0 : $*T):
  %l1 = alloc_stack $T
  %f1 = function_ref @f_out : $@convention(thin) <τ_0_0> () -> @out τ_0_0
  %c1 = apply %f1<T>(%0) : $@convention(thin) <τ_0_0> () -> @out τ_0_0
  %c2 = apply %f1<T>(%l1) : $@convention(thin) <τ_0_0> () -> @out τ_0_0
  copy_addr [take] %l1 to %0 : $*T
  dealloc_stack %l1 : $*T
  %t = tuple ()
  return %t : $()
}

//CHECK-LABEL: branch
//CHECK-NOT: copy_addr
//CHECK: return
sil hidden @branch : $@convention(thin) <T> (@in T, Bool) -> () {
bb0(%0 : $*T, %1 : $Bool):
  %2 = struct_extract %1 : $Bool, #Bool._value    // user: %3
  cond_br %2, bb1, bb2                            // id: %3

bb1:                                              // Preds: bb0
  %4 = function_ref @f_in : $@convention(thin) <τ_0_0> (@in τ_0_0) -> () // user: %7
  %5 = alloc_stack $T                             // users: %6, %7, %8
  copy_addr %0 to [initialization] %5 : $*T       // id: %6
  %7 = apply %4<T>(%5) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> ()
  dealloc_stack %5 : $*T                          // id: %8
  br bb2                                          // id: %9

bb2:                                              // Preds: bb0 bb1
  destroy_addr %0 : $*T                           // id: %10
  %11 = tuple ()                                  // user: %12
  return %11 : $()                                // id: %12
}

enum A<T> {
  case Val(T)
  init(_ val: T)
}

sil [transparent] @_TFO8enuminit1A3ValU__fMGS0_Q__FQ_GS0_Q__ : $@convention(thin) <T> (@in T, @thin A<T>.Type) -> @out A<T>

//CHECK-LABEL: enuminit
//CHECK-NOT: copy_addr
//CHECK: return
sil @enuminit : $@convention(thin) <T> (@in T, @thin A<T>.Type) -> @out A<T> {
bb0(%0 : $*A<T>, %1 : $*T, %2 : $@thin A<T>.Type):
  %3 = alloc_stack $A<T>, var, name "sf"          // users: %10, %14, %16
  // function_ref enuminit.A.Val (enuminit.A.Type)(A) -> enuminit.A
  %4 = function_ref @_TFO8enuminit1A3ValU__fMGS0_Q__FQ_GS0_Q__ : $@convention(thin) <τ_0_0> (@in τ_0_0, @thin A<τ_0_0>.Type) -> @out A<τ_0_0> // user: %9
  %5 = metatype $@thin A<T>.Type                  // user: %9
  %6 = alloc_stack $T                             // users: %7, %9, %12
  copy_addr %1 to [initialization] %6 : $*T       // id: %7
  %8 = alloc_stack $A<T>                          // users: %9, %10, %11
  %9 = apply %4<T>(%8, %6, %5) : $@convention(thin) <τ_0_0> (@in τ_0_0, @thin A<τ_0_0>.Type) -> @out A<τ_0_0>
  copy_addr [take] %8 to [initialization] %3 : $*A<T> // id: %10
  dealloc_stack %8 : $*A<T>                       // id: %11
  dealloc_stack %6 : $*T                          // id: %12
  destroy_addr %1 : $*T                           // id: %13
  copy_addr [take] %3 to [initialization] %0 : $*A<T> // id: %14
  %15 = tuple ()                                  // user: %17
  dealloc_stack %3 : $*A<T>                       // id: %16
  return %15 : $()                                // id: %17
}

//CHECK-LABEL: make_addronly
//CHECK-NOT: copy_addr
//CHECK: return
sil hidden @make_addronly : $@convention(thin) <T> () -> @out T {
bb0(%0 : $*T):
  %1 = alloc_stack $T, let, name "t"              // users: %3, %4, %5
  %2 = function_ref @f_out : $@convention(thin) <τ_0_0> () -> @out τ_0_0 // user: %3
  %3 = apply %2<T>(%1) : $@convention(thin) <τ_0_0> () -> @out τ_0_0
  copy_addr [take] %1 to [initialization] %0 : $*T // id: %4
  dealloc_stack %1 : $*T                          // id: %5
  %6 = tuple ()                                   // user: %7
  return %6 : $()                                 // id: %7
}

sil @_TFSq4someU__fMGSqQ__FQ_GSqQ__ : $@convention(thin) <τ_0_0> (@in τ_0_0, @thin Optional<τ_0_0>.Type) -> @out Optional<τ_0_0>

sil @_TFsoi2neU__FTGSqQ__Vs26_OptionalNilComparisonType_Sb : $@convention(thin) <τ_0_0> (@in Optional<τ_0_0>, _OptionalNilComparisonType) -> Bool

//CHECK-LABEL: option_init
//CHECK: alloc_stack
//CHECK: alloc_stack
//CHECK: alloc_stack
//CHECK: copy_addr
//CHECK: copy_addr
//CHECK-NOT: copy_addr
//CHECK: alloc_stack
//CHECK: alloc_stack
//CHECK: copy_addr
//CHECK-NOT: copy_addr
//CHECK: alloc_stack
//CHECK-NOT: copy_addr
//CHECK: copy_addr
//CHECK-NOT: copy_addr
//CHECK: alloc_stack
//CHECK-NOT: copy_addr
//CHECK: return
sil hidden @option_init : $@convention(thin) (@in AnyObject) -> () {
bb0(%0 : $*AnyObject):
  %g0 = alloc_stack $IteratorOverOne<AnyObject> // 831
  %s0 = struct_element_addr %g0 : $*IteratorOverOne<AnyObject>, #IteratorOverOne._elements
  %l0 = alloc_stack $Optional<AnyObject>
  // function_ref Swift.Optional.some (Swift.Optional.Type)(A) -> Swift.Optional
  %f0 = function_ref @_TFSq4someU__fMGSqQ__FQ_GSqQ__ : $@convention(thin) <τ_0_0> (@in τ_0_0, @thin Optional<τ_0_0>.Type) -> @out Optional<τ_0_0>
  %t0 = metatype $@thin Optional<AnyObject>.Type
  %i0 = apply %f0<AnyObject>(%l0, %0, %t0) : $@convention(thin) <τ_0_0> (@in τ_0_0, @thin Optional<τ_0_0>.Type) -> @out Optional<τ_0_0>
  %g1 = alloc_stack $IteratorOverOne<AnyObject> // 850
  %s1 = struct_element_addr %g1 : $*IteratorOverOne<AnyObject>, #IteratorOverOne._elements
  // We can't backward propagate this yet because we can't analyze struct_element_addr copy dest.
  copy_addr [take] %l0 to [initialization] %s1 : $*Optional<AnyObject>
  // We ignore this copy because its Def is used by struct_element_addr
  copy_addr [take] %g1 to [initialization] %g0 : $*IteratorOverOne<AnyObject>
  %l1 = alloc_stack $Optional<AnyObject> // 869
  %l2 = alloc_stack $Optional<AnyObject> // 873
  // We ignore this copy because its Def is used by struct_element_addr
  copy_addr %s0 to [initialization] %l2 : $*Optional<AnyObject>
  %l3 = alloc_stack $Optional<AnyObject> // 877
  %o1 = enum $Optional<AnyObject>, #Optional.none!enumelt
  store %o1 to %l3 : $*Optional<AnyObject>
  // We can't backward propagate this yet because we can't analyze struct_element_addr copy dest.
  copy_addr [take] %l3 to %s0 : $*Optional<AnyObject>
  dealloc_stack %l3 : $*Optional<AnyObject>
  // We can't forward propagate this because l2 is deallocated, but we can backward propagate l1.
  copy_addr [take] %l2 to [initialization] %l1 : $*Optional<AnyObject>
  dealloc_stack %l2 : $*Optional<AnyObject>
  %l4 = alloc_stack $Optional<AnyObject> // 889
  %o2 = load %l1 : $*Optional<AnyObject>
  store %o2 to %l4 : $*Optional<AnyObject>
  %s5 = struct $_OptionalNilComparisonType ()
  retain_value %o2 : $Optional<AnyObject>
  %f1 = function_ref @_TFsoi2neU__FTGSqQ__Vs26_OptionalNilComparisonType_Sb : $@convention(thin) <τ_0_0> (@in Optional<τ_0_0>, _OptionalNilComparisonType) -> Bool
  %c5 = apply %f1<AnyObject>(%l4, %s5) : $@convention(thin) <τ_0_0> (@in Optional<τ_0_0>, _OptionalNilComparisonType) -> Bool
  dealloc_stack %l4 : $*Optional<AnyObject>
  destroy_addr %l1 : $*Optional<AnyObject>
  dealloc_stack %l1 : $*Optional<AnyObject>
  dealloc_stack %g1 : $*IteratorOverOne<AnyObject>
  dealloc_stack %l0 : $*Optional<AnyObject>
  destroy_addr %g0 : $*IteratorOverOne<AnyObject>
  dealloc_stack %g0 : $*IteratorOverOne<AnyObject>
  %p0 = tuple ()
  return %p0 : $()
}

// Premature release of optional NSData
// after optimization
//
// Check that destroy is not hoisted above a retain of a transitively
// referenced object.
//
// CHECK-LABEL: load_nontrivial
// CHECK: load %0 : $*Optional<AClass>
// CHECK-NOT: destroy_addr
// CHECK: unchecked_enum_data %{{.*}} : $Optional<AClass>
// CHECK-NOT: destroy_addr
// CHECK: strong_retain %{{.*}} : $AClass
// CHECK: destroy_addr %0
sil hidden @load_nontrivial : $@convention(thin) () -> () {
bb0:
  %v0 = alloc_stack $Optional<AClass>
  %v1 = alloc_stack $Optional<AClass>
  %f0 = function_ref @f_out : $@convention(thin) <τ_0_0> () -> @out τ_0_0
  %c0 = apply %f0<Optional<AClass>>(%v0) : $@convention(thin) <τ_0_0> () -> @out τ_0_0
  copy_addr %v0 to [initialization] %v1 : $*Optional<AClass>
  %f1 = function_ref @f_in : $@convention(thin) <τ_0_0> (@in τ_0_0) -> ()
  %c1 = apply %f1<Optional<AClass>>(%v1) : $@convention(thin) <τ_0_0> (@in τ_0_0) -> ()
  dealloc_stack %v1 : $*Optional<AClass>
  %l1 = load %v0 : $*Optional<AClass>
  %d1 = unchecked_enum_data %l1 : $Optional<AClass>, #Optional.some!enumelt
  %f2 = function_ref @f_owned : $@convention(thin) <τ_0_0> (@owned τ_0_0) -> ()
  %c2 = apply %f2<AClass>(%d1) : $@convention(thin) <τ_0_0> (@owned τ_0_0) -> ()
  strong_retain %d1 : $AClass
  destroy_addr %v0 : $*Optional<AClass>
  %34 = tuple ()
  dealloc_stack %v0 : $*Optional<AClass>
  return %34 : $()
}

// CHECK-LABEL: sil @nil_comparison
// CHECK: alloc_stack
// CHECK-NOT: copy_addr
// CHECK-NOT: destroy_addr
// CHECK: switch_enum_addr %0
// CHECK: [[D:%.*]] = unchecked_take_enum_data_addr %0
// CHECK: destroy_addr [[D]]
// CHECK: return
sil @nil_comparison : $@convention(thin) <T> (@in Optional<T>) -> Bool {
bb0(%0 : $*Optional<T>):
  %2 = alloc_stack $Optional<T>
  copy_addr %0 to [initialization] %2 : $*Optional<T>
  destroy_addr %0 : $*Optional<T>
  switch_enum_addr %2 : $*Optional<T>, case #Optional.some!enumelt: bb1, case #Optional.none!enumelt: bb2

bb1:
  %6 = unchecked_take_enum_data_addr %2 : $*Optional<T>, #Optional.some!enumelt
  destroy_addr %6 : $*T
  %8 = integer_literal $Builtin.Int1, -1
  br bb3(%8 : $Builtin.Int1)

bb2:
  %10 = integer_literal $Builtin.Int1, 0
  br bb3(%10 : $Builtin.Int1)

bb3(%12 : $Builtin.Int1):
  %13 = struct $Bool (%12 : $Builtin.Int1)
  dealloc_stack %2 : $*Optional<T>
  return %13 : $Bool
}

sil @use : $@convention(thin) <T2> (@inout T2) -> ()

// We currently don't handle reasoning about multiple copy_addr instructions at
// once. With the current logic we must not optimize this case (we would have to
// prove that we can replace both copy_addr to be able to optimize).
// CHECK-LABEL: sil @not_dominated_uses
// CHECK: alloc_stack
// CHECK: cond_br
// CHECK: bb1
// CHECK: copy_addr
// CHECK: apply
// CHECK: br bb3
// CHECK: bb2
// CHECK: copy_addr
// CHECK: apply
// CHECK: br bb3
// CHECK: bb3
// CHECK: apply
// CHECK: destroy_addr
sil @not_dominated_uses : $@convention(thin) <T2> (@in Optional<T2>, @in Optional<T2>, Bool) -> () {
bb0(%0 : $*Optional<T2>, %1 : $*Optional<T2>, %3 : $Bool):
  %4 = alloc_stack $Optional<T2>
  %5 = struct_extract %3 : $Bool, #Bool._value
  %f = function_ref @use : $@convention(thin) <τ_0_0> (@inout τ_0_0) -> ()
  cond_br %5, bb1, bb2

bb1:
  copy_addr [take] %0 to [initialization] %4 : $*Optional<T2>
  %r1 = apply %f<Optional<T2>>(%4) : $@convention(thin) <τ_0_0> (@inout τ_0_0) -> ()
  br bb3

bb2:
  copy_addr [take] %1 to [initialization] %4 : $*Optional<T2>
  %r2 = apply %f<Optional<T2>>(%4) : $@convention(thin) <τ_0_0> (@inout τ_0_0) -> ()
  br bb3

bb3:
  %r3 = apply %f<Optional<T2>>(%4) : $@convention(thin) <τ_0_0> (@inout τ_0_0) -> ()
  destroy_addr %4 : $*Optional<T2>
  dealloc_stack %4 : $*Optional<T2>
  %13 = tuple()
  return %13 : $()
}

//CHECK-LABEL: test_in_guaranteed
//CHECK: copy_addr %1 to [initialization]
//CHECK-NOT: copy_addr
//CHECK-NOT: destroy_addr
//CHECK: return
sil hidden @test_in_guaranteed : $@convention(thin) <T> (@in T) -> @out T {
bb0(%0 : $*T, %1 : $*T):
  %l1 = alloc_stack $T
  copy_addr %1 to [initialization] %l1 : $*T
  %f1 = function_ref @f_in_guaranteed : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> ()
  %c2 = apply %f1<T>(%l1) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0) -> ()
  copy_addr %l1 to [initialization] %0 : $*T
  destroy_addr %l1 : $*T
  dealloc_stack %l1 : $*T
  %t = tuple ()
  return %t : $()
}

// CHECK-LABEL: forward_unchecked_ref_cast_addr
// CHECK: unchecked_ref_cast_addr
// CHECK-NOT: copy_addr
// CHECK: return
sil hidden @forward_unchecked_ref_cast_addr : $@convention(thin) (@in AnyObject) -> @out AClass {
bb0(%0 : $*AClass, %1 : $*AnyObject):
  %3 = alloc_stack $AnyObject                     // user: %10
  %4 = alloc_stack $AnyObject                     // user: %9
  %5 = alloc_stack $AClass                        // users: %6, %7, %8
  unchecked_ref_cast_addr AnyObject in %1 : $*AnyObject to AClass in %5 : $*AClass // id: %6
  copy_addr [take] %5 to [initialization] %0 : $*AClass // id: %7
  dealloc_stack %5 : $*AClass                     // id: %8
  dealloc_stack %4 : $*AnyObject                  // id: %9
  dealloc_stack %3 : $*AnyObject                  // id: %10
  %11 = tuple ()                                  // user: %12
  return %11 : $()                                // id: %12
}

sil @element_use : $@convention(thin) (@inout P) -> ()

// CHECK-LABEL: backward_propagate_enum_init
// CHECK-NOT: copy_addr
// CHECK: %[[TMP:.*]] = init_enum_data_addr %0 : $*Optional<P>
// CHECK: copy_addr %1 to [initialization] %[[TMP]]
// CHECK-NOT: copy_addr
sil @backward_propagate_enum_init : $@convention(thin) (@inout P) -> @out Optional<P> {
bb0(%0 : $*Optional<P>, %1 : $*P):
  %2 = alloc_stack $P
  copy_addr %1 to [initialization] %2 : $*P
  %3 = function_ref @element_use : $@convention(thin) (@inout P) -> ()
  %4 = apply %3(%1) : $@convention(thin) (@inout P) -> ()
  %5 = init_enum_data_addr %0 : $*Optional<P>, #Optional.some!enumelt
  copy_addr %2 to [initialization] %5 : $*P
  inject_enum_addr %0 : $*Optional<P>, #Optional.some!enumelt
  destroy_addr %2 : $*P
  dealloc_stack %2 : $*P
  %27 = tuple ()
  return %27 : $()
}

// CHECK-LABEL: backward_propagate_exi_init
// CHECK-NOT: copy_addr
// CHECK: %[[TMP:.*]] = init_existential_addr %0 : $*P, $T
// CHECK: copy_addr %1 to [initialization] %[[TMP]] : $*T
// CHECK-NOT: copy_addr
sil @backward_propagate_exi_init : $@convention(thin) <T where T : P> (@inout T) -> @out P {
bb0(%0 : $*P, %1 : $*T):
  %2 = alloc_stack $T
  copy_addr %1 to [initialization] %2 : $*T
  %3 = witness_method $T, #P.poke : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@inout τ_0_0) -> ()
  %4 = apply %3<T>(%1) : $@convention(witness_method: P) <τ_0_0 where τ_0_0 : P> (@inout τ_0_0) -> ()
  %5 = init_existential_addr %0 : $*P, $T
  copy_addr [take] %2 to [initialization] %5 : $*T
  dealloc_stack %2 : $*T
  %27 = tuple ()
  return %27 : $()
}

public struct S<T> {
  @_hasStorage var f: T { get set }
  @_hasStorage var g: T { get set }
  init(f: T, g: T)
}

// Test a dead copy that initializes a stack local.
// CHECK-LABEL: deadtemp
// CHECK: %[[G:.*]] = struct_element_addr %0 : $*S<T>, #S.g
// CHECK-NOT: copy_addr
// CHECK: %[[F:.*]] = struct_element_addr %0 : $*S<T>, #S.f
// CHECK: copy_addr %[[G]] to %[[F]] : $*T
// CHECK: return
sil @deadtemp : $@convention(thin) <T> (@inout S<T>) -> () {
bb0(%0 : $*S<T>):
  %1 = struct_element_addr %0 : $*S<T>, #S.g
  %2 = alloc_stack $T
  copy_addr %1 to [initialization] %2 : $*T
  %4 = struct_element_addr %0 : $*S<T>, #S.f
  copy_addr [take] %2 to %4 : $*T
  dealloc_stack %2 : $*T
  %7 = tuple ()
  return %7 : $()
}

// Test assigning into a stack local.
// CHECK-LABEL: deadtemp_assign
// CHECK: %[[G:.*]] = struct_element_addr %0 : $*S<T>, #S.g
// CHECK-NOT: copy_addr
// CHECK: destroy_addr %2 : $*T
// CHECK: %[[F:.*]] = struct_element_addr %0 : $*S<T>, #S.f
// CHECK: copy_addr %[[G]] to %[[F]] : $*T
// CHECK: return
sil @deadtemp_assign : $@convention(thin) <T> (@inout S<T>) -> () {
bb0(%0 : $*S<T>):
  %1 = struct_element_addr %0 : $*S<T>, #S.g
  %2 = alloc_stack $T
  copy_addr %1 to %2 : $*T
  %4 = struct_element_addr %0 : $*S<T>, #S.f
  copy_addr [take] %2 to %4 : $*T
  dealloc_stack %2 : $*T
  %7 = tuple ()
  return %7 : $()
}

// Test a dead copy that initializes a stack local,
// taking the source, and initializing the destination.
// CHECK-LABEL: deadtemp_take_init
// CHECK: %[[G:.*]] = struct_element_addr %0 : $*S<T>, #S.g
// CHECK-NOT: copy_addr
// CHECK: %[[F:.*]] = struct_element_addr %0 : $*S<T>, #S.f
// CHECK: copy_addr [take] %[[G]] to [initialization] %[[F]] : $*T
// CHECK: return
sil @deadtemp_take_init : $@convention(thin) <T> (@inout S<T>) -> () {
bb0(%0 : $*S<T>):
  %1 = struct_element_addr %0 : $*S<T>, #S.g
  %2 = alloc_stack $T
  copy_addr [take] %1 to [initialization] %2 : $*T
  %4 = struct_element_addr %0 : $*S<T>, #S.f
  copy_addr [take] %2 to [initialization] %4 : $*T
  dealloc_stack %2 : $*T
  %7 = tuple ()
  return %7 : $()
}

// Test a dead copy that initializes a stack local and destroy's it later.
// CHECK-LABEL: deadtemp_destroy
// CHECK: %[[G:.*]] = struct_element_addr %0 : $*S<T>, #S.g
// CHECK-NOT: copy_addr
// CHECK: %[[F:.*]] = struct_element_addr %0 : $*S<T>, #S.f
// CHECK: copy_addr %[[G]] to %[[F]] : $*T
// CHECK-NOT: destroy_addr
// CHECK: return
sil @deadtemp_destroy : $@convention(thin) <T> (@inout S<T>) -> () {
bb0(%0 : $*S<T>):
  %1 = struct_element_addr %0 : $*S<T>, #S.g
  %2 = alloc_stack $T
  copy_addr %1 to [initialization] %2 : $*T
  %4 = struct_element_addr %0 : $*S<T>, #S.f
  copy_addr %2 to %4 : $*T
  destroy_addr %2 : $*T
  dealloc_stack %2 : $*T
  %7 = tuple ()
  return %7 : $()
}

struct ObjWrapper {
  var obj: AnyObject
}

// Test that backward copy propagation does not interfere with the previous
// value of the copy's destination. The `load` is a use of the `alloc` value,
// but not a direct use. Since it occurs between the initialization of `temp`
// and the copy from temp into `alloc`, the copy into `alloc` cannot be backward
// propagated.
// Swift CI: resilience bot seg faults in stdlib/RangeReplaceable.swift.gyb.
//
// CHECK-LABEL: sil @testLoadDestroy : $@convention(thin) (@in ObjWrapper, @in ObjWrapper) -> () {
// CHECK: bb0(%0 : $*ObjWrapper, %1 : $*ObjWrapper):
// CHECK: [[ALLOC:%.*]] = alloc_stack $ObjWrapper, var, name "o"
// CHECK: [[ELT_ADDR:%.*]] = struct_element_addr [[ALLOC]] : $*ObjWrapper, #ObjWrapper.obj
// CHECK: copy_addr %0 to [initialization] [[ALLOC]] : $*ObjWrapper
// CHECK: [[TEMP:%.*]] = alloc_stack $ObjWrapper
// CHECK: copy_addr %1 to [initialization] [[TEMP]] : $*ObjWrapper
// CHECK: [[LD:%.*]] = load [[ELT_ADDR]] : $*AnyObject
// CHECK: strong_release [[LD]] : $AnyObject
// CHECK: copy_addr [take] [[TEMP]] to [initialization] [[ALLOC]] : $*ObjWrapper
// CHECK: dealloc_stack [[TEMP]] : $*ObjWrapper
// CHECK: dealloc_stack [[ALLOC]] : $*ObjWrapper
// CHECK: %{{.*}} = tuple ()
// CHECK: return %{{.*}} : $()
// CHECK-LABEL: } // end sil function 'testLoadDestroy'
sil @testLoadDestroy : $@convention(thin) (@in ObjWrapper, @in ObjWrapper) -> () {
bb0(%0 : $*ObjWrapper, %1 : $*ObjWrapper):
  // Fully initialize a new stack var to arg0.
  %alloc = alloc_stack $ObjWrapper, var, name "o"
  %objadr = struct_element_addr %alloc : $*ObjWrapper, #ObjWrapper.obj
  copy_addr %0 to [initialization] %alloc : $*ObjWrapper
  // Fully initialize a temporary to arg1.
  // Rewriting this to %alloc would alias with the subsequent load.
  %temp = alloc_stack $ObjWrapper
  copy_addr %1 to [initialization] %temp : $*ObjWrapper
  // Load and release an reference from arg0 inside the stack var.
  %obj = load %objadr : $*AnyObject
  strong_release %obj : $AnyObject
  // Move `temp` copy of arg1 into the stack var.
  copy_addr [take] %temp to [initialization] %alloc : $*ObjWrapper
  dealloc_stack %temp : $*ObjWrapper
  dealloc_stack %alloc : $*ObjWrapper
  %74 = tuple ()
  return %74 : $()
}

// Helper for multipleUse
sil @multipleArg : $@convention(thin) <T> (@in_guaranteed T, @in_guaranteed T) -> () {
bb0(%0 : $*T, %1 : $*T):
  %r1 = tuple ()
  return %r1 : $()
}

// Test a corner case of forward copy propagation in which simple substitution
// does not work (the source is reinitialized) and need to propagate to an
// instruction with multiple uses of the source.
// CHECK-LABEL: sil hidden @multipleUse : $@convention(thin) <T> (@in T, @in T) -> () {
// CHECK: bb0(%0 : $*T, %1 : $*T):
// CHECK: [[A:%.*]] = alloc_stack $T
// CHECK: copy_addr [take] %0 to [initialization] [[A]] : $*T
// CHECK: [[F:%.*]] = function_ref @multipleArg : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_0) -> ()
// CHECK: [[C:%.*]] = apply [[F]]<T>([[A]], [[A]]) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_0) -> ()
// CHECK: destroy_addr [[A]] : $*T
// CHECK: dealloc_stack [[A]] : $*T
// CHECK: copy_addr [take] %1 to [initialization] %0 : $*T
// CHECK: %{{.*}} = tuple ()
// CHECK: return %{{.*}} : $()
// CHECK: } // end sil function 'multipleUse'
sil hidden @multipleUse : $@convention(thin) <T> (@in T, @in T) -> () {
bb0(%0 : $*T, %1 : $*T):
  %l1 = alloc_stack $T
  copy_addr [take] %0 to [initialization] %l1 : $*T
  %f1 = function_ref @multipleArg : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_0) -> ()
  %c1 = apply %f1<T>(%l1, %l1) : $@convention(thin) <τ_0_0> (@in_guaranteed τ_0_0, @in_guaranteed τ_0_0) -> ()
  destroy_addr %l1 : $*T
  dealloc_stack %l1 : $*T
  destroy_addr %0 : $*T
  // Reinitialize copy's source to avoid a fast isSourceDeadAtCopy check.
  copy_addr [take] %1 to [initialization] %0 : $*T
  %r1 = tuple ()
  return %r1 : $()
}

// Suppress NRVO when a guaranteed argument is stored into a local without
// making a copy first. By eliminating the copy into the outgoing argument, NRVO
// would be propagating an unowned (+0) local value into an owned (+1) result.
// CHECK-LABEL: sil @foo : $@convention(thin) (@guaranteed Builtin.NativeObject) -> @out Builtin.NativeObject {
// CHECK: bb0(%0 : $*Builtin.NativeObject, %1 : $Builtin.NativeObject):
// CHECK: [[LOCAL:%.*]] = alloc_stack $Builtin.NativeObject
// CHECK: copy_addr [[LOCAL]] to [initialization] %0 : $*Builtin.NativeObject
// CHECK-LABEL: } // end sil function 'foo'
sil @foo : $@convention(thin) (@guaranteed Builtin.NativeObject) -> @out Builtin.NativeObject {
bb0(%0 : $*Builtin.NativeObject, %1 : $Builtin.NativeObject):
  %2 = alloc_stack $Builtin.NativeObject
  store %1 to %2 : $*Builtin.NativeObject
  copy_addr %2 to [initialization] %0 : $*Builtin.NativeObject
  dealloc_stack %2 : $*Builtin.NativeObject
  %6 = tuple ()
  return %6 : $()
}

// CHECK-LABEL: sil @testKnownStoredValueUser : $@convention(thin) (@guaranteed AClass) -> @out AClass {
// CHECK: [[ALLOC:%.*]] = alloc_stack $AClass
// CHECK: copy_addr [[ALLOC]] to %0 : $*AClass
// CHECK: strong_retain %1 : $AClass
// CHECK: destroy_addr [[ALLOC]] : $*AClass
// CHECK-LABEL: } // end sil function 'testKnownStoredValueUser'
sil @testKnownStoredValueUser : $@convention(thin) (@guaranteed AClass) -> (@out AClass) {
bb0(%0 : $*AClass, %1 : $AClass):
  %2 = alloc_stack $AClass
  store %1 to %2 : $*AClass
  copy_addr %2 to %0 : $*AClass
  strong_retain %1 : $AClass
  destroy_addr %2 : $*AClass
  dealloc_stack %2 : $*AClass
  %999 = tuple ()
  return %999 : $()
}

// CHECK-LABEL: sil @testExtractedStoredValueUser1 : $@convention(thin) (@guaranteed ObjWrapper) -> @out AnyObject {
// CHECK: bb0(%0 : $*AnyObject, %1 : $ObjWrapper):
// CHECK: [[ALLOC:%.*]] = alloc_stack $AnyObject
// CHECK: [[EXTRACT:%.*]] = struct_extract %1 : $ObjWrapper, #ObjWrapper.obj
// CHECK: store [[EXTRACT]] to [[ALLOC]] : $*AnyObject
// CHECK: copy_addr [[ALLOC]] to %0 : $*AnyObject
// CHECK: strong_retain [[EXTRACT]] : $AnyObject
// CHECK: destroy_addr [[ALLOC]] : $*AnyObject
// CHECK-LABEL: } // end sil function 'testExtractedStoredValueUser1'
sil @testExtractedStoredValueUser1 : $@convention(thin) (@guaranteed ObjWrapper) -> (@out AnyObject) {
bb0(%0 : $*AnyObject, %1 : $ObjWrapper):
  %2 = alloc_stack $AnyObject
  %3 = struct_extract %1 : $ObjWrapper, #ObjWrapper.obj
  store %3 to %2 : $*AnyObject
  copy_addr %2 to %0 : $*AnyObject
  strong_retain %3 : $AnyObject
  destroy_addr %2 : $*AnyObject
  dealloc_stack %2 : $*AnyObject
  %999 = tuple ()
  return %999 : $()
}

// CHECK-LABEL: sil @testExtractedStoredValueUser2 : $@convention(thin) (@guaranteed ObjWrapper) -> @out AnyObject {
// CHECK: bb0(%0 : $*AnyObject, %1 : $ObjWrapper):
// CHECK: [[ALLOC:%.*]] = alloc_stack $AnyObject
// CHECK: [[EXTRACT:%.*]] = struct_extract %1 : $ObjWrapper, #ObjWrapper.obj
// CHECK: store [[EXTRACT]] to [[ALLOC]] : $*AnyObject
// CHECK: copy_addr [[ALLOC]] to %0 : $*AnyObject
// CHECK: retain_value %1 : $ObjWrapper
// CHECK: destroy_addr [[ALLOC]] : $*AnyObject
// CHECK-LABEL: } // end sil function 'testExtractedStoredValueUser2'
sil @testExtractedStoredValueUser2 : $@convention(thin) (@guaranteed ObjWrapper) -> (@out AnyObject) {
bb0(%0 : $*AnyObject, %1 : $ObjWrapper):
  %2 = alloc_stack $AnyObject
  %3 = struct_extract %1 : $ObjWrapper, #ObjWrapper.obj
  store %3 to %2 : $*AnyObject
  copy_addr %2 to %0 : $*AnyObject
  retain_value %1 : $ObjWrapper
  destroy_addr %2 : $*AnyObject
  dealloc_stack %2 : $*AnyObject
  %999 = tuple ()
  return %999 : $()
}

struct AClassWrapper {
  var a: AClass
  var b: AClass
}

// CHECK-LABEL: sil @testUnknownStoredValueUser : $@convention(thin) (@guaranteed AClass) -> @out AClass {
// CHECK: bb0(%0 : $*AClass, %1 : $AClass):
// CHECK: [[ALLOC:%.*]] = alloc_stack $AClass
// CHECK: store %1 to %2 : $*AClass
// CHECK: [[STRUCT:%.*]] = struct $AClassWrapper (%1 : $AClass, %1 : $AClass)
// CHECK: copy_addr [[ALLOC]] to %0 : $*AClass
// CHECK: retain_value [[STRUCT]] : $AClassWrapper
// CHECK: destroy_addr [[ALLOC]] : $*AClass
// CHECK-LABEL: } // end sil function 'testUnknownStoredValueUser'
sil @testUnknownStoredValueUser : $@convention(thin) (@guaranteed AClass) -> (@out AClass) {
bb0(%0 : $*AClass, %1 : $AClass):
  %2 = alloc_stack $AClass
  store %1 to %2 : $*AClass
  %3 = struct $AClassWrapper (%1 : $AClass, %1 : $AClass)
  copy_addr %2 to %0 : $*AClass
  retain_value %3 : $AClassWrapper
  destroy_addr %2 : $*AClass
  dealloc_stack %2 : $*AClass
  %999 = tuple ()
  return %999 : $()
}

// [SR-8526]: Memory leak after switch in release configuration
// CHECK-LABEL: sil @testGlobalHoistToStoredValue : $@convention(thin) (@owned AClass, @inout AClass) -> () {
// CHECK: bb0(%0 : $AClass, %1 : $*AClass):
// CHECK-NEXT: [[LOCAL:%.*]] = alloc_stack $AClass
// CHECK-NEXT: store %0 to [[LOCAL]] : $*AClass
// CHECK-NEXT: copy_addr [[LOCAL]] to %1 : $*AClass
// CHECK-NEXT: retain_value %0 : $AClass
// CHECK-NEXT: store %0 to %1 : $*AClass
// CHECK-NEXT: destroy_addr [[LOCAL]] : $*AClass
// CHECK-NEXT: br bb1
// CHECK: bb1:
// CHECK-NEXT: dealloc_stack [[LOCAL]] : $*AClass
// CHECK-NEXT: release_value %0 : $AClass
// CHECK-NEXT: tuple ()
// CHECK-NEXT: return
// CHECK-LABEL: } // end sil function 'testGlobalHoistToStoredValue'
sil @testGlobalHoistToStoredValue : $@convention(thin) (@owned AClass, @inout AClass) -> () {
bb0(%obj : $AClass, %ptr : $*AClass ):
  %local = alloc_stack $AClass
  store %obj to %local : $*AClass
  copy_addr %local to %ptr : $*AClass
  retain_value %obj : $AClass
  store %obj to %ptr : $*AClass
  br bb1

bb1:
  destroy_addr %local : $*AClass
  dealloc_stack %local : $*AClass
  release_value %obj : $AClass
  %v = tuple ()
  return %v : $()
}