mirror of
https://github.com/apple/swift.git
synced 2026-02-27 18:26:24 +01:00
The previous algorithm was doing an iterative forward data flow analysis followed by a reverse data flow analysis. I suspect the history here is that it was a reverse analysis, and that didn't really work for infinite loops, and so complexity accumulated. The new algorithm is quite straightforward and relies on the allocations being properly jointly post-dominated, just not nested. We simply walk forward through the blocks in consistent-with-dominance order, maintaining the stack of active allocations and deferring deallocations that are improperly nested until we deallocate the allocations above it. The only real subtlety is that we have to delay walking into dead-end regions until we've seen all of the edges into them, so that we can know whether we have a coherent stack state in them. If the state is incoherent, we need to remove any deallocations of previous allocations because we cannot talk correctly about what's on top of the stack. The reason I'm doing this, besides it just being a simpler and hopefully faster algorithm, is that modeling some of the uses of the async stack allocator properly requires builtins that cannot just be semantically reordered. That should be somewhat easier to handle with the new approach, although really (1) we should not have runtime functions that need this and (2) we're going to need a conservatively-correct solution that's different from this anyway because hoisting allocations is *also* limited in its own way. I've attached a rather pedantic proof of the correctness of the algorithm. The thing that concerns me most about the rewritten pass is that it isn't actually validating joint post-dominance on input, so if you give it bad input, it might be a little mystifying to debug the verifier failures.
1249 lines
38 KiB
Plaintext
1249 lines
38 KiB
Plaintext
// RUN: %target-sil-opt -sil-print-types -compute-escape-effects -stack-promotion -enable-sil-verify-all %s | %FileCheck %s
|
|
|
|
// REQUIRES: swift_in_compiler
|
|
|
|
sil_stage canonical
|
|
|
|
import Builtin
|
|
import Swift
|
|
import SwiftShims
|
|
|
|
// A minimal class with one Int32 stored property. It is the primary
// allocation target used by the promotion tests below.
class XX {
@_hasStorage var x: Int32

init()
}
|
|
|
|
// A "container" class holding a reference to an XX. Used to test the cases
// where a promoted object is stored into (and thus kept alive by) another
// object.
class YY {
@_hasStorage var xx: XX

init(newx: XX)
}
|
|
|
|
// Stand-in for the array buffer storage class, used by the array-promotion
// tests at the bottom of this file.
class DummyArrayStorage<Element> {
@_hasStorage var count : Int

@_hasStorage var capacity : Int

init()
}
|
|
|
|
// Helper: stores 0 into the x property of the @guaranteed argument and
// returns the same object. Takes its argument @guaranteed, so it does not
// let the object escape.
sil @xx_init : $@convention(thin) (@guaranteed XX) -> XX {
bb0(%0 : $XX):
%1 = integer_literal $Builtin.Int32, 0
%2 = struct $Int32 (%1 : $Builtin.Int32)
%3 = ref_element_addr %0 : $XX, #XX.x
store %2 to %3 : $*Int32
return %0 : $XX
}
|
|
|
|
// Helper: consumes an @owned YY. The release is intentionally commented out
// (see below) so that escape analysis treats this function as non-capturing.
sil @take_y : $@convention(thin) (@owned YY) -> () {
bb0(%0 : $YY):
// Currently escape analysis cannot see that this release does not capture
// anything. For the test this strong_release is not relevant anyway.
// strong_release %0 : $YY
%t = tuple ()
return %t : $()
}
|
|
|
|
// External declaration: consumes an Int32. Provides a trivial, non-escaping
// use of a loaded value in the tests below.
sil @consume_int : $@convention(thin) (Int32) -> ()
|
|
|
|
// CHECK-LABEL: sil @simple_promote
|
|
// CHECK: [[O:%[0-9]+]] = alloc_ref [stack] $XX
|
|
// CHECK: strong_release
|
|
// CHECK: dealloc_stack_ref [[O]] : $XX
|
|
// CHECK: return
|
|
// Baseline case: the XX object is only passed @guaranteed to xx_init and then
// released, so it does not escape and can be promoted to a stack allocation
// (alloc_ref [stack] + dealloc_stack_ref, per the CHECK lines above).
sil @simple_promote : $@convention(thin) () -> Int32 {
bb0:
%o1 = alloc_ref $XX
%f1 = function_ref @xx_init : $@convention(thin) (@guaranteed XX) -> XX
%n1 = apply %f1(%o1) : $@convention(thin) (@guaranteed XX) -> XX
%l1 = ref_element_addr %n1 : $XX, #XX.x
%l2 = load %l1 : $*Int32
strong_release %n1 : $XX
return %l2 : $Int32
}
|
|
|
|
// CHECK-LABEL: sil @dont_promote_escaping
|
|
// CHECK: alloc_ref $XX
|
|
// CHECK-NOT: dealloc_ref
|
|
// CHECK: return
|
|
// Negative case: the object is returned from the function, i.e. it escapes,
// so the alloc_ref must stay a heap allocation.
sil @dont_promote_escaping : $@convention(thin) () -> XX {
bb0:
%o1 = alloc_ref $XX
%f1 = function_ref @xx_init : $@convention(thin) (@guaranteed XX) -> XX
%n1 = apply %f1(%o1) : $@convention(thin) (@guaranteed XX) -> XX
return %n1 : $XX
}
|
|
|
|
// CHECK-LABEL: sil @promote_in_unreachable :
|
|
// CHECK: bb1:
|
|
// CHECK-NEXT: alloc_ref [stack] $XX
|
|
// CHECK: apply
|
|
// CHECK: apply
|
|
// CHECK-NEXT: unreachable
|
|
// CHECK: } // end sil function 'promote_in_unreachable'
|
|
// Promotion must work even when the allocation lives entirely in a block that
// ends in unreachable and has no final release.
sil @promote_in_unreachable : $@convention(thin) () -> () {
bb0:
cond_br undef, bb1, bb2

bb1:
%o1 = alloc_ref $XX
%f1 = function_ref @xx_init : $@convention(thin) (@guaranteed XX) -> XX
%n1 = apply %f1(%o1) : $@convention(thin) (@guaranteed XX) -> XX
%l1 = ref_element_addr %n1 : $XX, #XX.x
%l2 = load %l1 : $*Int32
%f2 = function_ref @consume_int : $@convention(thin) (Int32) -> ()
%a = apply %f2(%l2) : $@convention(thin) (Int32) -> ()
// An unreachable block may be missing the final release.
unreachable

bb2:
%r = tuple ()
return %r : $()
}
|
|
|
|
// CHECK-LABEL: sil @promote_in_unreachable_loop :
|
|
// CHECK: bb1:
|
|
// CHECK-NEXT: alloc_ref [stack] $XX
|
|
// CHECK: apply
|
|
// CHECK: apply
|
|
// CHECK-NEXT: strong_release
|
|
// CHECK-NEXT: dealloc_stack_ref
|
|
// CHECK-NEXT: cond_br
|
|
// CHECK: } // end sil function 'promote_in_unreachable_loop'
|
|
// The allocation is created and released on every loop iteration, but the
// only loop exit leads to unreachable. The alloc must still be promoted and
// the dealloc_stack_ref inserted inside the loop (see CHECK lines).
sil @promote_in_unreachable_loop : $@convention(thin) () -> () {
bb0:
br bb1

bb1:
%o1 = alloc_ref $XX
%f1 = function_ref @xx_init : $@convention(thin) (@guaranteed XX) -> XX
%n1 = apply %f1(%o1) : $@convention(thin) (@guaranteed XX) -> XX
%l1 = ref_element_addr %n1 : $XX, #XX.x
%l2 = load %l1 : $*Int32
%f2 = function_ref @consume_int : $@convention(thin) (Int32) -> ()
%a = apply %f2(%l2) : $@convention(thin) (Int32) -> ()
strong_release %o1 : $XX
cond_br undef, bb1, bb2

bb2:
unreachable
}
|
|
|
|
// CHECK-LABEL: sil @promote_nested
|
|
// CHECK: [[X:%[0-9]+]] = alloc_ref [stack] $XX
|
|
// CHECK: [[Y:%[0-9]+]] = alloc_ref [stack] $YY
|
|
// CHECK: apply
|
|
// CHECK: dealloc_stack_ref [[Y]] : $YY
|
|
// CHECK: dealloc_stack_ref [[X]] : $XX
|
|
// CHECK: return
|
|
// Two promotable allocations where the first (x) is stored into the second
// (y): both get promoted and the deallocations must come out properly nested
// (y before x, per the CHECK lines above).
sil @promote_nested : $@convention(thin) () -> () {
bb0:
%x = alloc_ref $XX
%y = alloc_ref $YY
%rea = ref_element_addr %y : $YY, #YY.xx
store %x to %rea : $*XX

%f1 = function_ref @take_y : $@convention(thin) (@owned YY) -> ()
%a = apply %f1(%y) : $@convention(thin) (@owned YY) -> ()
%t = tuple ()
return %t : $()
}
|
|
|
|
// CHECK-LABEL: sil @promote_with_unreachable_block
|
|
// CHECK: [[O:%[0-9]+]] = alloc_ref [stack] $XX
|
|
// CHECK: bb1:
|
|
// CHECK-NEXT: unreachable
|
|
// CHECK: bb2:
|
|
// CHECK: strong_release
|
|
// CHECK: dealloc_stack_ref [[O]] : $XX
|
|
// CHECK: return
|
|
// Promotion with one successor ending in unreachable: the dealloc_stack_ref
// is only needed on the path that returns (bb2).
sil @promote_with_unreachable_block : $@convention(thin) () -> Int32 {
bb0:
%o1 = alloc_ref $XX
%f1 = function_ref @xx_init : $@convention(thin) (@guaranteed XX) -> XX
%n1 = apply %f1(%o1) : $@convention(thin) (@guaranteed XX) -> XX
cond_br undef, bb1, bb2

bb1:
unreachable

bb2:
%l1 = ref_element_addr %n1 : $XX, #XX.x
%l2 = load %l1 : $*Int32
strong_release %n1 : $XX
return %l2 : $Int32
}
|
|
|
|
// CHECK-LABEL: sil @no_return_function
|
|
// Just check that we don't crash on this.
|
|
// It's a corner case, so we don't care if stack promotion is done or not.
|
|
// CHECK: unreachable
|
|
// A function that never returns (ends in unreachable). Corner case: the test
// only verifies that the pass does not crash; whether promotion happens is
// not checked (see comments above).
sil @no_return_function : $@convention(thin) () -> Int32 {
bb0:
%o1 = alloc_ref $XX
%f1 = function_ref @xx_init : $@convention(thin) (@guaranteed XX) -> XX
%n1 = apply %f1(%o1) : $@convention(thin) (@guaranteed XX) -> XX
br bb1

bb1:
%l1 = ref_element_addr %n1 : $XX, #XX.x
%l2 = load %l1 : $*Int32
strong_release %n1 : $XX
unreachable
}
|
|
|
|
// CHECK-LABEL: sil @promote_in_loop_with_if
|
|
// CHECK: [[O:%[0-9]+]] = alloc_ref [stack] $XX
|
|
// CHECK: bb2:
|
|
// CHECK: strong_release
|
|
// CHECK-NEXT: dealloc_stack_ref [[O]] : $XX
|
|
// CHECK: bb3:
|
|
// CHECK-NEXT: strong_release
|
|
// CHECK-NEXT: dealloc_stack_ref [[O]] : $XX
|
|
// CHECK: return
|
|
// Allocation inside a loop whose body diverges into two branches; the object
// is released on both. The dealloc_stack_ref must be inserted after the
// release on each branch (bb2 and bb3, per the CHECK lines).
sil @promote_in_loop_with_if : $@convention(thin) () -> Int32 {
bb0:
br bb1

bb1:
%o1 = alloc_ref $XX
%f1 = function_ref @xx_init : $@convention(thin) (@guaranteed XX) -> XX
%n1 = apply %f1(%o1) : $@convention(thin) (@guaranteed XX) -> XX
cond_br undef, bb2, bb3

bb2:
%l1 = ref_element_addr %n1 : $XX, #XX.x
%l2 = load %l1 : $*Int32
strong_release %n1 : $XX
br bb4(%l2 : $Int32)

bb3:
strong_release %n1 : $XX
%i1 = integer_literal $Builtin.Int32, 0
%i2 = struct $Int32 (%i1 : $Builtin.Int32)
br bb4(%i2 : $Int32)

bb4(%a1 : $Int32):
cond_br undef, bb1, bb5

bb5:
return %a1 : $Int32
}
|
|
|
|
// CHECK-LABEL: sil @dont_promote_with_crictical_exit_edge
|
|
// CHECK: alloc_ref $XX
|
|
// CHECK-NOT: dealloc_ref
|
|
// CHECK: } // end sil function 'dont_promote_with_crictical_exit_edge'
|
|
// Negative case: the allocation is in a loop (bb1 branches back to itself)
// and its last use/release is outside the loop, reached over a critical exit
// edge (bb1 -> bb2). There is no block where a dealloc_stack_ref could be
// inserted per-iteration, so promotion must be rejected.
// (The "crictical" typo is baked into the function name and the CHECK-LABEL,
// so it cannot be fixed in isolation.)
sil @dont_promote_with_crictical_exit_edge : $@convention(thin) () -> () {
bb0:
br bb1

bb1:
%o1 = alloc_ref $XX
cond_br undef, bb1, bb2

bb2:
strong_release %o1 : $XX
%r = tuple ()
return %r : $()
}
|
|
|
|
// CHECK-LABEL: sil @promote_with_non_crictial_exit_edge
|
|
// CHECK: bb1:
|
|
// CHECK: alloc_ref [stack] $XX
|
|
// CHECK: bb2:
|
|
// CHECK: dealloc_stack_ref
|
|
// CHECK: bb3:
|
|
// CHECK: strong_release
|
|
// CHECK: dealloc_stack_ref
|
|
// CHECK: } // end sil function 'promote_with_non_crictial_exit_edge'
|
|
// Same shape as the test above, but the back edge goes through a separate
// block (bb2), so the loop-exit edge bb1 -> bb3 is not critical. Promotion
// is possible: a dealloc_stack_ref goes into bb2 (back edge) and into bb3
// after the release (see CHECK lines).
sil @promote_with_non_crictial_exit_edge : $@convention(thin) () -> () {
bb0:
br bb1

bb1:
%o1 = alloc_ref $XX
cond_br undef, bb2, bb3

bb2:
br bb1

bb3:
strong_release %o1 : $XX
%r = tuple ()
return %r : $()
}
|
|
|
|
// CHECK-LABEL: sil @dont_promote_use_of_container_outside_loop1
|
|
// CHECK: bb0:
|
|
// CHECK: [[Y:%[0-9]+]] = alloc_ref [stack] $YY
|
|
// CHECK: bb1:
|
|
// CHECK: alloc_ref $XX
|
|
// CHECK-NOT: dealloc_ref
|
|
// CHECK: bb2:
|
|
// CHECK: apply
|
|
// CHECK: dealloc_stack_ref [[Y]] : $YY
|
|
// CHECK: } // end sil function 'dont_promote_use_of_container_outside_loop1'
|
|
// The XX allocated inside the loop is stored into a container (y) that is
// used after the loop, so the XX from one iteration is reachable in the next
// iteration and beyond: XX must stay on the heap. The container y itself does
// not escape (take_y is non-capturing) and is still promoted.
sil @dont_promote_use_of_container_outside_loop1 : $@convention(thin) () -> () {
bb0:
%y = alloc_ref $YY
br bb1

bb1:
%x = alloc_ref $XX
%rea = ref_element_addr %y : $YY, #YY.xx
store %x to %rea : $*XX
cond_br undef, bb1, bb2

bb2:
%f1 = function_ref @take_y : $@convention(thin) (@owned YY) -> ()
%a = apply %f1(%y) : $@convention(thin) (@owned YY) -> ()
%t = tuple ()
return %t : $()
}
|
|
|
|
// CHECK-LABEL: sil @dont_promote_use_of_container_outside_loop2
|
|
// CHECK: alloc_ref [stack] $YY
|
|
// CHECK: alloc_ref $XX
|
|
// CHECK: } // end sil function 'dont_promote_use_of_container_outside_loop2'
|
|
// Variant of the previous test without any use of y after the loop: the
// store into the container is still enough to keep XX from being promoted,
// while YY is promoted.
sil @dont_promote_use_of_container_outside_loop2 : $@convention(thin) () -> () {
bb0:
%y = alloc_ref $YY
br bb1

bb1:
%x = alloc_ref $XX
%rea = ref_element_addr %y : $YY, #YY.xx
store %x to %rea : $*XX
cond_br undef, bb1, bb2

bb2:
%t = tuple ()
return %t : $()
}
|
|
|
|
// CHECK-LABEL: sil @dont_promote_use_of_container_outside_loop3
|
|
// CHECK: alloc_ref [stack] $YY
|
|
// CHECK: alloc_ref $XX
|
|
// CHECK: } // end sil function 'dont_promote_use_of_container_outside_loop3'
|
|
// Same as the previous test, but the loop back edge goes through a separate
// block (bb2), i.e. no critical edge. The container store must still block
// promotion of XX.
sil @dont_promote_use_of_container_outside_loop3 : $@convention(thin) () -> () {
bb0:
%y = alloc_ref $YY
br bb1

bb1:
%x = alloc_ref $XX
%rea = ref_element_addr %y : $YY, #YY.xx
store %x to %rea : $*XX
cond_br undef, bb2, bb3

bb2:
br bb1

bb3:
%t = tuple ()
return %t : $()
}
|
|
|
|
// CHECK-LABEL: sil @dont_promote_use_of_container_outside_loop4
|
|
// CHECK: alloc_ref [stack] $YY
|
|
// CHECK: alloc_ref $XX
|
|
// CHECK: } // end sil function 'dont_promote_use_of_container_outside_loop4'
|
|
// Same pattern with a multi-block loop (bb1 -> bb2, back edge bb3 -> bb1):
// the container store must still block promotion of XX.
sil @dont_promote_use_of_container_outside_loop4 : $@convention(thin) () -> () {
bb0:
%y = alloc_ref $YY
br bb1

bb1:
br bb2

bb2:
%x = alloc_ref $XX
%rea = ref_element_addr %y : $YY, #YY.xx
store %x to %rea : $*XX
cond_br undef, bb3, bb4

bb3:
br bb1

bb4:
%t = tuple ()
return %t : $()
}
|
|
|
|
// CHECK-LABEL: sil {{.*}}@dont_promote_use_before_alloc1
|
|
// CHECK: alloc_ref $XX
|
|
// CHECK-NOT: dealloc_ref
|
|
// CHECK: } // end sil function 'dont_promote_use_before_alloc1'
|
|
// The block argument %a1 carries the object allocated in the PREVIOUS loop
// iteration (or the function argument on the first pass), so it is used
// before the alloc_ref of the current iteration: the allocation does not
// dominate all uses and must not be promoted.
sil @dont_promote_use_before_alloc1 : $@convention(thin) (@guaranteed XX) -> Int32 {
bb0(%0 : $XX):
br bb1(%0 : $XX)

bb1(%a1 : $XX):
%l1 = ref_element_addr %a1 : $XX, #XX.x
%l2 = load %l1 : $*Int32
strong_release %a1 : $XX
%o1 = alloc_ref $XX
cond_br undef, bb1(%o1 : $XX), bb2

bb2:
return %l2 : $Int32
}
|
|
|
|
// CHECK-LABEL: sil {{.*}}@dont_promote_use_before_alloc2
|
|
// CHECK: alloc_ref $XX
|
|
// CHECK: } // end sil function 'dont_promote_use_before_alloc2'
|
|
// Same use-before-alloc pattern, but the value travels to the next iteration
// through an intermediate block argument (bb2 -> bb1). Must not be promoted.
sil @dont_promote_use_before_alloc2 : $@convention(thin) (@guaranteed XX) -> () {
bb0(%0 : $XX):
br bb1(%0 : $XX)

bb1(%a1 : $XX):
strong_release %a1 : $XX
%o1 = alloc_ref $XX
cond_br undef, bb2(%o1 : $XX), bb3

bb2(%a2 : $XX):
br bb1(%a2 : $XX)

bb3:
%t = tuple ()
return %t : $()
}
|
|
|
|
|
|
// CHECK-LABEL: sil {{.*}}@dont_promote_use_before_alloc3
|
|
// CHECK: alloc_ref $XX
|
|
// CHECK: } // end sil function 'dont_promote_use_before_alloc3'
|
|
// Same use-before-alloc pattern with the value forwarded through two block
// arguments (bb1 and bb2) before the release. Must not be promoted.
sil @dont_promote_use_before_alloc3 : $@convention(thin) (@guaranteed XX) -> () {
bb0(%0 : $XX):
br bb1(%0 : $XX)

bb1(%a0 : $XX):
br bb2(%a0 : $XX)

bb2(%a1 : $XX):
strong_release %a1 : $XX
%o1 = alloc_ref $XX
cond_br undef, bb3(%o1 : $XX), bb4

bb3(%a2 : $XX):
br bb1(%a2 : $XX)

bb4:
%t = tuple ()
return %t : $()
}
|
|
|
|
|
|
// CHECK-LABEL: sil @dont_promote_use_before_alloc4
|
|
// CHECK: alloc_ref $XX
|
|
// CHECK: } // end sil function 'dont_promote_use_before_alloc4'
|
|
// Use-before-alloc through memory instead of a block argument: each iteration
// loads and releases the object stored by the previous iteration before
// allocating a new one. Must not be promoted.
sil @dont_promote_use_before_alloc4 : $@convention(thin) () -> () {
bb0:
%s = alloc_stack $XX
br bb1

bb1:
%l = load %s : $*XX
strong_release %l : $XX
%o1 = alloc_ref $XX
store %o1 to %s : $*XX
cond_br undef, bb2, bb3

bb2:
br bb1

bb3:
dealloc_stack %s : $*XX
%t = tuple ()
return %t : $()
}
|
|
|
|
// CHECK-LABEL: sil @promote_with_container_in_loop
|
|
// CHECK: bb1:
|
|
// CHECK: alloc_ref [stack] $YY
|
|
// CHECK: bb2:
|
|
// CHECK: alloc_ref [stack] $XX
|
|
// CHECK: dealloc_stack_ref %{{[0-9]+}} : $XX
|
|
// CHECK: dealloc_stack_ref %{{[0-9]+}} : $YY
|
|
// CHECK: bb3:
|
|
// CHECK: } // end sil function 'promote_with_container_in_loop'
|
|
// Unlike the dont_promote_use_of_container_outside_loop tests, here the
// container y is itself allocated inside the (outer) loop, so the stored XX
// does not outlive its iteration scope: both allocations can be promoted,
// with XX deallocated before YY (see CHECK lines).
sil @promote_with_container_in_loop : $@convention(thin) () -> () {
bb0:
br bb1

bb1:
%y = alloc_ref $YY
br bb2

bb2:
%x = alloc_ref $XX
%rea = ref_element_addr %y : $YY, #YY.xx
store %x to %rea : $*XX
cond_br undef, bb1, bb3

bb3:
%t = tuple ()
return %t : $()
}
|
|
|
|
|
|
// CHECK-LABEL: sil {{.*}}@dont_promote_not_dominated_use
|
|
// CHECK: alloc_ref $XX
|
|
// CHECK: } // end sil function 'dont_promote_not_dominated_use'
|
|
// The release in bb3 operates on a phi that merges the allocation with the
// function argument, so the alloc_ref does not dominate that use: promotion
// must be rejected.
sil @dont_promote_not_dominated_use : $@convention(thin) (@guaranteed XX) -> () {
bb0(%0 : $XX):
cond_br undef, bb1, bb2

bb1:
br bb3(%0 : $XX)

bb2:
%o1 = alloc_ref $XX
br bb3(%o1 : $XX)

bb3(%a1 : $XX):
strong_release %a1 : $XX
%t = tuple ()
return %t : $()
}
|
|
|
|
// CHECK-LABEL: sil @promote_with_not_post_dominating_container
|
|
// CHECK: bb0:
|
|
// CHECK: alloc_ref [stack] $YY
|
|
// CHECK: bb1:
|
|
// CHECK: apply
|
|
// CHECK: dealloc_stack_ref %{{[0-9]+}} : $YY
|
|
// CHECK: bb2:
|
|
// CHECK: alloc_ref [stack] $XX
|
|
// CHECK: store
|
|
// CHECK: dealloc_stack_ref %{{[0-9]+}} : $XX
|
|
// CHECK: dealloc_stack_ref %{{[0-9]+}} : $YY
|
|
// CHECK: bb3:
|
|
// CHECK: } // end sil function 'promote_with_not_post_dominating_container'
|
|
// y is consumed on one path (bb1) and used as a container for x on the other
// (bb2). Both allocations are still promotable: dealloc_stack_ref for y is
// inserted on both paths, and for x only on the bb2 path (see CHECK lines).
sil @promote_with_not_post_dominating_container : $@convention(thin) () -> () {
bb0:
%y = alloc_ref $YY
cond_br undef, bb1, bb2

bb1:
%f1 = function_ref @take_y : $@convention(thin) (@owned YY) -> ()
%a = apply %f1(%y) : $@convention(thin) (@owned YY) -> ()
br bb3

bb2:
%x = alloc_ref $XX
%rea = ref_element_addr %y : $YY, #YY.xx
store %x to %rea : $*XX
br bb3

bb3:
%t = tuple ()
return %t : $()
}
|
|
|
|
// CHECK-LABEL: sil @dont_promote_no_outermost_dominating_block
|
|
// CHECK: alloc_ref $YY
|
|
// CHECK: alloc_ref $YY
|
|
// CHECK: alloc_ref $XX
|
|
// CHECK: } // end sil function 'dont_promote_no_outermost_dominating_block'
|
|
// The container reaching the loop is a phi of two different YY allocations
// (%y1/%y2), so there is no single dominating allocation block to anchor
// promotion: neither the YYs nor the XX stored into them may be promoted.
sil @dont_promote_no_outermost_dominating_block : $@convention(thin) () -> () {
bb0:
cond_br undef, bb1, bb2

bb1:
%y1 = alloc_ref $YY
br bb3(%y1 : $YY)

bb2:
%y2 = alloc_ref $YY
br bb3(%y2 : $YY)

bb3(%ya : $YY):
br bb4

bb4:
%x = alloc_ref $XX
%rea = ref_element_addr %ya : $YY, #YY.xx
store %x to %rea : $*XX
cond_br undef, bb5, bb6

bb5:
br bb4

bb6:
%t = tuple ()
return %t : $()
}
|
|
|
|
|
|
// CHECK-LABEL: sil @promote_with_use_in_loop
|
|
// CHECK: [[O:%[0-9]+]] = alloc_ref [stack] $XX
|
|
// CHECK: {{^}}bb2:
|
|
// CHECK-NEXT: strong_release
|
|
// CHECK-NEXT: dealloc_stack_ref [[O]] : $XX
|
|
// CHECK-NEXT: return
|
|
// The allocation is outside the loop and all loop uses (retain/load/release)
// are dominated by it; the final release is after the loop. Promotion places
// the dealloc_stack_ref after that final release in bb2 (see CHECK lines).
sil @promote_with_use_in_loop : $@convention(thin) () -> Int32 {
bb0:
%o1 = alloc_ref $XX
%f1 = function_ref @xx_init : $@convention(thin) (@guaranteed XX) -> XX
%n1 = apply %f1(%o1) : $@convention(thin) (@guaranteed XX) -> XX
br bb1

bb1:
strong_retain %n1 : $XX
%l1 = ref_element_addr %n1 : $XX, #XX.x
%l2 = load %l1 : $*Int32
strong_release %n1 : $XX
cond_br undef, bb1, bb2

bb2:
strong_release %n1 : $XX
return %l2 : $Int32
}
|
|
|
|
// CHECK-LABEL: sil @promote_with_use_in_multi_block_backedge_loop
|
|
// CHECK: [[O:%[0-9]+]] = alloc_ref [stack] $XX
|
|
// CHECK: {{^}}bb4:
|
|
// CHECK-NEXT: strong_release
|
|
// CHECK-NEXT: dealloc_stack_ref [[O]] : $XX
|
|
// CHECK-NEXT: return
|
|
// Same as promote_with_use_in_loop, but the loop back edge spans multiple
// blocks (bb2 -> bb3 -> bb1). Promotion still succeeds, with the
// dealloc_stack_ref after the final release in bb4 (see CHECK lines).
sil @promote_with_use_in_multi_block_backedge_loop : $@convention(thin) () -> Int32 {
bb0:
%o1 = alloc_ref $XX
%f1 = function_ref @xx_init : $@convention(thin) (@guaranteed XX) -> XX
%n1 = apply %f1(%o1) : $@convention(thin) (@guaranteed XX) -> XX
br bb1

bb1:
strong_retain %n1 : $XX
%l1 = ref_element_addr %n1 : $XX, #XX.x
%l2 = load %l1 : $*Int32
strong_release %n1 : $XX
cond_br undef, bb2, bb4

bb2:
br bb3

bb3:
br bb1

bb4:
strong_release %n1 : $XX
return %l2 : $Int32
}
|
|
|
|
// CHECK-LABEL: sil @promote_with_other_stack_allocs
|
|
// CHECK: [[O:%[0-9]+]] = alloc_ref [stack] $XX
|
|
// CHECK: {{^}}bb5:
|
|
// CHECK-NEXT: dealloc_stack
|
|
// CHECK-NEXT: dealloc_stack_ref [[O]] : $XX
|
|
// CHECK-NEXT: return
|
|
// Interaction with an existing alloc_stack (%s1): the promoted alloc_ref
// precedes %s1, so the new dealloc_stack_ref must be inserted after %s1's
// dealloc_stack in the exit block to keep stack nesting valid (see CHECK
// lines for bb5).
sil @promote_with_other_stack_allocs : $@convention(thin) () -> Int32 {
bb0:
%o1 = alloc_ref $XX
%f1 = function_ref @xx_init : $@convention(thin) (@guaranteed XX) -> XX
%n1 = apply %f1(%o1) : $@convention(thin) (@guaranteed XX) -> XX
%l1 = ref_element_addr %n1 : $XX, #XX.x
%s1 = alloc_stack $Int32
%l2 = load %l1 : $*Int32
strong_release %n1 : $XX
br bb1

bb1:
cond_br undef, bb2, bb3

bb2:
br bb4(%l2 : $Int32)

bb3:
%i1 = integer_literal $Builtin.Int32, 0
%i2 = struct $Int32 (%i1 : $Builtin.Int32)
br bb4(%i2 : $Int32)

bb4(%a1 : $Int32):
cond_br undef, bb1, bb5

bb5:
dealloc_stack %s1 : $*Int32
return %a1 : $Int32
}
|
|
|
|
// CHECK-LABEL: sil @promote_and_fix_stack_nesting1
|
|
// CHECK: alloc_stack
|
|
// CHECK: {{^}}bb2:
|
|
// CHECK: [[O:%[0-9]+]] = alloc_ref [stack] $XX
|
|
// CHECK: strong_release
|
|
// CHECK: dealloc_stack_ref [[O]] : $XX
|
|
// CHECK: dealloc_stack
|
|
// CHECK: return
|
|
// The alloc_ref happens after alloc_stack %s1, but %s1 is deallocated while
// the promoted object is still alive. The pass must fix the stack nesting:
// the dealloc_stack ends up after the dealloc_stack_ref (see CHECK lines).
sil @promote_and_fix_stack_nesting1 : $@convention(thin) () -> Int32 {
bb0:
%s1 = alloc_stack $Int32
cond_br undef, bb1, bb2

bb1:
br bb2

bb2:
%o1 = alloc_ref $XX
%f1 = function_ref @xx_init : $@convention(thin) (@guaranteed XX) -> XX
%n1 = apply %f1(%o1) : $@convention(thin) (@guaranteed XX) -> XX
dealloc_stack %s1 : $*Int32
%l1 = ref_element_addr %n1 : $XX, #XX.x
%l2 = load %l1 : $*Int32
strong_release %n1 : $XX
return %l2 : $Int32
}
|
|
|
|
// CHECK-LABEL: sil @promote_and_fix_stack_nesting2
|
|
// CHECK: alloc_stack
|
|
// CHECK-NEXT: alloc_ref [stack] $XX
|
|
// CHECK: {{^}}bb3:
|
|
// CHECK: strong_release
|
|
// CHECK-NEXT: dealloc_stack_ref
|
|
// CHECK-NEXT: dealloc_stack
|
|
// CHECK-NEXT: return
|
|
// Stack-nesting fixup in the presence of an unreachable path (bb2) that also
// deallocates %s1. On the returning path the order must become:
// strong_release, dealloc_stack_ref, dealloc_stack (see CHECK lines).
sil @promote_and_fix_stack_nesting2 : $@convention(thin) () -> Int32 {
bb0:
%s1 = alloc_stack $Int32
%o1 = alloc_ref $XX
%f1 = function_ref @xx_init : $@convention(thin) (@guaranteed XX) -> XX
%n1 = apply %f1(%o1) : $@convention(thin) (@guaranteed XX) -> XX
cond_br undef, bb1, bb2

bb1:
br bb3

bb2:
dealloc_stack %s1 : $*Int32
unreachable

bb3:
dealloc_stack %s1 : $*Int32
%l1 = ref_element_addr %n1 : $XX, #XX.x
%l2 = load %l1 : $*Int32
strong_release %n1 : $XX
return %l2 : $Int32
}
|
|
|
|
// CHECK-LABEL: sil @promote_and_fix_stack_nesting3
|
|
// CHECK: alloc_stack
|
|
// CHECK: {{^}}bb2:
|
|
// CHECK: [[O:%[0-9]+]] = alloc_ref [stack] [tail_elems{{.*}}] $XX
|
|
// CHECK: strong_release
|
|
// CHECK: dealloc_stack_ref
|
|
// CHECK: dealloc_stack
|
|
// CHECK: return
|
|
// Like nesting1, but with a tail-allocated alloc_ref. The tail-element count
// (%i) is defined in bb0, before the allocation block bb2.
sil @promote_and_fix_stack_nesting3 : $@convention(thin) () -> Int32 {
bb0:
%s1 = alloc_stack $Int32
%i = integer_literal $Builtin.Word, 1
cond_br undef, bb1, bb2

bb1:
br bb2

bb2:
%o1 = alloc_ref [tail_elems $Int32 * %i : $Builtin.Word] $XX
%f1 = function_ref @xx_init : $@convention(thin) (@guaranteed XX) -> XX
%n1 = apply %f1(%o1) : $@convention(thin) (@guaranteed XX) -> XX
dealloc_stack %s1 : $*Int32
%l1 = ref_element_addr %n1 : $XX, #XX.x
%l2 = load %l1 : $*Int32
strong_release %n1 : $XX
return %l2 : $Int32
}
|
|
|
|
// CHECK-LABEL: sil @promote_and_fix_stack_nesting4
|
|
// CHECK: alloc_stack
|
|
// CHECK: {{^}}bb2:
|
|
// CHECK: [[O:%[0-9]+]] = alloc_ref [stack] [tail_elems{{.*}}] $XX
|
|
// CHECK: strong_release
|
|
// CHECK: dealloc_stack_ref
|
|
// CHECK: dealloc_stack
|
|
// CHECK: return
|
|
// Like nesting3, but the tail-element count is defined in the same block as
// the allocation (bb2).
sil @promote_and_fix_stack_nesting4 : $@convention(thin) () -> Int32 {
bb0:
%s1 = alloc_stack $Int32
cond_br undef, bb1, bb2

bb1:
br bb2

bb2:
%i = integer_literal $Builtin.Word, 1
%o1 = alloc_ref [tail_elems $Int32 * %i : $Builtin.Word] $XX
%f1 = function_ref @xx_init : $@convention(thin) (@guaranteed XX) -> XX
%n1 = apply %f1(%o1) : $@convention(thin) (@guaranteed XX) -> XX
dealloc_stack %s1 : $*Int32
%l1 = ref_element_addr %n1 : $XX, #XX.x
%l2 = load %l1 : $*Int32
strong_release %n1 : $XX
return %l2 : $Int32
}
|
|
|
|
// CHECK-LABEL: sil @promote_and_fix_stack_nesting5
|
|
// CHECK: alloc_stack
|
|
// CHECK: {{^}}bb2({{.*}}):
|
|
// CHECK: [[O:%[0-9]+]] = alloc_ref [stack] [tail_elems{{.*}}] $XX
|
|
// CHECK: strong_release
|
|
// CHECK: dealloc_stack_ref
|
|
// CHECK: dealloc_stack
|
|
// CHECK: return
|
|
// Like nesting4, but the tail-element count arrives as a block argument of
// the allocation block (bb2).
sil @promote_and_fix_stack_nesting5 : $@convention(thin) (Builtin.Word) -> Int32 {
bb0(%0 : $Builtin.Word):
%s1 = alloc_stack $Int32
cond_br undef, bb1, bb2(%0 : $Builtin.Word)

bb1:
br bb2(%0 : $Builtin.Word)

bb2(%i : $Builtin.Word):
%o1 = alloc_ref [tail_elems $Int32 * %i : $Builtin.Word] $XX
%f1 = function_ref @xx_init : $@convention(thin) (@guaranteed XX) -> XX
%n1 = apply %f1(%o1) : $@convention(thin) (@guaranteed XX) -> XX
dealloc_stack %s1 : $*Int32
%l1 = ref_element_addr %n1 : $XX, #XX.x
%l2 = load %l1 : $*Int32
strong_release %n1 : $XX
return %l2 : $Int32
}
|
|
|
|
// CHECK-LABEL: sil @promote_array
|
|
// CHECK: [[B:%[0-9]+]] = alloc_ref [stack] [tail_elems $Int
|
|
// CHECK: [[IF:%[0-9]+]] = function_ref @init_array_with_buffer
|
|
// CHECK: [[A:%[0-9]+]] = apply [[IF]]([[B]],
|
|
// CHECK: tuple_extract [[A]]
|
|
// CHECK: tuple_extract [[A]]
|
|
// CHECK: [[TAF:%[0-9]+]] = function_ref @take_array
|
|
// CHECK: apply [[TAF]]
|
|
// CHECK: dealloc_stack_ref [[B]]
|
|
// CHECK: return
|
|
// Array buffer promotion: the buffer is passed @owned to the
// "array.uninitialized" initializer whose escape effect says %0 only escapes
// into the returned array (%r.0). Since the array itself only goes to
// take_array (which releases it), the buffer can be stack-promoted.
sil @promote_array : $@convention(thin) (Int) -> () {
bb0(%0 : $Int):
// allocate the buffer
%1 = integer_literal $Builtin.Word, 2
%2 = alloc_ref [tail_elems $Int * %1 : $Builtin.Word] $DummyArrayStorage<Int>

// initialize the buffer
%3 = integer_literal $Builtin.Int32, 2
%4 = struct $Int32 (%3 : $Builtin.Int32)
%8 = metatype $@thin Array<Int>.Type
%9 = function_ref @init_array_with_buffer : $@convention(thin) (@owned DummyArrayStorage<Int>, Int32, @thin Array<Int>.Type) -> @owned (Array<Int>, UnsafeMutablePointer<Int>)
%10 = apply %9(%2, %4, %8) : $@convention(thin) (@owned DummyArrayStorage<Int>, Int32, @thin Array<Int>.Type) -> @owned (Array<Int>, UnsafeMutablePointer<Int>)
%11 = tuple_extract %10 : $(Array<Int>, UnsafeMutablePointer<Int>), 0
%12 = tuple_extract %10 : $(Array<Int>, UnsafeMutablePointer<Int>), 1
%13 = struct_extract %12 : $UnsafeMutablePointer<Int>, #UnsafeMutablePointer._rawValue
%14 = pointer_to_address %13 : $Builtin.RawPointer to [strict] $*Int

// store the 2 elements
store %0 to %14 : $*Int
%16 = integer_literal $Builtin.Word, 1
%17 = index_addr %14 : $*Int, %16 : $Builtin.Word
store %0 to %17 : $*Int

// pass the array to a function
%19 = function_ref @take_array : $@convention(thin) (@owned Array<Int>) -> ()
%20 = apply %19(%11) : $@convention(thin) (@owned Array<Int>) -> ()
%21 = tuple ()
return %21 : $()
}
|
|
|
|
// CHECK-LABEL: sil @dont_promote_escaping_array
|
|
// CHECK: alloc_ref [tail_elems $Int
|
|
// CHECK-NOT: dealloc_ref
|
|
// CHECK: return
|
|
// Negative array case: the constructed array (and therefore the buffer, per
// init_array_with_buffer's escape effect) is returned from the function, so
// the buffer must stay on the heap.
sil @dont_promote_escaping_array : $@convention(thin) (Int) -> @owned Array<Int> {
bb0(%0 : $Int):
// allocate the buffer
%1 = integer_literal $Builtin.Word, 2
%2 = alloc_ref [tail_elems $Int * %1 : $Builtin.Word] $DummyArrayStorage<Int>

// initialize the buffer
%3 = integer_literal $Builtin.Int32, 2
%4 = struct $Int32 (%3 : $Builtin.Int32)
%8 = metatype $@thin Array<Int>.Type
%9 = function_ref @init_array_with_buffer : $@convention(thin) (@owned DummyArrayStorage<Int>, Int32, @thin Array<Int>.Type) -> @owned (Array<Int>, UnsafeMutablePointer<Int>)
%10 = apply %9(%2, %4, %8) : $@convention(thin) (@owned DummyArrayStorage<Int>, Int32, @thin Array<Int>.Type) -> @owned (Array<Int>, UnsafeMutablePointer<Int>)
%11 = tuple_extract %10 : $(Array<Int>, UnsafeMutablePointer<Int>), 0
%12 = tuple_extract %10 : $(Array<Int>, UnsafeMutablePointer<Int>), 1
%13 = struct_extract %12 : $UnsafeMutablePointer<Int>, #UnsafeMutablePointer._rawValue
%14 = pointer_to_address %13 : $Builtin.RawPointer to [strict] $*Int

// store the 2 elements
store %0 to %14 : $*Int
%16 = integer_literal $Builtin.Word, 1
%17 = index_addr %14 : $*Int, %16 : $Builtin.Word
store %0 to %17 : $*Int

// return the array
return %11 : $Array<Int>
}
|
|
|
|
// Body-less declaration with explicit escape effects: argument %0 (the
// buffer) escapes only into the first tuple element of the result (%r.0),
// i.e. into the returned array. Marked "array.uninitialized" so the pass
// recognizes it as an array initializer.
sil [_semantics "array.uninitialized"] @init_array_with_buffer : $@convention(thin) (@owned DummyArrayStorage<Int>, Int32, @thin Array<Int>.Type) -> @owned (Array<Int>, UnsafeMutablePointer<Int>) {
[%0: escape v** => %r.0]
}
|
|
|
|
// Body-less declaration: the inout array argument (%2) is declared noescape,
// so passing a stack-promoted array buffer through it is allowed.
sil [_semantics "array.withUnsafeMutableBufferPointer"] @withUnsafeMutableBufferPointer : $@convention(method) (@owned @callee_owned (@inout Int) -> (@out (), @error Error), @inout Array<Int>) -> (@out (), @error Error) {
[%2: noescape v**]
}
|
|
|
|
// Helper: consumes an @owned array by releasing it; does not let it escape.
sil @take_array : $@convention(thin) (@owned Array<Int>) -> () {
bb0(%0 : $Array<Int>):
release_value %0 : $Array<Int>
%2 = tuple ()
return %2 : $()
}
|
|
|
|
// CHECK-LABEL: sil @promote_array_withUnsafeMutableBufferPointer_use
|
|
// CHECK: [[ARR:%.*]] = alloc_stack $Array<Int>
|
|
// CHECK: [[B:%[0-9]+]] = alloc_ref [stack] [tail_elems $Int
|
|
// CHECK: [[IF:%[0-9]+]] = function_ref @init_array_with_buffer
|
|
// CHECK: [[A:%[0-9]+]] = apply [[IF]]([[B]],
|
|
// CHECK: [[AV:%.*]] = tuple_extract [[A]]{{.*}}, 0
|
|
// CHECK: tuple_extract [[A]]
|
|
// CHECK: store [[AV]] to [[ARR]]
|
|
// CHECK: dealloc_stack_ref [[B]]
|
|
// CHECK: return
|
|
|
|
// The array is stored into a stack slot and passed @inout to the
// withUnsafeMutableBufferPointer semantic function, whose %2 argument is
// declared noescape (see declaration above), so the buffer is still
// promotable (see CHECK lines).
sil @promote_array_withUnsafeMutableBufferPointer_use : $@convention(thin) (Int, @owned @callee_owned (@inout Int) -> (@out (), @error Error)) -> () {
bb0(%0 : $Int, %closure: $@callee_owned (@inout Int) -> (@out (), @error Error)):

%the_array = alloc_stack $Array<Int>

// allocate the buffer
%1 = integer_literal $Builtin.Word, 2
%2 = alloc_ref [tail_elems $Int * %1 : $Builtin.Word] $DummyArrayStorage<Int>

// initialize the buffer
%3 = integer_literal $Builtin.Int32, 2
%4 = struct $Int32 (%3 : $Builtin.Int32)
%8 = metatype $@thin Array<Int>.Type
%9 = function_ref @init_array_with_buffer : $@convention(thin) (@owned DummyArrayStorage<Int>, Int32, @thin Array<Int>.Type) -> @owned (Array<Int>, UnsafeMutablePointer<Int>)
%10 = apply %9(%2, %4, %8) : $@convention(thin) (@owned DummyArrayStorage<Int>, Int32, @thin Array<Int>.Type) -> @owned (Array<Int>, UnsafeMutablePointer<Int>)
%11 = tuple_extract %10 : $(Array<Int>, UnsafeMutablePointer<Int>), 0
%12 = tuple_extract %10 : $(Array<Int>, UnsafeMutablePointer<Int>), 1
%13 = struct_extract %12 : $UnsafeMutablePointer<Int>, #UnsafeMutablePointer._rawValue
%14 = pointer_to_address %13 : $Builtin.RawPointer to [strict] $*Int

// store the 2 elements
store %0 to %14 : $*Int
%16 = integer_literal $Builtin.Word, 1
%17 = index_addr %14 : $*Int, %16 : $Builtin.Word
store %0 to %17 : $*Int

store %11 to %the_array : $*Array<Int>

// pass the array to a function
%19 = function_ref @take_array : $@convention(thin) (@owned Array<Int>) -> ()
%20 = apply %19(%11) : $@convention(thin) (@owned Array<Int>) -> ()

// pass the array to the withUnsafeMutableBufferPointer closure.
%21 = function_ref @withUnsafeMutableBufferPointer : $@convention(method) (@owned @callee_owned (@inout Int) -> (@out (), @error Error), @inout Array<Int>) -> (@out (), @error Error)
%22 = alloc_stack $()
%23 = apply [nothrow] %21(%22, %closure, %the_array) : $@convention(method) (@owned @callee_owned (@inout Int) -> (@out (), @error Error), @inout Array<Int>) -> (@out (), @error Error)
dealloc_stack %22 : $*()

dealloc_stack %the_array: $*Array<Int>
%24 = tuple ()
return %24 : $()
}
|
|
|
|
// External declaration used below to capture the array @owned in a closure
// (no effects declared, so the array is treated as escaping there).
sil @array_capture_closure : $@convention(thin) (@inout Int, @owned Array<Int>) -> (@out ())
|
|
|
|
// CHECK-LABEL: sil @dont_promote_array_withUnsafeMutableBufferPointer_capture
|
|
// CHECK: alloc_ref [tail_elems $Int
|
|
// CHECK-NOT: dealloc_ref
|
|
// CHECK: return
|
|
|
|
sil @dont_promote_array_withUnsafeMutableBufferPointer_capture : $@convention(thin) (Int) -> () {
|
|
bb0(%0 : $Int):
|
|
|
|
%the_array = alloc_stack $Array<Int>
|
|
|
|
// allocate the buffer
|
|
%1 = integer_literal $Builtin.Word, 2
|
|
%2 = alloc_ref [tail_elems $Int * %1 : $Builtin.Word] $DummyArrayStorage<Int>
|
|
|
|
// initialize the buffer
|
|
%3 = integer_literal $Builtin.Int32, 2
|
|
%4 = struct $Int32 (%3 : $Builtin.Int32)
|
|
%8 = metatype $@thin Array<Int>.Type
|
|
%9 = function_ref @init_array_with_buffer : $@convention(thin) (@owned DummyArrayStorage<Int>, Int32, @thin Array<Int>.Type) -> @owned (Array<Int>, UnsafeMutablePointer<Int>)
|
|
%10 = apply %9(%2, %4, %8) : $@convention(thin) (@owned DummyArrayStorage<Int>, Int32, @thin Array<Int>.Type) -> @owned (Array<Int>, UnsafeMutablePointer<Int>)
|
|
%11 = tuple_extract %10 : $(Array<Int>, UnsafeMutablePointer<Int>), 0
|
|
%12 = tuple_extract %10 : $(Array<Int>, UnsafeMutablePointer<Int>), 1
|
|
%13 = struct_extract %12 : $UnsafeMutablePointer<Int>, #UnsafeMutablePointer._rawValue
|
|
%14 = pointer_to_address %13 : $Builtin.RawPointer to [strict] $*Int
|
|
|
|
// store the 2 elements
|
|
store %0 to %14 : $*Int
|
|
%16 = integer_literal $Builtin.Word, 1
|
|
%17 = index_addr %14 : $*Int, %16 : $Builtin.Word
|
|
store %0 to %17 : $*Int
|
|
|
|
store %11 to %the_array : $*Array<Int>
|
|
|
|
// pass the array to a function
|
|
%19 = function_ref @take_array : $@convention(thin) (@owned Array<Int>) -> ()
|
|
%20 = apply %19(%11) : $@convention(thin) (@owned Array<Int>) -> ()
|
|
|
|
// pass the array to the withUnsafeMutableBufferPointer closure.
|
|
%closure_fun = function_ref @array_capture_closure : $@convention(thin) (@inout Int, @owned Array<Int>) -> (@out ())
|
|
%closure = partial_apply %closure_fun(%11) : $@convention(thin) (@inout Int, @owned Array<Int>) -> (@out ())
|
|
%closure2 = convert_function %closure : $@callee_owned (@inout Int) -> (@out ()) to $@callee_owned (@inout Int) -> (@out (), @error Error)
|
|
%21 = function_ref @withUnsafeMutableBufferPointer : $@convention(method) (@owned @callee_owned (@inout Int) -> (@out (), @error Error), @inout Array<Int>) -> (@out (), @error Error)
|
|
%22 = alloc_stack $()
|
|
%23 = apply [nothrow]%21(%22, %closure2, %the_array) : $@convention(method) (@owned @callee_owned (@inout Int) -> (@out (), @error Error), @inout Array<Int>) -> (@out (), @error Error)
|
|
dealloc_stack %22 : $*()
|
|
|
|
dealloc_stack %the_array: $*Array<Int>
|
|
%24 = tuple ()
|
|
return %24 : $()
|
|
}
|
|
|
|
// Unlike the dont_promote variant, the closure here captures a different
// array (%another), so the locally-created buffer does not escape and can
// be promoted to a [stack] allocation.
// CHECK-LABEL: sil @promote_array_withUnsafeMutableBufferPointer_capture
// CHECK: [[ARR:%.*]] = alloc_stack $Array<Int>
// CHECK: [[B:%[0-9]+]] = alloc_ref [stack] [tail_elems $Int
// CHECK: [[IF:%[0-9]+]] = function_ref @init_array_with_buffer
// CHECK: [[A:%[0-9]+]] = apply [[IF]]([[B]],
// CHECK: [[AV:%.*]] = tuple_extract [[A]]{{.*}}, 0
// CHECK: tuple_extract [[A]]
// CHECK: store [[AV]] to [[ARR]]
// CHECK: dealloc_stack_ref [[B]]
// CHECK: return
sil @promote_array_withUnsafeMutableBufferPointer_capture : $@convention(thin) (Int, @owned Array<Int>) -> () {
bb0(%0 : $Int, %another : $Array<Int>):
  %the_array = alloc_stack $Array<Int>

  // allocate the buffer
  %1 = integer_literal $Builtin.Word, 2
  %2 = alloc_ref [tail_elems $Int * %1 : $Builtin.Word] $DummyArrayStorage<Int>

  // initialize the buffer
  %3 = integer_literal $Builtin.Int32, 2
  %4 = struct $Int32 (%3 : $Builtin.Int32)
  %8 = metatype $@thin Array<Int>.Type
  %9 = function_ref @init_array_with_buffer : $@convention(thin) (@owned DummyArrayStorage<Int>, Int32, @thin Array<Int>.Type) -> @owned (Array<Int>, UnsafeMutablePointer<Int>)
  %10 = apply %9(%2, %4, %8) : $@convention(thin) (@owned DummyArrayStorage<Int>, Int32, @thin Array<Int>.Type) -> @owned (Array<Int>, UnsafeMutablePointer<Int>)
  %11 = tuple_extract %10 : $(Array<Int>, UnsafeMutablePointer<Int>), 0
  %12 = tuple_extract %10 : $(Array<Int>, UnsafeMutablePointer<Int>), 1
  %13 = struct_extract %12 : $UnsafeMutablePointer<Int>, #UnsafeMutablePointer._rawValue
  %14 = pointer_to_address %13 : $Builtin.RawPointer to [strict] $*Int

  // store the 2 elements
  store %0 to %14 : $*Int
  %16 = integer_literal $Builtin.Word, 1
  %17 = index_addr %14 : $*Int, %16 : $Builtin.Word
  store %0 to %17 : $*Int

  store %11 to %the_array : $*Array<Int>

  // pass the array to a function
  %19 = function_ref @take_array : $@convention(thin) (@owned Array<Int>) -> ()
  %20 = apply %19(%11) : $@convention(thin) (@owned Array<Int>) -> ()

  // pass the array to the withUnsafeMutableBufferPointer closure.
  // The closure captures %another, not %11, so %2 stays non-escaping.
  %closure_fun = function_ref @array_capture_closure : $@convention(thin) (@inout Int, @owned Array<Int>) -> (@out ())
  %closure = partial_apply %closure_fun(%another) : $@convention(thin) (@inout Int, @owned Array<Int>) -> (@out ())
  %closure2 = convert_function %closure : $@callee_owned (@inout Int) -> (@out ()) to $@callee_owned (@inout Int) -> (@out (), @error Error)
  %21 = function_ref @withUnsafeMutableBufferPointer : $@convention(method) (@owned @callee_owned (@inout Int) -> (@out (), @error Error), @inout Array<Int>) -> (@out (), @error Error)
  %22 = alloc_stack $()
  %23 = apply [nothrow] %21(%22, %closure2, %the_array) : $@convention(method) (@owned @callee_owned (@inout Int) -> (@out (), @error Error), @inout Array<Int>) -> (@out (), @error Error)
  dealloc_stack %22 : $*()

  dealloc_stack %the_array : $*Array<Int>
  %24 = tuple ()
  return %24 : $()
}
// The input to StackNesting seems sketchy here; it isn't really
// obeying joint post-dominance, and is just skating by under the
// laxer rules for blocks that don't exit.
// CHECK-LABEL: sil @promote_with_unreachable_block_nest_bug
// CHECK: bb3:
// CHECK-NEXT: strong_release [[APPLY_RESULT:%[0-9]+]]
// CHECK-NEXT: dealloc_stack_ref [[ALLOC:%[0-9]+]]
// CHECK-NEXT: br bb6
// CHECK: bb4:
// CHECK-NEXT: integer_literal
// CHECK-NEXT: struct
// CHECK-NEXT: br bb13
// CHECK: bb5:
// CHECK-NEXT: strong_release [[APPLY_RESULT]]
// CHECK-NEXT: dealloc_stack_ref [[ALLOC]]
// CHECK-NEXT: br bb6
// CHECK: bb10:
// CHECK-NEXT: [[ALLOC]] = alloc_ref [stack] $XX
// CHECK-NEXT: // function_ref
// CHECK-NEXT: function_ref
// CHECK-NEXT: [[APPLY_RESULT]] = apply
// CHECK-NEXT: ref_element_addr
// CHECK-NEXT: load
// CHECK-NEXT: cond_br undef, bb2, bb4
// CHECK: bb13(
// CHECK-NEXT: unreachable
sil @promote_with_unreachable_block_nest_bug : $@convention(thin) () -> Int32 {
bb0:
  %0 = alloc_stack $Builtin.Int32
  %1 = alloc_stack $Builtin.Int32
  cond_br undef, bb1, bb7

bb1:
  br bb14

bb2:
  cond_br undef, bb5, bb3

bb3:
  strong_release %20 : $XX
  br bb6

bb4:
  %7 = integer_literal $Builtin.Int32, 0
  %8 = struct $Int32 (%7 : $Builtin.Int32)
  br bb13(%8 : $Int32)

bb5:
  strong_release %20 : $XX
  br bb6

bb6:
  br bb12

bb7:
  cond_br undef, bb8, bb9

bb8:
  cond_br undef, bb10, bb11

bb9:
  %15 = integer_literal $Builtin.Int32, 0
  %16 = struct $Int32 (%15 : $Builtin.Int32)
  br bb13(%16 : $Int32)

bb10:
  %18 = alloc_ref $XX
  // function_ref xx_init
  %19 = function_ref @xx_init : $@convention(thin) (@guaranteed XX) -> XX
  %20 = apply %19(%18) : $@convention(thin) (@guaranteed XX) -> XX
  %21 = ref_element_addr %20 : $XX, #XX.x
  %22 = load %21 : $*Int32
  cond_br undef, bb2, bb4

bb11:
  unreachable

bb12:
  br bb14

// bb13 ends in unreachable; only %1 is deallocated on this path.
bb13(%26 : $Int32):
  dealloc_stack %1 : $*Builtin.Int32
  unreachable

bb14:
  dealloc_stack %1 : $*Builtin.Int32
  dealloc_stack %0 : $*Builtin.Int32
  %31 = integer_literal $Builtin.Int32, 0
  %32 = struct $Int32 (%31 : $Builtin.Int32)
  return %32 : $Int32
} // end sil function 'promote_with_unreachable_block_nest_bug'
// Take XX as an argument, but don't actually return or escape it. I'm
// not sure how this happens in practice, but stack promotion requires
// that the argument does not escape.
sil @tryInitXX : $@convention(method) (@owned XX) -> (@owned XX, @error Error) {
bb0(%0 : $XX):
  %1 = alloc_ref $XX
  strong_release %0 : $XX
  return %1 : $XX
}
// The promoted allocation is consumed by a try_apply; the matching
// dealloc_stack_ref must be inserted on both the normal and error paths.
// CHECK-LABEL: sil @testUsedByTryApply : $@convention(thin) () -> (@owned XX, @error any Error) {
// CHECK: alloc_ref [stack] $XX
// CHECK: try_apply
// CHECK: bb1(%{{.*}} : $XX):
// CHECK-NEXT: dealloc_stack_ref
// CHECK: bb2(%{{.*}}: $any Error):
// CHECK-NEXT: dealloc_stack_ref
// CHECK-LABEL: } // end sil function 'testUsedByTryApply'
sil @testUsedByTryApply : $@convention(thin) () -> (@owned XX, @error Error) {
bb0:
  %0 = alloc_ref $XX
  %1 = function_ref @tryInitXX : $@convention(method) (@owned XX) -> (@owned XX, @error Error)
  try_apply %1(%0) : $@convention(method) (@owned XX) -> (@owned XX, @error Error), normal bb1, error bb2

bb1(%4 : $XX):
  return %4 : $XX

bb2(%5 : $Error):
  throw %5 : $Error
}
// A self-referential class used to test escapes through unowned references.
final class UnownedLink {
  @_hasStorage unowned var link: @sil_unowned UnownedLink
}
// The allocation is stored (as an unowned reference) into the argument's
// field, so it escapes and must not be promoted.
// CHECK-LABEL: sil {{.*}}@dont_promote_escaping_through_unowned
// CHECK: alloc_ref $UnownedLink
// CHECK-NOT: dealloc_ref
// CHECK-LABEL: } // end sil function 'dont_promote_escaping_through_unowned'
sil @dont_promote_escaping_through_unowned : $@convention(method) (@guaranteed UnownedLink) -> () {
bb0(%0 : $UnownedLink):
  %298 = alloc_ref $UnownedLink
  %20 = ref_to_unowned %298 : $UnownedLink to $@sil_unowned UnownedLink
  %44 = ref_element_addr %0 : $UnownedLink, #UnownedLink.link
  %45 = begin_access [modify] [dynamic] %44 : $*@sil_unowned UnownedLink
  store %20 to %45 : $*@sil_unowned UnownedLink
  end_access %45 : $*@sil_unowned UnownedLink
  strong_release %298 : $UnownedLink
  %r = tuple ()
  return %r : $()
}
// The alloc_ref outlives the alloc_stack it is nested inside of, and one
// successor path (bb2 -> bb3) is an infinite loop with no exit. Stack
// nesting correction must still promote without crashing.
// CHECK-LABEL: sil @dont_crash_with_wrong_stacknesting_with_infinite_loop
// CHECK: alloc_ref [stack] $XX
// CHECK-LABEL: } // end sil function 'dont_crash_with_wrong_stacknesting_with_infinite_loop'
sil @dont_crash_with_wrong_stacknesting_with_infinite_loop : $@convention(thin) () -> () {
bb0:
  %0 = alloc_stack $Int
  %1 = alloc_ref $XX
  dealloc_stack %0 : $*Int
  cond_br undef, bb1, bb2

bb1:
  strong_release %1 : $XX
  %4 = tuple ()
  return %4 : $()

bb2:
  br bb3

bb3:
  br bb3
}
// The alloc_ref in the loop header is stored to memory and its release in
// the header belongs to the previous iteration's object, so its live range
// crosses the back edge; it must not get a [stack] promotion.
// CHECK-LABEL: sil @inner_liverange_jumps_to_outer_in_header_block
// CHECK: alloc_ref $XX
// CHECK-LABEL: } // end sil function 'inner_liverange_jumps_to_outer_in_header_block'
sil @inner_liverange_jumps_to_outer_in_header_block : $@convention(thin) (@owned XX) -> () {
bb0(%0 : $XX):
  %1 = alloc_stack $XX
  br bb1(%0 : $XX)

bb1(%2 : $XX):
  strong_release %2 : $XX
  %4 = alloc_ref $XX
  store %4 to %1 : $*XX
  cond_br undef, bb2, bb3

bb2:
  br bb1(%4 : $XX)

bb3:
  strong_release %4 : $XX
  dealloc_stack %1 : $*XX
  %r = tuple ()
  return %r : $()
}