Files
swift-mirror/test/SILOptimizer/stack_protection.sil
Erik Eckstein 7cceaff5f3 SIL: don't print operand types in textual SIL
Type annotations for instruction operands are omitted, e.g.

```
  %3 = struct $S(%1, %2)
```

Operand types are redundant anyway and were only used for sanity checking in the SIL parser.

But: operand types _are_ printed if the definition of the operand value was not printed yet.
This happens:

* if the block with the definition appears after the block where the operand's instruction is located

* if a block or instruction is printed in isolation, e.g. in a debugger

The old behavior can be restored with `-Xllvm -sil-print-types`.
This option is added to many existing test files which check for operand types in their check-lines.
2024-11-21 18:49:52 +01:00

696 lines
28 KiB
Plaintext

// RUN: %target-sil-opt -sil-print-types -wmo -stack-protection -enable-sil-verify-all %s | %FileCheck %s --check-prefix=CHECK --check-prefix=MODULE
// RUN: %target-sil-opt -sil-print-types -function-stack-protection -enable-sil-verify-all %s | %FileCheck %s --check-prefix=CHECK --check-prefix=FUNCTION
// RUN: %target-sil-opt -sil-print-types -wmo -stack-protection -enable-move-inout-stack-protector -enable-sil-verify-all %s | %FileCheck %s --check-prefix=CHECK --check-prefix=MOVE
// REQUIRES: swift_in_compiler
sil_stage canonical
import Builtin
import Swift
// Test class with two Int64 fields and a nested struct field; the
// ref_element_addr tests below take addresses of these stored properties.
class C {
@_hasStorage var i: Int64
@_hasStorage var j: Int64
@_hasStorage var s: S
}
// Test struct; struct_element_addr tests below take addresses of a and b.
struct S {
@_hasStorage var a: Int64
@_hasStorage var b: Int64
}
sil @unknown : $@convention(thin) (Builtin.RawPointer) -> ()
sil @unknown_addr : $@convention(thin) (@in Int64) -> ()
// CHECK-LABEL: sil [stack_protection] @function_local_stack
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'function_local_stack'
// A function-local alloc_stack is accessed through a
// [stack_protection] address_to_pointer: the pass marks the function
// [stack_protection] (see CHECK-LABEL) and inserts no temporary copies.
sil @function_local_stack : $@convention(thin) (Int64) -> () {
bb0(%0 : $Int64):
%1 = alloc_stack $Int64
%2 = address_to_pointer [stack_protection] %1 : $*Int64 to $Builtin.RawPointer
%3 = pointer_to_address %2 : $Builtin.RawPointer to $*Int64
store %0 to %3 : $*Int64
dealloc_stack %1 : $*Int64
%r = tuple ()
return %r : $()
}
// CHECK-LABEL: sil @only_loads
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'only_loads'
// The pointer is only read through (a single load), so no [stack_protection]
// attribute is expected on the function (CHECK-LABEL has no attribute).
sil @only_loads : $@convention(thin) () -> Int64 {
bb0:
%1 = alloc_stack $Int64
%2 = address_to_pointer [stack_protection] %1 : $*Int64 to $Builtin.RawPointer
%3 = pointer_to_address %2 : $Builtin.RawPointer to $*Int64
%4 = load %3 : $*Int64
dealloc_stack %1 : $*Int64
return %4 : $Int64
}
// CHECK-LABEL: sil @copy_addr_src
// CHECK: } // end sil function 'copy_addr_src'
// The protected stack slot is only used as the *source* of a copy_addr
// (a read), so no [stack_protection] attribute appears on the function.
sil @copy_addr_src : $@convention(thin) () -> @out Int64 {
bb0(%0 : $*Int64):
%1 = alloc_stack $Int64
%2 = address_to_pointer [stack_protection] %1 : $*Int64 to $Builtin.RawPointer
%3 = pointer_to_address %2 : $Builtin.RawPointer to $*Int64
copy_addr %3 to %0 : $*Int64
dealloc_stack %1 : $*Int64
%r = tuple ()
return %r : $()
}
// CHECK-LABEL: sil [stack_protection] @copy_addr_dst
// CHECK: } // end sil function 'copy_addr_dst'
// The protected stack slot is the *destination* of a copy_addr (a write),
// so the function is marked [stack_protection].
sil @copy_addr_dst : $@convention(thin) (@in Int64) -> () {
bb0(%0 : $*Int64):
%1 = alloc_stack $Int64
%2 = address_to_pointer [stack_protection] %1 : $*Int64 to $Builtin.RawPointer
%3 = pointer_to_address %2 : $Builtin.RawPointer to $*Int64
copy_addr %0 to %3 : $*Int64
dealloc_stack %1 : $*Int64
%r = tuple ()
return %r : $()
}
// CHECK-LABEL: sil [stack_protection] @escaping_pointer
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'escaping_pointer'
// The raw pointer to a local stack slot is passed to an unknown function,
// so the function is marked [stack_protection].
sil @escaping_pointer : $@convention(thin) () -> () {
bb0:
%1 = alloc_stack $Int64
%2 = address_to_pointer [stack_protection] %1 : $*Int64 to $Builtin.RawPointer
%3 = function_ref @unknown : $@convention(thin) (Builtin.RawPointer) -> ()
apply %3(%2) : $@convention(thin) (Builtin.RawPointer) -> ()
dealloc_stack %1 : $*Int64
%r = tuple ()
return %r : $()
}
// CHECK-LABEL: sil @no_stack_protection_for_index_addr
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'no_stack_protection_for_index_addr'
// index_addr here carries no [stack_protection] flag, so the function stays
// unattributed even though the indexed address escapes to unknown_addr.
sil @no_stack_protection_for_index_addr : $@convention(thin) () -> () {
bb0:
%0 = alloc_stack $Int64
%1 = address_to_pointer %0 : $*Int64 to $Builtin.RawPointer
%2 = integer_literal $Builtin.Word, 1
%3 = index_addr %0 : $*Int64, %2 : $Builtin.Word
%4 = function_ref @unknown_addr : $@convention(thin) (@in Int64) -> ()
apply %4(%3) : $@convention(thin) (@in Int64) -> ()
dealloc_stack %0 : $*Int64
%r = tuple ()
return %r : $()
}
// CHECK-LABEL: sil [stack_protection] @stack_protection_for_index_addr
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'stack_protection_for_index_addr'
// Same as above, but index_addr is flagged [stack_protection], so the
// function is marked [stack_protection].
sil @stack_protection_for_index_addr : $@convention(thin) () -> () {
bb0:
%0 = alloc_stack $Int64
%1 = address_to_pointer %0 : $*Int64 to $Builtin.RawPointer
%2 = integer_literal $Builtin.Word, 1
%3 = index_addr [stack_protection] %0 : $*Int64, %2 : $Builtin.Word
%4 = function_ref @unknown_addr : $@convention(thin) (@in Int64) -> ()
apply %4(%3) : $@convention(thin) (@in Int64) -> ()
dealloc_stack %0 : $*Int64
%r = tuple ()
return %r : $()
}
// CHECK-LABEL: sil @index_addr_with_only_loads
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'index_addr_with_only_loads'
// A flagged index_addr that is only loaded from does not trigger
// stack protection.
sil @index_addr_with_only_loads : $@convention(thin) () -> Int64 {
bb0:
%0 = alloc_stack $Int64
%1 = address_to_pointer %0 : $*Int64 to $Builtin.RawPointer
%2 = integer_literal $Builtin.Word, 1
%3 = index_addr [stack_protection] %0 : $*Int64, %2 : $Builtin.Word
%4 = load %3 : $*Int64
dealloc_stack %0 : $*Int64
return %4 : $Int64
}
// CHECK-LABEL: sil [stack_protection] @stack_alloc_builtin
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'stack_alloc_builtin'
// The "stackAlloc" builtin triggers stack protection on the function.
// (Header spacing normalized to `sil @name : $...` to match the rest of
// the file; the SIL parser accepts both spellings.)
sil @stack_alloc_builtin : $@convention(thin) () -> () {
bb0:
%0 = integer_literal $Builtin.Word, 8
%1 = builtin "stackAlloc"(%0 : $Builtin.Word, %0 : $Builtin.Word, %0 : $Builtin.Word) : $Builtin.RawPointer
%2 = builtin "stackDealloc"(%1 : $Builtin.RawPointer) : $()
%r = tuple ()
return %r : $()
}
// CHECK-LABEL: sil @unprotected_stack_alloc_builtin
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'unprotected_stack_alloc_builtin'
// The "unprotectedStackAlloc" builtin must NOT trigger stack protection
// (CHECK-LABEL has no attribute).
// (Header spacing normalized to `sil @name : $...` to match the rest of
// the file; the SIL parser accepts both spellings.)
sil @unprotected_stack_alloc_builtin : $@convention(thin) () -> () {
bb0:
%0 = integer_literal $Builtin.Word, 8
%1 = integer_literal $Builtin.Int1, 0
%2 = builtin "unprotectedStackAlloc"(%0 : $Builtin.Word, %0 : $Builtin.Word, %0 : $Builtin.Word, %1 : $Builtin.Int1) : $Builtin.RawPointer
%3 = builtin "stackDealloc"(%2 : $Builtin.RawPointer) : $()
%r = tuple ()
return %r : $()
}
// CHECK-LABEL: sil [stack_protection] @function_local_stack_obj
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'function_local_stack_obj'
// A field address of a stack-allocated class instance escapes to an unknown
// function: the function is marked [stack_protection] without copies.
sil @function_local_stack_obj : $@convention(thin) () -> () {
bb0:
%0 = alloc_ref [stack] $C
%1 = ref_element_addr %0 : $C, #C.i
%2 = begin_access [modify] [static] %1 : $*Int64
%3 = address_to_pointer [stack_protection] %2 : $*Int64 to $Builtin.RawPointer
%4 = function_ref @unknown : $@convention(thin) (Builtin.RawPointer) -> ()
apply %4(%3) : $@convention(thin) (Builtin.RawPointer) -> ()
end_access %2 : $*Int64
dealloc_stack_ref %0 : $C
%r = tuple ()
return %r : $()
}
// CHECK-LABEL: sil @function_local_obj
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'function_local_obj'
// Same pattern but the instance is heap-allocated (plain alloc_ref), so no
// stack protection is needed.
sil @function_local_obj : $@convention(thin) () -> () {
bb0:
%0 = alloc_ref $C
%1 = ref_element_addr %0 : $C, #C.i
%2 = begin_access [modify] [static] %1 : $*Int64
%3 = address_to_pointer [stack_protection] %2 : $*Int64 to $Builtin.RawPointer
%4 = function_ref @unknown : $@convention(thin) (Builtin.RawPointer) -> ()
apply %4(%3) : $@convention(thin) (Builtin.RawPointer) -> ()
end_access %2 : $*Int64
strong_release %0 : $C
%r = tuple ()
return %r : $()
}
// MOVE-LABEL: sil [stack_protection] @inout_with_unknown_callers1
// MODULE-LABEL: sil @inout_with_unknown_callers1
// MOVE: [[T:%[0-9]+]] = alloc_stack $Int64
// MOVE: copy_addr [take] %0 to [init] [[T]] : $*Int64
// MOVE: address_to_pointer [stack_protection] [[T]]
// MOVE: copy_addr [take] [[T]] to [init] %0 : $*Int64
// MOVE: dealloc_stack [[T]] : $*Int64
// MODULE-NOT: alloc_stack
// MODULE-NOT: copy_addr
// Fixed: this directive was spelled "CHECK-MODULE", a prefix not registered
// by any RUN line, so it was silently ignored by FileCheck.
// MODULE: address_to_pointer [stack_protection] %0
// CHECK: } // end sil function 'inout_with_unknown_callers1'
// With the move-inout protector (MOVE), the inout value is moved into a
// local temporary around the escape; in plain module mode (MODULE) it is
// left in place.
sil @inout_with_unknown_callers1 : $@convention(thin) (@inout Int64) -> () {
bb0(%0 : $*Int64):
%1 = address_to_pointer [stack_protection] %0 : $*Int64 to $Builtin.RawPointer
%2 = function_ref @unknown : $@convention(thin) (Builtin.RawPointer) -> ()
apply %2(%1) : $@convention(thin) (Builtin.RawPointer) -> ()
%r = tuple ()
return %r : $()
}
// CHECK-LABEL: sil @in_with_unknown_callers
// CHECK: address_to_pointer [stack_protection] %0
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'in_with_unknown_callers'
// An @in_guaranteed argument is read-only, so no temporary copy is made in
// any configuration.
sil @in_with_unknown_callers : $@convention(thin) (@in_guaranteed Int64) -> () {
bb0(%0 : $*Int64):
%1 = address_to_pointer [stack_protection] %0 : $*Int64 to $Builtin.RawPointer
%2 = function_ref @unknown : $@convention(thin) (Builtin.RawPointer) -> ()
apply %2(%1) : $@convention(thin) (Builtin.RawPointer) -> ()
%r = tuple ()
return %r : $()
}
// MOVE-LABEL: sil [stack_protection] @inout_with_modify_access
// MODULE-LABEL: sil @inout_with_modify_access
// MOVE: [[A:%[0-9]+]] = begin_access [modify] [dynamic] %0 : $*Int64
// MOVE: [[T:%[0-9]+]] = alloc_stack $Int64
// MOVE: copy_addr [take] [[A]] to [init] [[T]] : $*Int64
// MOVE: address_to_pointer [stack_protection] [[T]]
// MOVE: copy_addr [take] [[T]] to [init] [[A]] : $*Int64
// MOVE: dealloc_stack [[T]] : $*Int64
// MODULE-NOT: alloc_stack
// MODULE-NOT: copy_addr
// Fixed: this directive was spelled "CHECK-MODULE", a prefix not registered
// by any RUN line, so it was silently ignored by FileCheck.
// MODULE: address_to_pointer [stack_protection] %0
// CHECK: } // end sil function 'inout_with_modify_access'
// A [modify] access on the inout argument: the move-inout protector copies
// the value in and out around the escaping pointer inside the access scope.
sil @inout_with_modify_access : $@convention(thin) (@inout Int64) -> () {
bb0(%0 : $*Int64):
%1 = begin_access [modify] [dynamic] %0 : $*Int64
%2 = address_to_pointer [stack_protection] %1 : $*Int64 to $Builtin.RawPointer
%3 = function_ref @unknown : $@convention(thin) (Builtin.RawPointer) -> ()
apply %3(%2) : $@convention(thin) (Builtin.RawPointer) -> ()
end_access %1 : $*Int64
%r = tuple ()
return %r : $()
}
// CHECK-LABEL: sil @inout_with_read_access
// CHECK: [[A:%[0-9]+]] = begin_access [read] [dynamic] %0 : $*Int64
// CHECK: address_to_pointer [stack_protection] [[A]]
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'inout_with_read_access'
// A [read] access does not require moving the value; no copies in any mode.
sil @inout_with_read_access : $@convention(thin) (@inout Int64) -> () {
bb0(%0 : $*Int64):
%1 = begin_access [read] [dynamic] %0 : $*Int64
%2 = address_to_pointer [stack_protection] %1 : $*Int64 to $Builtin.RawPointer
%3 = function_ref @unknown : $@convention(thin) (Builtin.RawPointer) -> ()
apply %3(%2) : $@convention(thin) (Builtin.RawPointer) -> ()
end_access %1 : $*Int64
%r = tuple ()
return %r : $()
}
// MOVE-LABEL: sil [stack_protection] @inout_with_unknown_callers2
// MODULE-LABEL: sil @inout_with_unknown_callers2
// MOVE: [[T:%[0-9]+]] = alloc_stack $S
// MOVE: copy_addr [take] %0 to [init] [[T]] : $*S
// MOVE: [[A:%[0-9]+]] = struct_element_addr [[T]] : $*S, #S.a
// MOVE: address_to_pointer [stack_protection] [[A]]
// MOVE: [[B:%[0-9]+]] = struct_element_addr [[T]] : $*S, #S.b
// MOVE: address_to_pointer [stack_protection] [[B]]
// MOVE: copy_addr [take] [[T]] to [init] %0 : $*S
// MOVE: dealloc_stack [[T]] : $*S
// MODULE-NOT: alloc_stack
// MODULE-NOT: copy_addr
// CHECK: } // end sil function 'inout_with_unknown_callers2'
// Two escaping field addresses of one inout struct: the MOVE mode creates a
// single temporary for the whole struct covering both escapes.
sil @inout_with_unknown_callers2 : $@convention(thin) (@inout S) -> () {
bb0(%0 : $*S):
%1 = struct_element_addr %0 : $*S, #S.a
%2 = address_to_pointer [stack_protection] %1 : $*Int64 to $Builtin.RawPointer
%3 = function_ref @unknown : $@convention(thin) (Builtin.RawPointer) -> ()
apply %3(%2) : $@convention(thin) (Builtin.RawPointer) -> ()
%5 = struct_element_addr %0 : $*S, #S.b
%6 = address_to_pointer [stack_protection] %5 : $*Int64 to $Builtin.RawPointer
apply %3(%6) : $@convention(thin) (Builtin.RawPointer) -> ()
%r = tuple ()
return %r : $()
}
// MOVE-LABEL: sil [stack_protection] @object_with_unknown_callers1
// MODULE-LABEL: sil @object_with_unknown_callers1
// MOVE: [[I:%[0-9]+]] = ref_element_addr %0 : $C, #C.i
// MOVE: [[A:%[0-9]+]] = begin_access [modify] [static] [[I]] : $*Int64
// MOVE: [[T:%[0-9]+]] = alloc_stack $Int64
// MOVE: copy_addr [take] [[A]] to [init] [[T]] : $*Int64
// MOVE: address_to_pointer [stack_protection] [[T]]
// MOVE: copy_addr [take] [[T]] to [init] [[A]] : $*Int64
// MOVE: dealloc_stack [[T]] : $*Int64
// MOVE: end_access [[A]] : $*Int64
// MODULE-NOT: alloc_stack
// MODULE-NOT: copy_addr
// CHECK: } // end sil function 'object_with_unknown_callers1'
// Escaping field address of a guaranteed class argument: MOVE mode moves the
// field value into a temporary for the duration of the escape.
sil @object_with_unknown_callers1 : $@convention(thin) (@guaranteed C) -> () {
bb0(%0 : $C):
%1 = ref_element_addr %0 : $C, #C.i
%2 = begin_access [modify] [static] %1 : $*Int64
%3 = address_to_pointer [stack_protection] %2 : $*Int64 to $Builtin.RawPointer
%4 = function_ref @unknown : $@convention(thin) (Builtin.RawPointer) -> ()
apply %4(%3) : $@convention(thin) (Builtin.RawPointer) -> ()
end_access %2 : $*Int64
%r = tuple ()
return %r : $()
}
// MOVE-LABEL: sil [stack_protection] @object_with_unknown_callers2
// MODULE-LABEL: sil @object_with_unknown_callers2
// MOVE: [[I:%[0-9]+]] = ref_element_addr %0 : $C, #C.i
// MOVE: [[A:%[0-9]+]] = begin_access [modify] [static] [[I]] : $*Int64
// MOVE: [[T:%[0-9]+]] = alloc_stack $Int64
// MOVE: copy_addr [take] [[A]] to [init] [[T]] : $*Int64
// MOVE: address_to_pointer [stack_protection] [[T]]
// MOVE: address_to_pointer [stack_protection] [[T]]
// MOVE: copy_addr [take] [[T]] to [init] [[A]] : $*Int64
// MOVE: dealloc_stack [[T]] : $*Int64
// MOVE: end_access [[A]] : $*Int64
// MODULE-NOT: alloc_stack
// MODULE-NOT: copy_addr
// CHECK: } // end sil function 'object_with_unknown_callers2'
// Two escapes of the same field within one access: MOVE mode uses a single
// temporary (one copy in, one copy out) for both.
sil @object_with_unknown_callers2 : $@convention(thin) (@guaranteed C) -> () {
bb0(%0 : $C):
%1 = ref_element_addr %0 : $C, #C.i
%2 = begin_access [modify] [static] %1 : $*Int64
%3 = address_to_pointer [stack_protection] %2 : $*Int64 to $Builtin.RawPointer
%4 = function_ref @unknown : $@convention(thin) (Builtin.RawPointer) -> ()
apply %4(%3) : $@convention(thin) (Builtin.RawPointer) -> ()
%6 = address_to_pointer [stack_protection] %2 : $*Int64 to $Builtin.RawPointer
apply %4(%6) : $@convention(thin) (Builtin.RawPointer) -> ()
end_access %2 : $*Int64
%r = tuple ()
return %r : $()
}
// MOVE-LABEL: sil [stack_protection] @object_with_unknown_callers3
// MODULE-LABEL: sil @object_with_unknown_callers3
// MOVE: [[I:%[0-9]+]] = ref_element_addr %0 : $C, #C.i
// MOVE: [[AI:%[0-9]+]] = begin_access [modify] [static] [[I]] : $*Int64
// MOVE: [[TI:%[0-9]+]] = alloc_stack $Int64
// MOVE: copy_addr [take] [[AI]] to [init] [[TI]] : $*Int64
// MOVE: address_to_pointer [stack_protection] [[TI]]
// MOVE: [[J:%[0-9]+]] = ref_element_addr %0 : $C, #C.j
// MOVE: [[AJ:%[0-9]+]] = begin_access [modify] [static] [[J]] : $*Int64
// MOVE: [[TJ:%[0-9]+]] = alloc_stack $Int64
// MOVE: copy_addr [take] [[AJ]] to [init] [[TJ]] : $*Int64
// MOVE: copy_addr [take] [[TI]] to [init] [[AI]] : $*Int64
// MOVE: end_access [[AI]] : $*Int64
// MOVE: address_to_pointer [stack_protection] [[TJ]]
// MOVE: copy_addr [take] [[TJ]] to [init] [[AJ]] : $*Int64
// MOVE: dealloc_stack [[TJ]] : $*Int64
// MOVE: dealloc_stack [[TI]] : $*Int64
// MOVE: end_access [[AJ]] : $*Int64
// MODULE-NOT: alloc_stack
// MODULE-NOT: copy_addr
// CHECK: } // end sil function 'object_with_unknown_callers3'
// Two overlapping (non-nested) accesses to different fields: MOVE mode
// creates one temporary per field; stack slots are deallocated in LIFO
// order even though the accesses are not properly nested.
sil @object_with_unknown_callers3 : $@convention(thin) (@guaranteed C) -> () {
bb0(%0 : $C):
%1 = ref_element_addr %0 : $C, #C.i
// Accesses don't need to be properly nested.
%2 = begin_access [modify] [static] %1 : $*Int64
%3 = address_to_pointer [stack_protection] %2 : $*Int64 to $Builtin.RawPointer
%4 = function_ref @unknown : $@convention(thin) (Builtin.RawPointer) -> ()
apply %4(%3) : $@convention(thin) (Builtin.RawPointer) -> ()
%6 = ref_element_addr %0 : $C, #C.j
%7 = begin_access [modify] [static] %6 : $*Int64
end_access %2 : $*Int64
%9 = address_to_pointer [stack_protection] %7 : $*Int64 to $Builtin.RawPointer
apply %4(%9) : $@convention(thin) (Builtin.RawPointer) -> ()
end_access %7 : $*Int64
%r = tuple ()
return %r : $()
}
// CHECK-LABEL: sil @object_with_read_access
// CHECK: [[I:%[0-9]+]] = ref_element_addr %0 : $C, #C.i
// CHECK: [[A:%[0-9]+]] = begin_access [read] [static] [[I]] : $*Int64
// CHECK: address_to_pointer [stack_protection] [[A]]
// CHECK-NOT: copy_addr
// CHECK: end_access [[A]] : $*Int64
// CHECK: } // end sil function 'object_with_read_access'
// A [read] access to an object field needs no temporary in any mode.
sil @object_with_read_access : $@convention(thin) (@guaranteed C) -> () {
bb0(%0 : $C):
%1 = ref_element_addr %0 : $C, #C.i
%2 = begin_access [read] [static] %1 : $*Int64
%3 = address_to_pointer [stack_protection] %2 : $*Int64 to $Builtin.RawPointer
%4 = function_ref @unknown : $@convention(thin) (Builtin.RawPointer) -> ()
apply %4(%3) : $@convention(thin) (Builtin.RawPointer) -> ()
end_access %2 : $*Int64
%r = tuple ()
return %r : $()
}
// CHECK-LABEL: sil hidden @known_callers_inout
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'known_callers_inout'
// The callee itself stays unattributed; with -wmo its known callers
// (call_internal_with_inout1/2 below) get [stack_protection] instead.
sil hidden @known_callers_inout : $@convention(thin) (@inout Int64) -> () {
bb0(%0 : $*Int64):
%2 = integer_literal $Builtin.Int64, 2
%3 = struct $Int64 (%2 : $Builtin.Int64)
%4 = integer_literal $Builtin.Word, 1
%5 = index_addr [stack_protection] %0 : $*Int64, %4 : $Builtin.Word
store %3 to %5 : $*Int64
%7 = tuple ()
return %7 : $()
}
// MODULE-LABEL: sil [stack_protection] @call_internal_with_inout1
// FUNCTION-LABEL: sil @call_internal_with_inout1
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'call_internal_with_inout1'
// Whole-module mode propagates protection to this caller of
// known_callers_inout; function-level mode cannot and leaves it unattributed.
sil @call_internal_with_inout1 : $@convention(thin) () -> () {
bb0:
%0 = alloc_stack [lexical] $Int64
%1 = function_ref @known_callers_inout : $@convention(thin) (@inout Int64) -> ()
%2 = apply %1(%0) : $@convention(thin) (@inout Int64) -> ()
dealloc_stack %0 : $*Int64
// Introduce a cycle in the call graph.
%f = function_ref @call_internal_with_inout2 : $@convention(thin) () -> ()
%a = apply %f() : $@convention(thin) () -> ()
%4 = tuple ()
return %4 : $()
}
// MODULE-LABEL: sil [stack_protection] @call_internal_with_inout2
// FUNCTION-LABEL: sil @call_internal_with_inout2
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'call_internal_with_inout2'
// Second half of the call-graph cycle with call_internal_with_inout1; the
// caller analysis must still converge and attribute both callers in -wmo.
sil @call_internal_with_inout2 : $@convention(thin) () -> () {
bb0:
%0 = alloc_stack [lexical] $Int64
%1 = function_ref @known_callers_inout : $@convention(thin) (@inout Int64) -> ()
%2 = apply %1(%0) : $@convention(thin) (@inout Int64) -> ()
dealloc_stack %0 : $*Int64
cond_br undef, bb1, bb2
bb1:
// Introduce a cycle in the call graph.
%f = function_ref @call_internal_with_inout1 : $@convention(thin) () -> ()
%a = apply %f() : $@convention(thin) () -> ()
br bb3
bb2:
br bb3
bb3:
%4 = tuple ()
return %4 : $()
}
// MOVE-LABEL: sil hidden [stack_protection] @unknown_callers
// MODULE-LABEL: sil hidden @unknown_callers
// MOVE: copy_addr
// MOVE: copy_addr
// MODULE-NOT: copy_addr
// CHECK: } // end sil function 'unknown_callers'
// NOTE(review): body is identical to known_callers_inout; the differing
// expectations come from how callers reference it (see call_unknown_callers).
sil hidden @unknown_callers : $@convention(thin) (@inout Int64) -> () {
bb0(%0 : $*Int64):
%2 = integer_literal $Builtin.Int64, 2
%3 = struct $Int64 (%2 : $Builtin.Int64)
%4 = integer_literal $Builtin.Word, 1
%5 = index_addr [stack_protection] %0 : $*Int64, %4 : $Builtin.Word
store %3 to %5 : $*Int64
%7 = tuple ()
return %7 : $()
}
// CHECK-LABEL: sil @call_unknown_callers
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'call_unknown_callers'
// This public caller merely forwards its own inout argument, so it needs no
// protection itself.
sil @call_unknown_callers : $@convention(thin) (@inout Int64) -> () {
bb0(%0 : $*Int64):
%1 = function_ref @unknown_callers : $@convention(thin) (@inout Int64) -> ()
%2 = apply %1(%0) : $@convention(thin) (@inout Int64) -> ()
%4 = tuple ()
return %4 : $()
}
// MOVE-LABEL: sil [stack_protection] @partially_known_callers
// MODULE-LABEL: sil @partially_known_callers
// MOVE: copy_addr
// MOVE: copy_addr
// MODULE-NOT: copy_addr
// CHECK: } // end sil function 'partially_known_callers'
// A public function with an escaping inout pointer: MOVE mode protects it
// locally with copies; MODULE mode pushes protection to the known caller.
sil @partially_known_callers : $@convention(thin) (@inout Int64) -> () {
bb0(%0 : $*Int64):
%1 = address_to_pointer [stack_protection] %0 : $*Int64 to $Builtin.RawPointer
%2 = function_ref @unknown : $@convention(thin) (Builtin.RawPointer) -> ()
apply %2(%1) : $@convention(thin) (Builtin.RawPointer) -> ()
%4 = tuple ()
return %4 : $()
}
// MOVE-LABEL: sil @call_partially_known_callers
// MODULE-LABEL: sil [stack_protection] @call_partially_known_callers
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'call_partially_known_callers'
// In MODULE mode the [stack_protection] lands here, on the caller owning
// the alloc_stack passed to partially_known_callers.
sil @call_partially_known_callers : $@convention(thin) () -> () {
bb0:
%0 = alloc_stack $Int64
%1 = function_ref @partially_known_callers : $@convention(thin) (@inout Int64) -> ()
%2 = apply %1(%0) : $@convention(thin) (@inout Int64) -> ()
dealloc_stack %0 : $*Int64
%4 = tuple ()
return %4 : $()
}
// CHECK-LABEL: sil hidden @known_callers_obj1
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'known_callers_obj1'
// Internal function escaping a field of its class argument; protection is
// attributed at its callers (see the two call sites below).
sil hidden @known_callers_obj1 : $@convention(thin) (@guaranteed C) -> () {
bb0(%0 : $C):
%1 = ref_element_addr %0 : $C, #C.i
%2 = begin_access [modify] [static] %1 : $*Int64
%3 = address_to_pointer [stack_protection] %2 : $*Int64 to $Builtin.RawPointer
%4 = function_ref @unknown : $@convention(thin) (Builtin.RawPointer) -> ()
apply %4(%3) : $@convention(thin) (Builtin.RawPointer) -> ()
end_access %2 : $*Int64
%7 = tuple ()
return %7 : $()
}
// MODULE-LABEL: sil [stack_protection] @call_internal_with_stack_obj
// FUNCTION-LABEL: sil @call_internal_with_stack_obj
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'call_internal_with_stack_obj'
// The caller owns the stack-allocated object, so in whole-module mode the
// protection attribute is placed here.
sil @call_internal_with_stack_obj : $@convention(thin) () -> () {
bb0:
%0 = alloc_ref [stack] $C
%1 = function_ref @known_callers_obj1 : $@convention(thin) (@guaranteed C) -> ()
%2 = apply %1(%0) : $@convention(thin) (@guaranteed C) -> ()
dealloc_stack_ref %0 : $C
%4 = tuple ()
return %4 : $()
}
// CHECK-LABEL: sil @call_internal_with_heap_obj
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'call_internal_with_heap_obj'
// Both incoming objects (through the bb3 phi argument) are heap-allocated,
// so no stack protection is needed anywhere.
sil @call_internal_with_heap_obj : $@convention(thin) () -> () {
bb0:
cond_br undef, bb1, bb2
bb1:
%0 = alloc_ref $C
br bb3(%0 : $C)
bb2:
%2 = alloc_ref $C
br bb3(%2 : $C)
bb3(%5 : $C):
%6 = function_ref @known_callers_obj1 : $@convention(thin) (@guaranteed C) -> ()
%7 = apply %6(%5) : $@convention(thin) (@guaranteed C) -> ()
strong_release %5 : $C
%r = tuple ()
return %r : $()
}
// MOVE-LABEL: sil hidden [stack_protection] @known_callers_obj2
// MODULE-LABEL: sil hidden @known_callers_obj2
// MOVE: alloc_stack
// MOVE: copy_addr
// MOVE: copy_addr
// MODULE-NOT: copy_addr
// CHECK: } // end sil function 'known_callers_obj2'
// NOTE(review): body matches known_callers_obj1; its caller passes objects
// of unknown origin (see call_known_callers_obj2), hence the MOVE copies.
sil hidden @known_callers_obj2 : $@convention(thin) (@guaranteed C) -> () {
bb0(%0 : $C):
%1 = ref_element_addr %0 : $C, #C.i
%2 = begin_access [modify] [static] %1 : $*Int64
%3 = address_to_pointer [stack_protection] %2 : $*Int64 to $Builtin.RawPointer
%4 = function_ref @unknown : $@convention(thin) (Builtin.RawPointer) -> ()
apply %4(%3) : $@convention(thin) (Builtin.RawPointer) -> ()
end_access %2 : $*Int64
%7 = tuple ()
return %7 : $()
}
sil @create_obj : $@convention(thin) () -> @owned C
// CHECK-LABEL: sil @call_known_callers_obj2
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'call_known_callers_obj2'
// One branch allocates the object locally, the other gets it from an
// external function: the caller cannot be attributed, so no copies appear
// here in any mode.
sil @call_known_callers_obj2 : $@convention(thin) () -> () {
bb0:
cond_br undef, bb1, bb2
bb1:
%0 = alloc_ref $C
br bb3(%0 : $C)
bb2:
%2 = function_ref @create_obj : $@convention(thin) () -> @owned C
%3 = apply %2() : $@convention(thin) () -> @owned C
br bb3(%3 : $C)
bb3(%5 : $C):
%6 = function_ref @known_callers_obj2 : $@convention(thin) (@guaranteed C) -> ()
%7 = apply %6(%5) : $@convention(thin) (@guaranteed C) -> ()
strong_release %5 : $C
%r = tuple ()
return %r : $()
}
// CHECK-LABEL: sil private @closure_with_inout_capture
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'closure_with_inout_capture'
// An @inout_aliasable capture: the closure itself needs no copies; its
// single known partial_apply site (below) takes the attribute in -wmo.
sil private @closure_with_inout_capture : $@convention(thin) (@inout_aliasable Int64) -> () {
bb0(%0 : $*Int64):
%1 = address_to_pointer [stack_protection] %0 : $*Int64 to $Builtin.RawPointer
%2 = function_ref @unknown : $@convention(thin) (Builtin.RawPointer) -> ()
apply %2(%1) : $@convention(thin) (Builtin.RawPointer) -> ()
%r = tuple ()
return %r : $()
}
sil @call_closure : $@convention(thin) (@noescape @callee_guaranteed () -> ()) -> ()
// MODULE-LABEL: sil [stack_protection] @test_closure_with_inout_capture
// FUNCTION-LABEL: sil @test_closure_with_inout_capture
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'test_closure_with_inout_capture'
// The captured alloc_stack lives here, so in whole-module mode this
// function (not the closure) gets [stack_protection].
sil @test_closure_with_inout_capture : $@convention(thin) () -> () {
bb0:
%2 = alloc_stack [lexical] $Int64
%3 = integer_literal $Builtin.Int64, 2
%4 = struct $Int64 (%3 : $Builtin.Int64)
store %4 to %2 : $*Int64
%6 = function_ref @closure_with_inout_capture : $@convention(thin) (@inout_aliasable Int64) -> ()
%7 = partial_apply [callee_guaranteed] [on_stack] %6(%2) : $@convention(thin) (@inout_aliasable Int64) -> ()
%8 = function_ref @call_closure : $@convention(thin) (@noescape @callee_guaranteed () -> ()) -> ()
%9 = apply %8(%7) : $@convention(thin) (@noescape @callee_guaranteed () -> ()) -> ()
dealloc_stack %7 : $@noescape @callee_guaranteed () -> ()
dealloc_stack %2 : $*Int64
%12 = tuple ()
return %12 : $()
}
// MOVE-LABEL: sil private [stack_protection] @closure_with_inout_arg
// MODULE-LABEL: sil private @closure_with_inout_arg
// MOVE: alloc_stack
// MOVE: copy_addr
// MOVE: copy_addr
// MODULE-NOT: copy_addr
// CHECK: } // end sil function 'closure_with_inout_arg'
// Unlike a capture, an @inout *argument* of a closure comes from unknown
// callers, so MOVE mode must copy in/out inside the closure itself.
sil private @closure_with_inout_arg : $@convention(thin) (@inout Int64) -> () {
bb0(%0 : $*Int64):
%1 = address_to_pointer [stack_protection] %0 : $*Int64 to $Builtin.RawPointer
%2 = function_ref @unknown : $@convention(thin) (Builtin.RawPointer) -> ()
apply %2(%1) : $@convention(thin) (Builtin.RawPointer) -> ()
%r = tuple ()
return %r : $()
}
sil @call_closure_with_arg : $@convention(thin) (@noescape @callee_guaranteed (@inout Int64) -> ()) -> ()
// CHECK-LABEL: sil @test_closure_with_inout_arg
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'test_closure_with_inout_arg'
// The closure takes its inout argument from whoever invokes it, so this
// partial_apply site gets no attribute and no copies.
sil @test_closure_with_inout_arg : $@convention(thin) () -> () {
bb0:
%6 = function_ref @closure_with_inout_arg : $@convention(thin) (@inout Int64) -> ()
%7 = partial_apply [callee_guaranteed] [on_stack] %6() : $@convention(thin) (@inout Int64) -> ()
%8 = function_ref @call_closure_with_arg : $@convention(thin) (@noescape @callee_guaranteed (@inout Int64) -> ()) -> ()
%9 = apply %8(%7) : $@convention(thin) (@noescape @callee_guaranteed (@inout Int64) -> ()) -> ()
dealloc_stack %7 : $@noescape @callee_guaranteed (@inout Int64) -> ()
%12 = tuple ()
return %12 : $()
}
// CHECK-LABEL: sil @test_mem_intrinsics
// CHECK-NOT: copy_addr
// CHECK: } // end sil function 'test_mem_intrinsics'
// The protected pointer is only used as the *source* operand of the memcpy
// and memmove builtins (reads), so no copies are inserted.
sil @test_mem_intrinsics : $@convention(thin) (Builtin.RawPointer, Int64) -> () {
bb0(%0 : $Builtin.RawPointer, %1: $Int64):
%2 = alloc_stack $Int64
store %1 to %2 : $*Int64
%4 = address_to_pointer [stack_protection] %2 : $*Int64 to $Builtin.RawPointer
%5 = integer_literal $Builtin.Int64, 8
%6 = integer_literal $Builtin.Int1, 0
%7 = builtin "int_memcpy_RawPointer_RawPointer_Int64"(%0 : $Builtin.RawPointer, %4 : $Builtin.RawPointer, %5 : $Builtin.Int64, %6 : $Builtin.Int1) : $()
%8 = builtin "int_memmove_RawPointer_RawPointer_Int64"(%0 : $Builtin.RawPointer, %4 : $Builtin.RawPointer, %5 : $Builtin.Int64, %6 : $Builtin.Int1) : $()
dealloc_stack %2 : $*Int64
%10 = tuple ()
return %10 : $()
}