// RUN: %target-swift-frontend -assume-parsing-unqualified-ownership-sil %s -emit-ir | %FileCheck %s

// REQUIRES: CPU=x86_64

sil_stage canonical

import Builtin
import Swift
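// BigStruct below is six Ints (48 bytes on x86_64), which does not fit in the
// 24-byte UnsafeValueBuffer these tests exercise; a single Int does.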
struct BigStruct {
  var x, y, z, w, v, u : Int
}
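// Allocating a small value (a single Int) in the buffer: the value fits
// inline, so the expected IR below stores straight into the buffer with no
// runtime allocation call.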
sil @alloc_small : $(@inout Builtin.UnsafeValueBuffer, Int) -> () {
entry(%b : $*Builtin.UnsafeValueBuffer, %v : $Int):
  %0 = alloc_value_buffer $Int in %b : $*Builtin.UnsafeValueBuffer
  store %v to %0 : $*Int
  %r = tuple ()
  return %r : $()
}
// CHECK-LABEL: define{{( protected)?}} swiftcc void @alloc_small([24 x i8]* nocapture dereferenceable({{.*}}), i64)
// CHECK-NEXT: entry:
// CHECK-NEXT: [[T0:%.*]] = bitcast [24 x i8]* %0 to %TSi*
// CHECK-NEXT: [[T2:%.*]] = getelementptr inbounds %TSi, %TSi* [[T0]], i32 0, i32 0
// CHECK-NEXT: store i64 %1, i64* [[T2]], align 8
// CHECK-NEXT: ret void
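// Projecting a small value: the projection resolves to the address of the
// inline buffer itself, so the expected IR matches alloc_small's.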
sil @project_small : $(@inout Builtin.UnsafeValueBuffer, Int) -> () {
entry(%b : $*Builtin.UnsafeValueBuffer, %v : $Int):
  %0 = project_value_buffer $Int in %b : $*Builtin.UnsafeValueBuffer
  store %v to %0 : $*Int
  %r = tuple ()
  return %r : $()
}
// CHECK-LABEL: define{{( protected)?}} swiftcc void @project_small([24 x i8]* nocapture dereferenceable({{.*}}), i64)
// CHECK-NEXT: entry:
// CHECK-NEXT: [[T0:%.*]] = bitcast [24 x i8]* %0 to %TSi*
// CHECK-NEXT: [[T2:%.*]] = getelementptr inbounds %TSi, %TSi* [[T0]], i32 0, i32 0
// CHECK-NEXT: store i64 %1, i64* [[T2]], align 8
// CHECK-NEXT: ret void
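// Deallocating a small value: nothing was heap-allocated, so the expected IR
// is just an empty function that returns.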
sil @dealloc_small : $(@inout Builtin.UnsafeValueBuffer) -> () {
entry(%b : $*Builtin.UnsafeValueBuffer):
  dealloc_value_buffer $Int in %b : $*Builtin.UnsafeValueBuffer
  %r = tuple ()
  return %r : $()
}
// CHECK-LABEL: define{{( protected)?}} swiftcc void @dealloc_small([24 x i8]* nocapture dereferenceable({{.*}}))
// CHECK-NEXT: entry:
// CHECK-NEXT: ret void
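// Allocating a large value: BigStruct does not fit inline, so the expected IR
// calls the runtime's slow allocation path (48 bytes, alignment mask 7),
// stores the returned pointer into the buffer, and then writes each field
// through it.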
sil @alloc_big : $(@inout Builtin.UnsafeValueBuffer, BigStruct) -> () {
entry(%b : $*Builtin.UnsafeValueBuffer, %v : $BigStruct):
  %0 = alloc_value_buffer $BigStruct in %b : $*Builtin.UnsafeValueBuffer
  store %v to %0 : $*BigStruct
  %r = tuple ()
  return %r : $()
}
// CHECK-LABEL: define{{( protected)?}} swiftcc void @alloc_big([24 x i8]* nocapture dereferenceable({{.*}}), %T13value_buffers9BigStructV* noalias nocapture dereferenceable({{.*}}))
// CHECK-NEXT: entry:
// CHECK: [[SLOT0:%.*]] = load i64
// CHECK: [[SLOT1:%.*]] = load i64
// CHECK: [[SLOT2:%.*]] = load i64
// CHECK: [[SLOT3:%.*]] = load i64
// CHECK: [[SLOT4:%.*]] = load i64
// CHECK: [[SLOT5:%.*]] = load i64
// CHECK-NEXT: [[T0:%.*]] = call noalias i8* @swift_rt_swift_slowAlloc(i64 48, i64 7)
// CHECK-NEXT: [[T1:%.*]] = bitcast [24 x i8]* %0 to i8**
// CHECK-NEXT: store i8* [[T0]], i8** [[T1]], align 8
// CHECK-NEXT: [[ADDR:%.*]] = bitcast i8* [[T0]] to %T13value_buffers9BigStructV*
// CHECK-NEXT: [[T0:%.*]] = getelementptr inbounds %T13value_buffers9BigStructV, %T13value_buffers9BigStructV* [[ADDR]], i32 0, i32 0
// CHECK-NEXT: [[T1:%.*]] = getelementptr inbounds %TSi, %TSi* [[T0]], i32 0, i32 0
// CHECK-NEXT: store i64 [[SLOT0]], i64* [[T1]], align 8
// CHECK-NEXT: [[T0:%.*]] = getelementptr inbounds %T13value_buffers9BigStructV, %T13value_buffers9BigStructV* [[ADDR]], i32 0, i32 1
// CHECK-NEXT: [[T1:%.*]] = getelementptr inbounds %TSi, %TSi* [[T0]], i32 0, i32 0
// CHECK-NEXT: store i64 [[SLOT1]], i64* [[T1]], align 8
// CHECK-NEXT: [[T0:%.*]] = getelementptr inbounds %T13value_buffers9BigStructV, %T13value_buffers9BigStructV* [[ADDR]], i32 0, i32 2
// CHECK-NEXT: [[T1:%.*]] = getelementptr inbounds %TSi, %TSi* [[T0]], i32 0, i32 0
// CHECK-NEXT: store i64 [[SLOT2]], i64* [[T1]], align 8
// CHECK-NEXT: [[T0:%.*]] = getelementptr inbounds %T13value_buffers9BigStructV, %T13value_buffers9BigStructV* [[ADDR]], i32 0, i32 3
// CHECK-NEXT: [[T1:%.*]] = getelementptr inbounds %TSi, %TSi* [[T0]], i32 0, i32 0
// CHECK-NEXT: store i64 [[SLOT3]], i64* [[T1]], align 8
// CHECK-NEXT: [[T0:%.*]] = getelementptr inbounds %T13value_buffers9BigStructV, %T13value_buffers9BigStructV* [[ADDR]], i32 0, i32 4
// CHECK-NEXT: [[T1:%.*]] = getelementptr inbounds %TSi, %TSi* [[T0]], i32 0, i32 0
// CHECK-NEXT: store i64 [[SLOT4]], i64* [[T1]], align 8
// CHECK-NEXT: [[T0:%.*]] = getelementptr inbounds %T13value_buffers9BigStructV, %T13value_buffers9BigStructV* [[ADDR]], i32 0, i32 5
// CHECK-NEXT: [[T1:%.*]] = getelementptr inbounds %TSi, %TSi* [[T0]], i32 0, i32 0
// CHECK-NEXT: store i64 [[SLOT5]], i64* [[T1]], align 8
// CHECK-NEXT: ret void
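// Projecting a large value: the buffer holds a pointer to the out-of-line
// allocation, so the expected IR loads that pointer back out and stores the
// fields through it, without allocating again.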
sil @project_big : $(@inout Builtin.UnsafeValueBuffer, BigStruct) -> () {
entry(%b : $*Builtin.UnsafeValueBuffer, %v : $BigStruct):
  %0 = project_value_buffer $BigStruct in %b : $*Builtin.UnsafeValueBuffer
  store %v to %0 : $*BigStruct
  %r = tuple ()
  return %r : $()
}
// CHECK-LABEL: define{{( protected)?}} swiftcc void @project_big([24 x i8]* nocapture dereferenceable({{.*}}), %T13value_buffers9BigStructV* noalias nocapture dereferenceable({{.*}}))
// CHECK-NEXT: entry:
// CHECK: [[SLOT0:%.*]] = load i64
// CHECK: [[SLOT1:%.*]] = load i64
// CHECK: [[SLOT2:%.*]] = load i64
// CHECK: [[SLOT3:%.*]] = load i64
// CHECK: [[SLOT4:%.*]] = load i64
// CHECK: [[SLOT5:%.*]] = load i64
// CHECK-NEXT: [[T0:%.*]] = bitcast [24 x i8]* %0 to %T13value_buffers9BigStructV**
// CHECK-NEXT: [[ADDR:%.*]] = load %T13value_buffers9BigStructV*, %T13value_buffers9BigStructV** [[T0]], align 8
// CHECK-NEXT: [[T0:%.*]] = getelementptr inbounds %T13value_buffers9BigStructV, %T13value_buffers9BigStructV* [[ADDR]], i32 0, i32 0
// CHECK-NEXT: [[T1:%.*]] = getelementptr inbounds %TSi, %TSi* [[T0]], i32 0, i32 0
// CHECK-NEXT: store i64 [[SLOT0]], i64* [[T1]], align 8
// CHECK-NEXT: [[T0:%.*]] = getelementptr inbounds %T13value_buffers9BigStructV, %T13value_buffers9BigStructV* [[ADDR]], i32 0, i32 1
// CHECK-NEXT: [[T1:%.*]] = getelementptr inbounds %TSi, %TSi* [[T0]], i32 0, i32 0
// CHECK-NEXT: store i64 [[SLOT1]], i64* [[T1]], align 8
// CHECK-NEXT: [[T0:%.*]] = getelementptr inbounds %T13value_buffers9BigStructV, %T13value_buffers9BigStructV* [[ADDR]], i32 0, i32 2
// CHECK-NEXT: [[T1:%.*]] = getelementptr inbounds %TSi, %TSi* [[T0]], i32 0, i32 0
// CHECK-NEXT: store i64 [[SLOT2]], i64* [[T1]], align 8
// CHECK-NEXT: [[T0:%.*]] = getelementptr inbounds %T13value_buffers9BigStructV, %T13value_buffers9BigStructV* [[ADDR]], i32 0, i32 3
// CHECK-NEXT: [[T1:%.*]] = getelementptr inbounds %TSi, %TSi* [[T0]], i32 0, i32 0
// CHECK-NEXT: store i64 [[SLOT3]], i64* [[T1]], align 8
// CHECK-NEXT: [[T0:%.*]] = getelementptr inbounds %T13value_buffers9BigStructV, %T13value_buffers9BigStructV* [[ADDR]], i32 0, i32 4
// CHECK-NEXT: [[T1:%.*]] = getelementptr inbounds %TSi, %TSi* [[T0]], i32 0, i32 0
// CHECK-NEXT: store i64 [[SLOT4]], i64* [[T1]], align 8
// CHECK-NEXT: [[T0:%.*]] = getelementptr inbounds %T13value_buffers9BigStructV, %T13value_buffers9BigStructV* [[ADDR]], i32 0, i32 5
// CHECK-NEXT: [[T1:%.*]] = getelementptr inbounds %TSi, %TSi* [[T0]], i32 0, i32 0
// CHECK-NEXT: store i64 [[SLOT5]], i64* [[T1]], align 8
// CHECK-NEXT: ret void
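// Deallocating a large value: the expected IR loads the stored pointer out of
// the buffer and frees it via the runtime's slow deallocation path with the
// same size and alignment mask.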
sil @dealloc_big : $(@inout Builtin.UnsafeValueBuffer) -> () {
entry(%b : $*Builtin.UnsafeValueBuffer):
  dealloc_value_buffer $BigStruct in %b : $*Builtin.UnsafeValueBuffer
  %r = tuple ()
  return %r : $()
}
// CHECK-LABEL: define{{( protected)?}} swiftcc void @dealloc_big([24 x i8]* nocapture dereferenceable({{.*}}))
// CHECK-NEXT: entry:
// CHECK-NEXT: [[T0:%.*]] = bitcast [24 x i8]* %0 to i8**
// CHECK-NEXT: [[ADDR:%.*]] = load i8*, i8** [[T0]], align 8
// CHECK-NEXT: call void @swift_rt_swift_slowDealloc(i8* [[ADDR]], i64 48, i64 7)
// CHECK-NEXT: ret void