On architectures where the calling convention uses the same register for an
argument and for the return value, this allows the argument register to stay
live across the calls. We use LLVM's 'returned' attribute on the parameter to
facilitate this.

We used to perform this optimization in a dedicated optimization pass; that
pass was ripped out some time ago, around commit 955e4ed652. By putting LLVM's
'returned' attribute on swift_*retain instead, we get the same optimization
from the LLVM backend.
// RUN: %target-swift-frontend -assume-parsing-unqualified-ownership-sil -emit-ir %s | %FileCheck %s --check-prefix=CHECK --check-prefix=CHECK-%target-runtime

// REQUIRES: CPU=x86_64

import Swift

protocol Foo: class { }

// CHECK-LABEL: define{{( protected)?}} swiftcc void @store_weak({ %swift.weak, i8** }* noalias nocapture sret, i64, i64) {{.*}} {
// CHECK: entry:
// CHECK-objc: [[INSTANCE:%.*]] = inttoptr i64 %1 to %objc_object*
// CHECK-native: [[INSTANCE:%.*]] = inttoptr i64 %1 to %swift.refcounted*
// CHECK: [[WTABLE:%.*]] = inttoptr i64 %2 to i8**
// CHECK: [[WTABLE_SLOT:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* %0, i32 0, i32 1
// CHECK: store i8** [[WTABLE]], i8*** [[WTABLE_SLOT]], align 8
// CHECK: [[INSTANCE_SLOT:%.*]] = getelementptr inbounds { %swift.weak, i8** }, { %swift.weak, i8** }* %0, i32 0, i32 0
// CHECK-objc: call %swift.weak* @swift_unknownWeakAssign(%swift.weak* returned [[INSTANCE_SLOT]], %objc_object* [[INSTANCE]]) {{#[0-9]+}}
// CHECK-native: call %swift.weak* @swift_weakAssign(%swift.weak* returned [[INSTANCE_SLOT]], %swift.refcounted* [[INSTANCE]]) {{#[0-9]+}}
// CHECK: ret void
// CHECK: }
sil @store_weak : $@convention(thin) (@owned Foo?) -> @out @sil_weak Foo? {
entry(%w : $*@sil_weak Foo?, %x : $Foo?):
  store_weak %x to %w : $*@sil_weak Foo?
  return undef : $()
}
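As a usage sketch, assuming a built Swift compiler and LLVM's lit driver (the
placeholder path is illustrative), the test can be run on its own with

  llvm-lit -sv <path/to/this/test.swift>

which expands the %target-swift-frontend and %FileCheck substitutions in the
RUN line above and skips the test unless the REQUIRES clause (CPU=x86_64) is
satisfied.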