This occurred when a stack-promoted object with a devirtualized final release was not actually allocated on the stack. The ReleaseDevirtualizer now models the sequence of a final release more accurately: it inserts a set_deallocating instruction and calls the deallocator (instead of just the deinit). This change also includes two peephole optimizations, in IRGen and LLVMStackPromotion, which remove the unused runtime calls in case the stack-promoted object really is allocated on the stack. This fixes rdar://problem/25068118
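For illustration, a minimal sketch of the devirtualized final release pattern exercised by the tests below (the deallocator symbol @TestClass_deallocator is hypothetical, named here only for readability):

  %o = alloc_ref [stack] $TestClass
  // ... uses of %o ...
  // Devirtualized final release: mark the object as deallocating and
  // call the deallocator rather than only the deinit.
  set_deallocating %o : $TestClass
  %d = function_ref @TestClass_deallocator : $@convention(thin) (TestClass) -> ()
  %a = apply %d(%o) : $@convention(thin) (TestClass) -> ()
  dealloc_ref [stack] %o : $TestClass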
// RUN: %target-swift-frontend -stack-promotion-limit 48 -Onone -emit-ir %s | FileCheck %s

import Builtin
import Swift

class TestClass {
  @sil_stored var a : Int64
  init()
}

struct TestStruct {
  @sil_stored var a : Int64
  @sil_stored var b : Int64
  @sil_stored var c : Int64
}

class BigClass {
  @sil_stored var a : Int64
  @sil_stored var b : Int64
  @sil_stored var c : Int64
  @sil_stored var d : Int64
  @sil_stored var e : Int64
  @sil_stored var f : Int64
  @sil_stored var g : Int64
  init()
}

sil_vtable TestClass {}
sil_vtable BigClass {}

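// The final release of the stack-promoted object is not devirtualized: IRGen
// should initialize the object with swift_initStackObject and release it with
// a runtime release call.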
// CHECK-LABEL: define{{( protected)?}} void @simple_promote
// CHECK: %reference.raw = alloca %[[C:[a-zA-Z0-9_]+]], align 8
// CHECK-NEXT: [[M:%[0-9]+]] = call %swift.type* @_TMa[[C]]()
// CHECK-NEXT: [[O:%[0-9]+]] = bitcast %[[C]]* %reference.raw to %swift.refcounted*
// CHECK-NEXT: %reference.new = call %swift.refcounted* @swift_initStackObject(%swift.type* [[M]], %swift.refcounted* [[O]])
// CHECK-NEXT: [[R:%[0-9]+]] = bitcast %swift.refcounted* %reference.new to %[[C]]*
// CHECK-NEXT: call {{.*}} @rt_swift_release {{.*}} [[R]])
// CHECK-NEXT: [[O2:%[0-9]+]] = bitcast %[[C]]* [[R]] to i8*
// CHECK-NEXT: call void @llvm.lifetime.end(i64 -1, i8* [[O2]])
// CHECK-NEXT: ret void
sil @simple_promote : $@convention(thin) () -> () {
bb0:
  %o1 = alloc_ref [stack] $TestClass
  strong_release %o1 : $TestClass
  dealloc_ref [stack] %o1 : $TestClass

  %r = tuple()
  return %r : $()
}

// The stack promotion limit of 48 bytes allows only one of the two alloc_refs
// to be allocated on the stack.

// CHECK-LABEL: define{{( protected)?}} void @exceed_limit
// CHECK: alloca {{.*}}TestClass
// CHECK: alloca {{.*}}TestStruct
// CHECK-NOT: alloca
// CHECK: call %swift.refcounted* @swift_initStackObject
// CHECK: call noalias %swift.refcounted* @rt_swift_allocObject
// CHECK: ret void
sil @exceed_limit : $@convention(thin) () -> () {
bb0:
  %o1 = alloc_ref [stack] $TestClass
  %o2 = alloc_ref [stack] $TestClass

  %s1 = alloc_stack $TestStruct

  %f = function_ref @unknown_func : $@convention(thin) (@inout TestStruct) -> ()
  %a = apply %f(%s1) : $@convention(thin) (@inout TestStruct) -> ()

  dealloc_stack %s1 : $*TestStruct

  strong_release %o1 : $TestClass
  strong_release %o2 : $TestClass
  dealloc_ref [stack] %o2 : $TestClass
  dealloc_ref [stack] %o1 : $TestClass

  %r = tuple()
  return %r : $()
}

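// The stack-promoted object has a devirtualized final release that calls a
// not-inlined destructor: IRGen should emit swift_setDeallocating followed by
// the destructor call, with no heap deallocation.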
// CHECK-LABEL: define{{( protected)?}} void @promoted_with_devirtualized_release
// CHECK: %reference.raw = alloca %[[C:[a-zA-Z0-9_]+]], align 8
// CHECK-NEXT: [[M:%[0-9]+]] = call %swift.type* @_TMa[[C]]()
// CHECK-NEXT: [[O:%[0-9]+]] = bitcast %[[C]]* %reference.raw to %swift.refcounted*
// CHECK-NEXT: %reference.new = call %swift.refcounted* @swift_initStackObject(%swift.type* [[M]], %swift.refcounted* [[O]])
// CHECK-NEXT: [[R:%[0-9]+]] = bitcast %swift.refcounted* %reference.new to %[[C]]*
// CHECK-NEXT: call {{.*}} @swift_setDeallocating {{.*}}(%[[C]]* [[R]])
// CHECK-NEXT: call void @not_inlined_destructor(%[[C]]* [[R]])
// CHECK-NEXT: [[O2:%[0-9]+]] = bitcast %[[C]]* [[R]] to i8*
// CHECK-NEXT: call void @llvm.lifetime.end(i64 -1, i8* [[O2]])
// CHECK-NEXT: ret void
sil @promoted_with_devirtualized_release : $@convention(thin) () -> () {
bb0:
  %o1 = alloc_ref [stack] $TestClass
  set_deallocating %o1 : $TestClass
  %f = function_ref @not_inlined_destructor : $@convention(thin) (TestClass) -> ()
  %a = apply %f(%o1) : $@convention(thin) (TestClass) -> ()
  dealloc_ref [stack] %o1 : $TestClass

  %r = tuple()
  return %r : $()
}

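// The devirtualized release is completely inlined and the object really is
// allocated on the stack: the peephole optimizations should remove the now
// unused swift_setDeallocating and swift_deallocClassInstance runtime calls.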
// CHECK-LABEL: define{{( protected)?}} void @promoted_with_inlined_devirtualized_release
// CHECK: %reference.raw = alloca %[[C:[a-zA-Z0-9_]+]], align 8
// CHECK-NEXT: [[M:%[0-9]+]] = call %swift.type* @_TMa[[C]]()
// CHECK-NEXT: [[O:%[0-9]+]] = bitcast %[[C]]* %reference.raw to %swift.refcounted*
// CHECK-NEXT: %reference.new = call %swift.refcounted* @swift_initStackObject(%swift.type* [[M]], %swift.refcounted* [[O]])
// CHECK-NEXT: [[R:%[0-9]+]] = bitcast %swift.refcounted* %reference.new to %[[C]]*
// CHECK-NOT: call
// CHECK: [[O2:%[0-9]+]] = bitcast %[[C]]* [[R]] to i8*
// CHECK-NEXT: call void @llvm.lifetime.end(i64 -1, i8* [[O2]])
// CHECK-NEXT: ret void
sil @promoted_with_inlined_devirtualized_release : $@convention(thin) () -> () {
bb0:
  %o1 = alloc_ref [stack] $TestClass
  set_deallocating %o1 : $TestClass
  dealloc_ref %o1 : $TestClass
  dealloc_ref [stack] %o1 : $TestClass

  %r = tuple()
  return %r : $()
}

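// BigClass is too big for the 48 byte stack promotion limit, so the object is
// allocated on the heap after all: the devirtualized release must still call
// swift_setDeallocating and swift_deallocClassInstance (rdar://problem/25068118).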
// CHECK-LABEL: define{{( protected)?}} void @not_promoted_with_inlined_devirtualized_release
// CHECK: [[O:%[0-9]+]] = call {{.*}} @rt_swift_allocObject
// CHECK-NEXT: [[BC:%[0-9]+]] = bitcast %swift.refcounted* [[O]] to
// CHECK-NEXT: call {{.*}} @swift_setDeallocating {{.*}}({{.*}} [[BC]])
// CHECK-NEXT: [[BC2:%[0-9]+]] = bitcast {{.*}} [[BC]] to %swift.refcounted*
// CHECK-NEXT: call void @swift_deallocClassInstance(%swift.refcounted* [[BC2]], {{.*}})
// CHECK-NEXT: ret void
sil @not_promoted_with_inlined_devirtualized_release : $@convention(thin) () -> () {
bb0:
  %o1 = alloc_ref [stack] $BigClass
  set_deallocating %o1 : $BigClass
  dealloc_ref %o1 : $BigClass
  dealloc_ref [stack] %o1 : $BigClass

  %r = tuple()
  return %r : $()
}

sil @not_inlined_destructor : $@convention(thin) (TestClass) -> ()
sil @unknown_func : $@convention(thin) (@inout TestStruct) -> ()