From 0aecebd7c1e0abf0b7711beff4f8bbc1c0a6558b Mon Sep 17 00:00:00 2001
From: Michael Gottesman <mgottesman@apple.com>
Date: Wed, 4 Mar 2020 20:03:32 -0800
Subject: [PATCH] [semantic-arc-opts] load [copy] -> load_borrow if copy is
 completely enclosed in certain kinds of exclusive access scopes.

Specifically, if we have a begin_access [read], or a begin_access [modify]
whose address is never actually written to, and we can prove that the
load [copy] is completely contained within the exclusive access scope, then
we convert the load [copy] into a load_borrow.

Example:

```
%0 = begin_access [read] ...
%1 = load [copy] %0
...
destroy_value %1
end_access %0
```

=>

```
%0 = begin_access [read] ...
%1 = load_borrow %0
...
end_borrow %1
end_access %0
```

rdar://60064692
---
 .../Transforms/SemanticARCOpts.cpp      |  36 ++++-
 test/SILOptimizer/semantic-arc-opts.sil | 124 ++++++++++++++++++
 2 files changed, 158 insertions(+), 2 deletions(-)

diff --git a/lib/SILOptimizer/Transforms/SemanticARCOpts.cpp b/lib/SILOptimizer/Transforms/SemanticARCOpts.cpp
index c11d978cfed..2c3d5a7f607 100644
--- a/lib/SILOptimizer/Transforms/SemanticARCOpts.cpp
+++ b/lib/SILOptimizer/Transforms/SemanticARCOpts.cpp
@@ -1089,8 +1089,40 @@ public:
   }
 
   void visitNestedAccess(BeginAccessInst *access) {
-    // Look through nested accesses.
-    return next(access->getOperand());
+    // First see if we have a read/modify access. If we do not, just look
+    // through the nested access.
+    switch (access->getAccessKind()) {
+    case SILAccessKind::Init:
+    case SILAccessKind::Deinit:
+      return next(access->getOperand());
+    case SILAccessKind::Read:
+    case SILAccessKind::Modify:
+      break;
+    }
+
+    // Next, check if our live range is completely contained within the
+    // begin/end access scope. If so, we may be able to use a load_borrow here!
+    SmallVector<Operand *, 8> endScopeUses;
+    transform(access->getEndAccesses(), std::back_inserter(endScopeUses),
+              [](EndAccessInst *eai) {
+                return &eai->getAllOperands()[0];
+              });
+    SmallPtrSet<SILBasicBlock *, 4> visitedBlocks;
+    LinearLifetimeChecker checker(visitedBlocks, ARCOpt.getDeadEndBlocks());
+    if (!checker.validateLifetime(access, endScopeUses,
+                                  liveRange.getDestroyingUses())) {
+      // If we fail the linear lifetime check, just look through the access:
+      return next(access->getOperand());
+    }
+
+    // Otherwise, if we have a read, then we are done!
+    if (access->getAccessKind() == SILAccessKind::Read) {
+      return answer(false);
+    }
+
+    // If we have a modify, check if our value is /ever/ written to. If it is
+    // never actually written to, then we can convert to a load_borrow.
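+    //
+    // (answer() takes the result of the "is the address written to?" query;
+    // answering false, as in the read case above, is what allows the caller
+    // to rewrite the load [copy] into a load_borrow.)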
+    return answer(ARCOpt.isAddressWrittenToDefUseAnalysis(access));
   }
 
   void visitArgumentAccess(SILFunctionArgument *arg) {
diff --git a/test/SILOptimizer/semantic-arc-opts.sil b/test/SILOptimizer/semantic-arc-opts.sil
index 0a4beaf75c2..98c6b8e7856 100644
--- a/test/SILOptimizer/semantic-arc-opts.sil
+++ b/test/SILOptimizer/semantic-arc-opts.sil
@@ -1657,3 +1657,127 @@ bb3(%0c : @guaranteed $FakeOptional):
   %9999 = tuple()
   return %9999 : $()
 }
+
+// CHECK-LABEL: sil [ossa] @loadcopy_to_loadborrow_from_read_access : $@convention(thin) (@guaranteed ClassLet) -> () {
+// CHECK-NOT: load [copy]
+// CHECK: load_borrow
+// CHECK-NOT: load [copy]
+// CHECK: } // end sil function 'loadcopy_to_loadborrow_from_read_access'
+sil [ossa] @loadcopy_to_loadborrow_from_read_access : $@convention(thin) (@guaranteed ClassLet) -> () {
+bb0(%0 : @guaranteed $ClassLet):
+  %1 = ref_element_addr %0 : $ClassLet, #ClassLet.aVar
+  %2 = begin_access [read] [dynamic] %1 : $*Klass
+  %3 = load [copy] %2 : $*Klass
+  %f = function_ref @guaranteed_klass_user : $@convention(thin) (@guaranteed Klass) -> ()
+  apply %f(%3) : $@convention(thin) (@guaranteed Klass) -> ()
+  destroy_value %3 : $Klass
+  end_access %2 : $*Klass
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+// CHECK-LABEL: sil [ossa] @loadcopy_to_loadborrow_from_mut_access_without_writes : $@convention(thin) (@guaranteed ClassLet) -> () {
+// CHECK-NOT: load [copy]
+// CHECK: load_borrow
+// CHECK-NOT: load [copy]
+// CHECK: } // end sil function 'loadcopy_to_loadborrow_from_mut_access_without_writes'
+sil [ossa] @loadcopy_to_loadborrow_from_mut_access_without_writes : $@convention(thin) (@guaranteed ClassLet) -> () {
+bb0(%0 : @guaranteed $ClassLet):
+  %1 = ref_element_addr %0 : $ClassLet, #ClassLet.aVar
+  %2 = begin_access [modify] [dynamic] %1 : $*Klass
+  %3 = load [copy] %2 : $*Klass
+  %f = function_ref @guaranteed_klass_user : $@convention(thin) (@guaranteed Klass) -> ()
+  apply %f(%3) : $@convention(thin) (@guaranteed Klass) -> ()
+  destroy_value %3 : $Klass
+  end_access %2 : $*Klass
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+// In time, we can handle this case by proving that the destroy_addr occurs
+// after the destroy_value.
+//
+// CHECK-LABEL: sil [ossa] @loadcopy_to_loadborrow_from_mut_access_with_writes : $@convention(thin) (@guaranteed ClassLet) -> () {
+// CHECK-NOT: load_borrow
+// CHECK: load [copy]
+// CHECK-NOT: load_borrow
+// CHECK: } // end sil function 'loadcopy_to_loadborrow_from_mut_access_with_writes'
+sil [ossa] @loadcopy_to_loadborrow_from_mut_access_with_writes : $@convention(thin) (@guaranteed ClassLet) -> () {
+bb0(%0 : @guaranteed $ClassLet):
+  %1 = ref_element_addr %0 : $ClassLet, #ClassLet.aVar
+  %2 = begin_access [modify] [dynamic] %1 : $*Klass
+  %3 = load [copy] %2 : $*Klass
+  %f = function_ref @guaranteed_klass_user : $@convention(thin) (@guaranteed Klass) -> ()
+  apply %f(%3) : $@convention(thin) (@guaranteed Klass) -> ()
+  destroy_value %3 : $Klass
+  destroy_addr %2 : $*Klass
+  end_access %2 : $*Klass
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+// We will never be able to handle this unless we can hoist the copy above the
+// destroy_addr. Once we have begin_borrows around all interior_pointers, we
+// can handle this version.
+//
+// CHECK-LABEL: sil [ossa] @loadcopy_to_loadborrow_from_mut_access_with_writes_2 : $@convention(thin) (@guaranteed ClassLet) -> () {
+// CHECK-NOT: load_borrow
+// CHECK: load [copy]
+// CHECK-NOT: load_borrow
+// CHECK: } // end sil function 'loadcopy_to_loadborrow_from_mut_access_with_writes_2'
+sil [ossa] @loadcopy_to_loadborrow_from_mut_access_with_writes_2 : $@convention(thin) (@guaranteed ClassLet) -> () {
+bb0(%0 : @guaranteed $ClassLet):
+  %1 = ref_element_addr %0 : $ClassLet, #ClassLet.aVar
+  %2 = begin_access [modify] [dynamic] %1 : $*Klass
+  %3 = load [copy] %2 : $*Klass
+  %f = function_ref @guaranteed_klass_user : $@convention(thin) (@guaranteed Klass) -> ()
+  apply %f(%3) : $@convention(thin) (@guaranteed Klass) -> ()
+  destroy_addr %2 : $*Klass
+  destroy_value %3 : $Klass
+  end_access %2 : $*Klass
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+// We will never be able to handle this case since we can not hoist the
+// destroy_value above the apply of guaranteed_klass_user.
+//
+// CHECK-LABEL: sil [ossa] @loadcopy_to_loadborrow_from_mut_access_with_writes_3 : $@convention(thin) (@guaranteed ClassLet) -> () {
+// CHECK-NOT: load_borrow
+// CHECK: load [copy]
+// CHECK-NOT: load_borrow
+// CHECK: } // end sil function 'loadcopy_to_loadborrow_from_mut_access_with_writes_3'
+sil [ossa] @loadcopy_to_loadborrow_from_mut_access_with_writes_3 : $@convention(thin) (@guaranteed ClassLet) -> () {
+bb0(%0 : @guaranteed $ClassLet):
+  %1 = ref_element_addr %0 : $ClassLet, #ClassLet.aVar
+  %2 = begin_access [modify] [dynamic] %1 : $*Klass
+  %3 = load [copy] %2 : $*Klass
+  destroy_addr %2 : $*Klass
+  %f = function_ref @guaranteed_klass_user : $@convention(thin) (@guaranteed Klass) -> ()
+  apply %f(%3) : $@convention(thin) (@guaranteed Klass) -> ()
+  destroy_value %3 : $Klass
+  end_access %2 : $*Klass
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+// We will never be able to handle this case since the end_access occurs
+// before the use of %3, so we can not form a load_borrow that is long enough.
+//
+// CHECK-LABEL: sil [ossa] @loadcopy_to_loadborrow_from_mut_access_with_writes_4 : $@convention(thin) (@guaranteed ClassLet) -> () {
+// CHECK-NOT: load_borrow
+// CHECK: load [copy]
+// CHECK-NOT: load_borrow
+// CHECK: } // end sil function 'loadcopy_to_loadborrow_from_mut_access_with_writes_4'
+sil [ossa] @loadcopy_to_loadborrow_from_mut_access_with_writes_4 : $@convention(thin) (@guaranteed ClassLet) -> () {
+bb0(%0 : @guaranteed $ClassLet):
+  %1 = ref_element_addr %0 : $ClassLet, #ClassLet.aVar
+  %2 = begin_access [modify] [dynamic] %1 : $*Klass
+  %3 = load [copy] %2 : $*Klass
+  end_access %2 : $*Klass
+  %f = function_ref @guaranteed_klass_user : $@convention(thin) (@guaranteed Klass) -> ()
+  apply %f(%3) : $@convention(thin) (@guaranteed Klass) -> ()
+  destroy_value %3 : $Klass
+  %9999 = tuple()
+  return %9999 : $()
+}