Optimizer: support the new array literal initialization pattern in the ForEachLoopUnroll pass

This commit is contained in:
Erik Eckstein
2025-10-10 10:09:16 +02:00
parent 8da6c8a072
commit 02fafc63d6
4 changed files with 143 additions and 79 deletions

View File

@@ -14,6 +14,7 @@
#include "swift/Basic/Assertions.h"
#include "swift/SIL/DebugUtils.h"
#include "swift/SIL/InstructionUtils.h"
#include "swift/SIL/NodeDatastructures.h"
#include "swift/SIL/SILArgument.h"
#include "swift/SIL/SILBuilder.h"
#include "swift/SIL/SILFunction.h"
@@ -732,14 +733,10 @@ SILValue swift::ArraySemanticsCall::getArrayElementStoragePointer() const {
return getArrayUninitializedInitResult(*this, 1);
}
bool swift::ArraySemanticsCall::mapInitializationStores(
llvm::DenseMap<uint64_t, StoreInst *> &ElementValueMap) {
if (getKind() != ArrayCallKind::kArrayUninitialized &&
getKind() != ArrayCallKind::kArrayUninitializedIntrinsic)
return false;
SILValue ElementBuffer = getArrayElementStoragePointer();
static SILValue getElementBaseAddress(ArraySemanticsCall initArray) {
SILValue ElementBuffer = initArray.getArrayElementStoragePointer();
if (!ElementBuffer)
return false;
return SILValue();
// Match initialization stores into ElementBuffer. E.g.
// %82 = struct_extract %element_buffer : $UnsafeMutablePointer<Int>
@@ -756,9 +753,29 @@ bool swift::ArraySemanticsCall::mapInitializationStores(
// mark_dependence can be an operand of the struct_extract or its user.
SILValue UnsafeMutablePointerExtract;
if (getKind() == ArrayCallKind::kArrayUninitializedIntrinsic) {
if (initArray.getKind() == ArrayCallKind::kArrayUninitializedIntrinsic) {
UnsafeMutablePointerExtract = dyn_cast_or_null<MarkDependenceInst>(
getSingleNonDebugUser(ElementBuffer));
if (!UnsafeMutablePointerExtract) {
SILValue array = initArray.getArrayValue();
ValueWorklist worklist(array);
while (SILValue v = worklist.pop()) {
for (auto use : v->getUses()) {
switch (use->getUser()->getKind()) {
case SILInstructionKind::UncheckedRefCastInst:
case SILInstructionKind::StructExtractInst:
case SILInstructionKind::BeginBorrowInst:
worklist.pushIfNotVisited(cast<SingleValueInstruction>(use->getUser()));
break;
case SILInstructionKind::RefTailAddrInst:
return cast<RefTailAddrInst>(use->getUser());
default:
break;
}
}
}
return SILValue();
}
} else {
auto user = getSingleNonDebugUser(ElementBuffer);
// Match mark_dependence (struct_extract or
@@ -774,21 +791,33 @@ bool swift::ArraySemanticsCall::mapInitializationStores(
}
}
if (!UnsafeMutablePointerExtract)
return false;
return SILValue();
auto *PointerToAddress = dyn_cast_or_null<PointerToAddressInst>(
getSingleNonDebugUser(UnsafeMutablePointerExtract));
if (!PointerToAddress)
return SILValue();
return PointerToAddress;
}
bool swift::ArraySemanticsCall::mapInitializationStores(
llvm::DenseMap<uint64_t, StoreInst *> &ElementValueMap) {
if (getKind() != ArrayCallKind::kArrayUninitialized &&
getKind() != ArrayCallKind::kArrayUninitializedIntrinsic)
return false;
SILValue elementAddr = getElementBaseAddress(*this);
if (!elementAddr)
return false;
// Match the stores. We can have either a store directly to the address or
// to an index_addr projection.
for (auto *Op : PointerToAddress->getUses()) {
for (auto *Op : elementAddr->getUses()) {
auto *Inst = Op->getUser();
// Store to the base.
auto *SI = dyn_cast<StoreInst>(Inst);
if (SI && SI->getDest() == PointerToAddress) {
if (SI && SI->getDest() == elementAddr) {
// We have already seen an entry for this index; bail.
if (ElementValueMap.count(0))
return false;

View File

@@ -174,7 +174,7 @@ class ArrayInfo {
/// Classify uses of the array into forEach uses, read-only uses etc. and set
/// the fields of this instance appropriately. This function will recursively
/// classify the uses of borrows and copy-values of the array as well.
void classifyUsesOfArray(SILValue arrayValue);
void classifyUsesOfArray(SILValue arrayValue, bool isInInitSection);
public:
ArrayInfo() {}
@@ -293,7 +293,7 @@ static TryApplyInst *isForEachUseOfArray(SILInstruction *user, SILValue array) {
return apply;
}
void ArrayInfo::classifyUsesOfArray(SILValue arrayValue) {
void ArrayInfo::classifyUsesOfArray(SILValue arrayValue, bool isInInitSection) {
for (Operand *operand : arrayValue->getUses()) {
auto *user = operand->getUser();
if (isIncidentalUse(user))
@@ -314,15 +314,21 @@ void ArrayInfo::classifyUsesOfArray(SILValue arrayValue) {
}
// Recursively classify begin_borrow, copy_value, and move_value uses.
if (BeginBorrowInst *beginBorrow = dyn_cast<BeginBorrowInst>(user)) {
classifyUsesOfArray(beginBorrow);
if (isInInitSection) {
// This begin_borrow is used to get the element addresses for array
// initialization. This is happening between the allocate-uninitialized
// and the finalize-array intrinsic calls. We can ignore this.
continue;
}
classifyUsesOfArray(beginBorrow, isInInitSection);
continue;
}
if (CopyValueInst *copyValue = dyn_cast<CopyValueInst>(user)) {
classifyUsesOfArray(copyValue);
classifyUsesOfArray(copyValue, isInInitSection);
continue;
}
if (MoveValueInst *moveValue = dyn_cast<MoveValueInst>(user)) {
classifyUsesOfArray(moveValue);
classifyUsesOfArray(moveValue, isInInitSection);
continue;
}
if (DestroyValueInst *destroyValue = dyn_cast<DestroyValueInst>(user)) {
@@ -338,7 +344,7 @@ void ArrayInfo::classifyUsesOfArray(SILValue arrayValue) {
continue;
if (arrayOp.getKind() == ArrayCallKind::kArrayFinalizeIntrinsic) {
classifyUsesOfArray((ApplyInst *)arrayOp);
classifyUsesOfArray((ApplyInst *)arrayOp, /*isInInitSection*/ false);
continue;
}
@@ -357,7 +363,7 @@ bool ArrayInfo::tryInitialize(ApplyInst *apply) {
if (!arrayAllocateUninitCall.mapInitializationStores(elementStoreMap))
return false;
// Collect information about uses of the array value.
classifyUsesOfArray(arrayValue);
classifyUsesOfArray(arrayValue, /*isInInitSection=*/ true);
return true;
}

View File

@@ -6,7 +6,22 @@
import Swift
import Builtin
sil [_semantics "array.uninitialized_intrinsic"] @_allocateUninitializedArray : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned Array<τ_0_0>, Builtin.RawPointer)
class MyArrayBuffer {
}
struct MyArray<Element>: Sequence {
let buffer: MyArrayBuffer
struct Iterator: IteratorProtocol {
public mutating func next() -> Element?
}
func makeIterator() -> Iterator
}
sil [_semantics "array.uninitialized_intrinsic"] @_allocateUninitializedArray : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned MyArray<τ_0_0>, Builtin.RawPointer)
sil [_semantics "array.finalize_intrinsic"] @_finalizeArray : $@convention(thin) <τ_0_0> (@owned MyArray<τ_0_0>) -> @owned MyArray<τ_0_0>
sil [_semantics "sequence.forEach"] @forEach : $@convention(method) <τ_0_0 where τ_0_0 : Sequence> (@noescape @callee_guaranteed (@in_guaranteed τ_0_0.Element) -> @error any Error, @in_guaranteed τ_0_0) -> @error any Error
@@ -44,32 +59,36 @@ sil @forEachBody : $@convention(thin) (@in_guaranteed Builtin.Int64) -> @error a
sil hidden [ossa] @forEachLoopUnrollTest : $@convention(thin) () -> () {
bb0:
%0 = integer_literal $Builtin.Word, 2
%1 = function_ref @_allocateUninitializedArray : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned Array<τ_0_0>, Builtin.RawPointer)
%2 = apply %1<Builtin.Int64>(%0) : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned Array<τ_0_0>, Builtin.RawPointer)
(%3, %4a) = destructure_tuple %2 : $(Array<Builtin.Int64>, Builtin.RawPointer)
%4 = mark_dependence %4a : $Builtin.RawPointer on %3 : $Array<Builtin.Int64>
%5 = pointer_to_address %4 : $Builtin.RawPointer to [strict] $*Builtin.Int64
%1 = function_ref @_allocateUninitializedArray : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned MyArray<τ_0_0>, Builtin.RawPointer)
%2 = apply %1<Builtin.Int64>(%0) : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned MyArray<τ_0_0>, Builtin.RawPointer)
(%3, %4a) = destructure_tuple %2 : $(MyArray<Builtin.Int64>, Builtin.RawPointer)
%4 = begin_borrow %3
%b = struct_extract %4, #MyArray.buffer
%5 = ref_tail_addr %b, $Builtin.Int64
%6 = integer_literal $Builtin.Int64, 15
store %6 to [trivial] %5 : $*Builtin.Int64
%12 = integer_literal $Builtin.Word, 1
%13 = index_addr %5 : $*Builtin.Int64, %12 : $Builtin.Word
%14 = integer_literal $Builtin.Int64, 27
store %14 to [trivial] %13 : $*Builtin.Int64
%21 = begin_borrow %3 : $Array<Builtin.Int64>
%22 = alloc_stack $Array<Builtin.Int64>
%23 = store_borrow %21 to %22 : $*Array<Builtin.Int64>
end_borrow %4
%f = function_ref @_finalizeArray : $@convention(thin) <τ_0_0> (@owned MyArray<τ_0_0>) -> @owned MyArray<τ_0_0>
%a2 = apply %f<Builtin.Int64>(%3) : $@convention(thin) <τ_0_0> (@owned MyArray<τ_0_0>) -> @owned MyArray<τ_0_0>
%21 = begin_borrow %a2
%22 = alloc_stack $MyArray<Builtin.Int64>
%23 = store_borrow %21 to %22 : $*MyArray<Builtin.Int64>
%24 = function_ref @forEachBody : $@convention(thin) (@in_guaranteed Builtin.Int64) -> @error any Error
%25 = convert_function %24 : $@convention(thin) (@in_guaranteed Builtin.Int64) -> @error any Error to $@convention(thin) @noescape (@in_guaranteed Builtin.Int64) -> @error any Error
%26 = thin_to_thick_function %25 : $@convention(thin) @noescape (@in_guaranteed Builtin.Int64) -> @error any Error to $@noescape @callee_guaranteed (@in_guaranteed Builtin.Int64) -> @error any Error
// A stub for Sequence.forEach(_:)
%30 = function_ref @forEach : $@convention(method) <τ_0_0 where τ_0_0 : Sequence> (@noescape @callee_guaranteed (@in_guaranteed τ_0_0.Element) -> @error Error, @in_guaranteed τ_0_0) -> @error Error
try_apply %30<[Builtin.Int64]>(%26, %23) : $@convention(method) <τ_0_0 where τ_0_0 : Sequence> (@noescape @callee_guaranteed (@in_guaranteed τ_0_0.Element) -> @error Error, @in_guaranteed τ_0_0) -> @error Error, normal bb1, error bb2
try_apply %30<MyArray<Builtin.Int64>>(%26, %23) : $@convention(method) <τ_0_0 where τ_0_0 : Sequence> (@noescape @callee_guaranteed (@in_guaranteed τ_0_0.Element) -> @error Error, @in_guaranteed τ_0_0) -> @error Error, normal bb1, error bb2
bb1(%32 : $()):
end_borrow %23 : $*Array<Builtin.Int64>
dealloc_stack %22 : $*Array<Builtin.Int64>
end_borrow %21 : $Array<Builtin.Int64>
destroy_value %3 : $Array<Builtin.Int64>
end_borrow %23 : $*MyArray<Builtin.Int64>
dealloc_stack %22 : $*MyArray<Builtin.Int64>
end_borrow %21 : $MyArray<Builtin.Int64>
destroy_value %a2
%37 = tuple ()
return %37 : $()
@@ -116,30 +135,34 @@ sil @forEachBody2 : $@convention(thin) (@in_guaranteed @callee_guaranteed @subst
sil hidden [ossa] @nonTrivialForEachLoopUnrollTest : $@convention(thin) (@owned @callee_guaranteed @substituted <A> () -> @out A for <Int>, @owned @callee_guaranteed @substituted <A> () -> @out A for <Int>) -> () {
bb0(%0: @owned $@callee_guaranteed @substituted <A> () -> @out A for <Int>, %1: @owned $@callee_guaranteed @substituted <A> () -> @out A for <Int>):
%2 = integer_literal $Builtin.Word, 2
%3 = function_ref @_allocateUninitializedArray : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned Array<τ_0_0>, Builtin.RawPointer)
%4 = apply %3<() -> Int>(%2) : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned Array<τ_0_0>, Builtin.RawPointer)
(%5, %6a) = destructure_tuple %4 : $(Array<()->Int>, Builtin.RawPointer)
%6 = mark_dependence %6a : $Builtin.RawPointer on %5 : $Array<() -> Int>
%7 = pointer_to_address %6 : $Builtin.RawPointer to [strict] $*@callee_guaranteed @substituted <A> () -> @out A for <Int>
%3 = function_ref @_allocateUninitializedArray : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned MyArray<τ_0_0>, Builtin.RawPointer)
%4 = apply %3<() -> Int>(%2) : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned MyArray<τ_0_0>, Builtin.RawPointer)
(%5, %6a) = destructure_tuple %4 : $(MyArray<()->Int>, Builtin.RawPointer)
%6 = begin_borrow %5
%b = struct_extract %6, #MyArray.buffer
%7 = ref_tail_addr %b, $@callee_guaranteed @substituted <τ_0_0> () -> @out τ_0_0 for <Int>
store %0 to [init] %7 : $*@callee_guaranteed @substituted <A> () -> @out A for <Int>
%12 = integer_literal $Builtin.Word, 1
%13 = index_addr %7 : $*@callee_guaranteed @substituted <A> () -> @out A for <Int>, %12 : $Builtin.Word
store %1 to [init] %13 : $*@callee_guaranteed @substituted <A> () -> @out A for <Int>
%21 = begin_borrow %5 : $Array<()->Int>
%22 = alloc_stack $Array<()->Int>
%23 = store_borrow %21 to %22 : $*Array<()->Int>
end_borrow %6
%f = function_ref @_finalizeArray : $@convention(thin) <τ_0_0> (@owned MyArray<τ_0_0>) -> @owned MyArray<τ_0_0>
%a2 = apply %f<()->Int>(%5) : $@convention(thin) <τ_0_0> (@owned MyArray<τ_0_0>) -> @owned MyArray<τ_0_0>
%21 = begin_borrow %a2
%22 = alloc_stack $MyArray<()->Int>
%23 = store_borrow %21 to %22 : $*MyArray<()->Int>
%24 = function_ref @forEachBody2 : $@convention(thin) (@in_guaranteed @callee_guaranteed @substituted <A> () -> @out A for <Int>) -> @error any Error
%25 = convert_function %24 : $@convention(thin) (@in_guaranteed @callee_guaranteed @substituted <A> () -> @out A for <Int>) -> @error any Error to $@convention(thin) @noescape (@in_guaranteed @callee_guaranteed @substituted <A> () -> @out A for <Int>) -> @error any Error
%26 = thin_to_thick_function %25 : $@convention(thin) @noescape (@in_guaranteed @callee_guaranteed @substituted <A> () -> @out A for <Int>) -> @error any Error to $@noescape @callee_guaranteed (@in_guaranteed @callee_guaranteed @substituted <A> () -> @out A for <Int>) -> @error any Error
// A stub for Sequence.forEach(_:)
%30 = function_ref @forEach : $@convention(method) <τ_0_0 where τ_0_0 : Sequence> (@noescape @callee_guaranteed (@in_guaranteed τ_0_0.Element) -> @error Error, @in_guaranteed τ_0_0) -> @error Error
try_apply %30<[() -> Int]>(%26, %23) : $@convention(method) <τ_0_0 where τ_0_0 : Sequence> (@noescape @callee_guaranteed (@in_guaranteed τ_0_0.Element) -> @error Error, @in_guaranteed τ_0_0) -> @error Error, normal bb1, error bb2
try_apply %30<MyArray<() -> Int>>(%26, %23) : $@convention(method) <τ_0_0 where τ_0_0 : Sequence> (@noescape @callee_guaranteed (@in_guaranteed τ_0_0.Element) -> @error Error, @in_guaranteed τ_0_0) -> @error Error, normal bb1, error bb2
bb1(%32 : $()):
end_borrow %23 : $*Array<() -> Int>
dealloc_stack %22 : $*Array<() -> Int>
end_borrow %21 : $Array<() -> Int>
destroy_value %5 : $Array<() -> Int>
end_borrow %23 : $*MyArray<() -> Int>
dealloc_stack %22 : $*MyArray<() -> Int>
end_borrow %21 : $MyArray<() -> Int>
destroy_value %a2
%37 = tuple ()
return %37 : $()
@@ -153,38 +176,42 @@ bb2(%39 : @owned $Error):
sil hidden [ossa] @checkIndirectFixLifetimeUsesAreIgnored : $@convention(thin) () -> () {
bb0:
%0 = integer_literal $Builtin.Word, 2
%1 = function_ref @_allocateUninitializedArray : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned Array<τ_0_0>, Builtin.RawPointer)
%2 = apply %1<Builtin.Int64>(%0) : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned Array<τ_0_0>, Builtin.RawPointer)
(%3, %4a) = destructure_tuple %2 : $(Array<Builtin.Int64>, Builtin.RawPointer)
%4 = mark_dependence %4a : $Builtin.RawPointer on %3 : $Array<Builtin.Int64>
%5 = pointer_to_address %4 : $Builtin.RawPointer to [strict] $*Builtin.Int64
%1 = function_ref @_allocateUninitializedArray : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned MyArray<τ_0_0>, Builtin.RawPointer)
%2 = apply %1<Builtin.Int64>(%0) : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned MyArray<τ_0_0>, Builtin.RawPointer)
(%3, %4a) = destructure_tuple %2 : $(MyArray<Builtin.Int64>, Builtin.RawPointer)
%4 = begin_borrow %3
%b = struct_extract %4, #MyArray.buffer
%5 = ref_tail_addr %b, $Builtin.Int64
%6 = integer_literal $Builtin.Int64, 15
store %6 to [trivial] %5 : $*Builtin.Int64
%12 = integer_literal $Builtin.Word, 1
%13 = index_addr %5 : $*Builtin.Int64, %12 : $Builtin.Word
%14 = integer_literal $Builtin.Int64, 27
store %14 to [trivial] %13 : $*Builtin.Int64
%21 = begin_borrow %3 : $Array<Builtin.Int64>
%22 = alloc_stack $Array<Builtin.Int64>
%23 = store_borrow %21 to %22 : $*Array<Builtin.Int64>
end_borrow %4
%f = function_ref @_finalizeArray : $@convention(thin) <τ_0_0> (@owned MyArray<τ_0_0>) -> @owned MyArray<τ_0_0>
%a2 = apply %f<Builtin.Int64>(%3) : $@convention(thin) <τ_0_0> (@owned MyArray<τ_0_0>) -> @owned MyArray<τ_0_0>
%21 = begin_borrow %a2
%22 = alloc_stack $MyArray<Builtin.Int64>
%23 = store_borrow %21 to %22 : $*MyArray<Builtin.Int64>
%24 = function_ref @forEachBody : $@convention(thin) (@in_guaranteed Builtin.Int64) -> @error Error
%25 = convert_function %24 : $@convention(thin) (@in_guaranteed Builtin.Int64) -> @error Error to $@convention(thin) @noescape (@in_guaranteed Builtin.Int64) -> @error Error
%26 = thin_to_thick_function %25 : $@convention(thin) @noescape (@in_guaranteed Builtin.Int64) -> @error Error to $@noescape @callee_guaranteed (@in_guaranteed Builtin.Int64) -> @error Error
// A stub for Sequence.forEach(_:)
%30 = function_ref @forEach : $@convention(method) <τ_0_0 where τ_0_0 : Sequence> (@noescape @callee_guaranteed (@in_guaranteed τ_0_0.Element) -> @error Error, @in_guaranteed τ_0_0) -> @error Error
try_apply %30<[Builtin.Int64]>(%26, %23) : $@convention(method) <τ_0_0 where τ_0_0 : Sequence> (@noescape @callee_guaranteed (@in_guaranteed τ_0_0.Element) -> @error Error, @in_guaranteed τ_0_0) -> @error Error, normal bb1, error bb2
try_apply %30<MyArray<Builtin.Int64>>(%26, %23) : $@convention(method) <τ_0_0 where τ_0_0 : Sequence> (@noescape @callee_guaranteed (@in_guaranteed τ_0_0.Element) -> @error Error, @in_guaranteed τ_0_0) -> @error Error, normal bb1, error bb2
bb1(%32 : $()):
end_borrow %23 : $*Array<Builtin.Int64>
end_borrow %23 : $*MyArray<Builtin.Int64>
// An indirect fixLifetime use
dealloc_stack %22 : $*Array<Builtin.Int64>
%33 = alloc_stack $Array<Builtin.Int64>
%34 = store_borrow %21 to %33 : $*Array<Builtin.Int64>
fix_lifetime %34 : $*Array<Builtin.Int64>
end_borrow %34 : $*Array<Builtin.Int64>
dealloc_stack %33 : $*Array<Builtin.Int64>
end_borrow %21 : $Array<Builtin.Int64>
destroy_value %3 : $Array<Builtin.Int64>
dealloc_stack %22 : $*MyArray<Builtin.Int64>
%33 = alloc_stack $MyArray<Builtin.Int64>
%34 = store_borrow %21 to %33 : $*MyArray<Builtin.Int64>
fix_lifetime %34 : $*MyArray<Builtin.Int64>
end_borrow %34 : $*MyArray<Builtin.Int64>
dealloc_stack %33 : $*MyArray<Builtin.Int64>
end_borrow %21 : $MyArray<Builtin.Int64>
destroy_value %a2
%37 = tuple ()
return %37 : $()
@@ -202,30 +229,34 @@ bb0:
bb1(%arg : $Builtin.Int64):
%10 = integer_literal $Builtin.Word, 1
%11 = function_ref @_allocateUninitializedArray : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned Array<τ_0_0>, Builtin.RawPointer)
%12 = apply %11<Builtin.Int64>(%10) : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned Array<τ_0_0>, Builtin.RawPointer)
(%13, %14a) = destructure_tuple %12 : $(Array<Builtin.Int64>, Builtin.RawPointer)
%14 = mark_dependence %14a : $Builtin.RawPointer on %13 : $Array<Builtin.Int64>
%15 = pointer_to_address %14 : $Builtin.RawPointer to [strict] $*Builtin.Int64
%11 = function_ref @_allocateUninitializedArray : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned MyArray<τ_0_0>, Builtin.RawPointer)
%12 = apply %11<Builtin.Int64>(%10) : $@convention(thin) <τ_0_0> (Builtin.Word) -> (@owned MyArray<τ_0_0>, Builtin.RawPointer)
(%13, %14a) = destructure_tuple %12 : $(MyArray<Builtin.Int64>, Builtin.RawPointer)
%14 = begin_borrow %13
%b = struct_extract %14, #MyArray.buffer
%15 = ref_tail_addr %b, $Builtin.Int64
store %arg to [trivial] %15 : $*Builtin.Int64
end_borrow %14
%f = function_ref @_finalizeArray : $@convention(thin) <τ_0_0> (@owned MyArray<τ_0_0>) -> @owned MyArray<τ_0_0>
%a2 = apply %f<Builtin.Int64>(%13) : $@convention(thin) <τ_0_0> (@owned MyArray<τ_0_0>) -> @owned MyArray<τ_0_0>
br bb2(%arg : $Builtin.Int64)
bb2(%arg2 : $Builtin.Int64):
%21 = begin_borrow %13 : $Array<Builtin.Int64>
%22 = alloc_stack $Array<Builtin.Int64>
%23 = store_borrow %21 to %22 : $*Array<Builtin.Int64>
%21 = begin_borrow %a2
%22 = alloc_stack $MyArray<Builtin.Int64>
%23 = store_borrow %21 to %22 : $*MyArray<Builtin.Int64>
%24 = function_ref @forEachBody : $@convention(thin) (@in_guaranteed Builtin.Int64) -> @error Error
%25 = convert_function %24 : $@convention(thin) (@in_guaranteed Builtin.Int64) -> @error Error to $@convention(thin) @noescape (@in_guaranteed Builtin.Int64) -> @error Error
%26 = thin_to_thick_function %25 : $@convention(thin) @noescape (@in_guaranteed Builtin.Int64) -> @error Error to $@noescape @callee_guaranteed (@in_guaranteed Builtin.Int64) -> @error Error
// A stub for Sequence.forEach(_:)
%30 = function_ref @forEach : $@convention(method) <τ_0_0 where τ_0_0 : Sequence> (@noescape @callee_guaranteed (@in_guaranteed τ_0_0.Element) -> @error Error, @in_guaranteed τ_0_0) -> @error Error
try_apply %30<[Builtin.Int64]>(%26, %23) : $@convention(method) <τ_0_0 where τ_0_0 : Sequence> (@noescape @callee_guaranteed (@in_guaranteed τ_0_0.Element) -> @error Error, @in_guaranteed τ_0_0) -> @error Error, normal bb3, error bb4
try_apply %30<MyArray<Builtin.Int64>>(%26, %23) : $@convention(method) <τ_0_0 where τ_0_0 : Sequence> (@noescape @callee_guaranteed (@in_guaranteed τ_0_0.Element) -> @error Error, @in_guaranteed τ_0_0) -> @error Error, normal bb3, error bb4
bb3(%32 : $()):
end_borrow %23 : $*Array<Builtin.Int64>
dealloc_stack %22 : $*Array<Builtin.Int64>
end_borrow %21 : $Array<Builtin.Int64>
destroy_value %13 : $Array<Builtin.Int64>
end_borrow %23 : $*MyArray<Builtin.Int64>
dealloc_stack %22 : $*MyArray<Builtin.Int64>
end_borrow %21 : $MyArray<Builtin.Int64>
destroy_value %a2
%37 = tuple ()
return %37 : $()

View File

@@ -65,9 +65,7 @@ func unrollLetArrayLiteralWithClosures(i: Int32, j: Int32) {
// CHECK: [[ALLOCATE:%[0-9]+]] = function_ref @$ss27_allocateUninitializedArrayySayxG_BptBwlF
// CHECK: [[ARRAYTUP:%[0-9]+]] = apply [[ALLOCATE]]<() -> Int32>
// CHECK: [[ARRAYVAL:%[0-9]+]] = tuple_extract [[ARRAYTUP]] : $(Array<() -> Int32>, Builtin.RawPointer), 0
// CHECK: [[STORAGEPTR:%[0-9]+]] = tuple_extract [[ARRAYTUP]] : $(Array<() -> Int32>, Builtin.RawPointer), 1
// CHECK: [[MDI:%[0-9]+]] = mark_dependence [[STORAGEPTR]] : $Builtin.RawPointer on [[ARRAYVAL]] : $Array<() -> Int32>
// CHECK: [[STORAGEADDR:%[0-9]+]] = pointer_to_address [[MDI]]
// CHECK: [[STORAGEADDR:%[0-9]+]] = ref_tail_addr
// CHECK: store [[CLOSURE1:%[0-9]+]] to [[STORAGEADDR]]
// CHECK: [[INDEX1:%[0-9]+]] = index_addr [[STORAGEADDR]]
// CHECK: store [[CLOSURE2:%[0-9]+]] to [[INDEX1]]