Optimizer: re-implement the pointer_to_address SILCombine peephole optimizations in Swift

It consists of:
* removing redundant `address_to_pointer`-`pointer_to_address` pairs (sketched below)
* optimizing an `index_raw_pointer` of a manually computed stride to `index_addr`
* removing the alignment attribute or increasing the alignment based on an `assumeAlignment` builtin
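
For illustration, a minimal SIL sketch of the first pattern (value numbers and the `$Int` type are arbitrary):
```
%2 = address_to_pointer %1 : $*Int to $Builtin.RawPointer
%3 = pointer_to_address %2 : $Builtin.RawPointer to [strict] $*Int
%4 = load %3 : $*Int   // after the rewrite: %4 = load %1
```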

This is a big code cleanup, but it also has some functional differences for the `address_to_pointer`-`pointer_to_address` pair removal:

* It's not done if the transformation would result in a (detectable) use-after-`dealloc_stack` memory lifetime failure (see the sketch after this list).
* It's not done if `copy_value`s would have to be inserted or borrow scopes would have to be extended to comply with ownership rules (previously the task of `OwnershipRAUWHelper`).
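
A minimal sketch of the first case, assuming a stack allocation as the pointer's base:
```
%1 = alloc_stack $T
%2 = address_to_pointer %1 : $*T to $Builtin.RawPointer
dealloc_stack %1 : $*T
%3 = pointer_to_address %2 : $Builtin.RawPointer to [strict] $*T
%4 = load %3 : $*T   // replacing %3 with %1 would (detectably) use %1 after its dealloc_stack
```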

Inserting copies is bad anyway.
Extending borrow scopes would only be required if the original lifetime of the pointer extends beyond a borrow scope, which shouldn't happen in safe code (see the sketch below). Therefore this is a very rare case which is not worth handling.
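
A minimal sketch of that rare case, assuming a class `C` with tail-allocated elements:
```
%1 = begin_borrow %0 : $C
%2 = ref_tail_addr %1 : $C, $Int
%3 = address_to_pointer %2 : $*Int to $Builtin.RawPointer
end_borrow %1 : $C
%4 = pointer_to_address %3 : $Builtin.RawPointer to [strict] $*Int   // cannot replace %4 with %2
```
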
Erik Eckstein
2024-12-20 12:45:44 +01:00
parent 51c6a60d9d
commit 9aff288be4
13 changed files with 583 additions and 441 deletions

View File

@@ -79,7 +79,7 @@ typealias ValueWorklist = Worklist<ValueSet>
typealias OperandWorklist = Worklist<OperandSet>
extension InstructionWorklist {
mutating func pushPredecessors(of inst: Instruction, ignoring ignoreInst: SingleValueInstruction) {
mutating func pushPredecessors(of inst: Instruction, ignoring ignoreInst: Instruction) {
if let prev = inst.previous {
if prev != ignoreInst {
pushIfNotVisited(prev)

View File

@@ -12,30 +12,319 @@
import SIL
extension PointerToAddressInst : OnoneSimplifyable {
extension PointerToAddressInst : OnoneSimplifyable, SILCombineSimplifyable {
/// For a redundant pair of pointer-address conversions, e.g.
///
/// %2 = address_to_pointer %1
/// %3 = pointer_to_address %2 [strict]
///
/// replace all uses of %3 with %1.
///
func simplify(_ context: SimplifyContext) {
if let atp = self.pointer as? AddressToPointerInst,
atp.address.type == self.type,
self.isStrict,
if removeAddressToPointerToAddressPair(of: self, context) {
return
}
if simplifyIndexRawPointer(of: self, context) {
return
}
_ = optimizeAlignment(of: self, context)
}
}
// If the pointer is within an ownership scope, the transformation can break ownership rules, e.g.
// %2 = begin_borrow %1
// %3 = ref_tail_addr %2
// %4 = address_to_pointer %3
// end_borrow %2
// %5 = pointer_to_address %4 <- cannot replace %5 with %3!
//
!atp.address.accessBase.hasLocalOwnershipLifetime
{
self.uses.replaceAll(with: atp.address, context)
/// Remove a redundant pair of pointer-address conversions:
/// ```
/// %2 = address_to_pointer %1
/// %3 = pointer_to_address %2 [strict]
/// ```
/// -> replace all uses of %3 with %1.
///
private func removeAddressToPointerToAddressPair(
of ptr2Addr: PointerToAddressInst,
_ context: SimplifyContext
) -> Bool {
guard let addr2Ptr = ptr2Addr.pointer as? AddressToPointerInst,
ptr2Addr.isStrict,
!ptr2Addr.hasIllegalUsesAfterLifetime(of: addr2Ptr, context)
else {
return false
}
if ptr2Addr.type == addr2Ptr.address.type {
ptr2Addr.replace(with: addr2Ptr.address, context)
} else {
let cast = Builder(before: ptr2Addr, context).createUncheckedAddrCast(from: addr2Ptr.address, to: ptr2Addr.type)
ptr2Addr.replace(with: cast, context)
}
return true
}
/// Replace an `index_raw_pointer` with a manually computed stride with `index_addr`:
/// ```
/// %1 = metatype $T.Type
/// %2 = builtin "strideof"<T>(%1) :
/// %3 = builtin "smul_with_overflow_Int64"(%idx, %2)
/// %4 = tuple_extract %3, 0
/// %5 = index_raw_pointer %ptr, %4
/// %6 = pointer_to_address %5 to [strict] $*T
/// ```
/// ->
/// ```
/// %2 = pointer_to_address %ptr to [strict] $*T
/// %3 = index_addr %2, %idx
/// ```
///
private func simplifyIndexRawPointer(of ptr2Addr: PointerToAddressInst, _ context: SimplifyContext) -> Bool {
guard let indexRawPtr = ptr2Addr.pointer as? IndexRawPointerInst,
let tupleExtract = indexRawPtr.index.lookThroughTruncOrBitCast as? TupleExtractInst,
let strideMul = tupleExtract.tuple as? BuiltinInst, strideMul.id == .SMulOver,
let (index, strideType) = strideMul.indexAndStrideOfMultiplication,
strideType == ptr2Addr.type.objectType
else {
return false
}
let builder = Builder(before: ptr2Addr, context)
let newPtr2Addr = builder.createPointerToAddress(pointer: indexRawPtr.base, addressType: ptr2Addr.type,
isStrict: ptr2Addr.isStrict, isInvariant: ptr2Addr.isInvariant)
let newIndex = builder.createCastIfNeeded(of: index, toIndexTypeOf: indexRawPtr)
let indexAddr = builder.createIndexAddr(base: newPtr2Addr, index: newIndex, needStackProtection: false)
ptr2Addr.replace(with: indexAddr, context)
return true
}
/// Optimize the alignment of a `pointer_to_address` based on `Builtin.assumeAlignment`
/// ```
/// %1 = builtin "assumeAlignment"(%ptr, %align)
/// %2 = pointer_to_address %1 to [align=1] $*T
/// ```
/// ->
/// ```
/// %2 = pointer_to_address %ptr to [align=8] $*T
/// ```
/// or
/// ```
/// %2 = pointer_to_address %ptr to $*T
/// ```
///
/// The goal is to increase the alignment or to remove the attribute completely, which means that
/// the resulting address is naturally aligned to its type.
///
private func optimizeAlignment(of ptr2Addr: PointerToAddressInst, _ context: SimplifyContext) -> Bool {
guard let assumeAlign = ptr2Addr.pointer as? BuiltinInst, assumeAlign.id == .AssumeAlignment else {
return false
}
if optimizeConstantAlignment(of: ptr2Addr, assumed: assumeAlign, context) {
return true
}
return optimizeTypeAlignment(of: ptr2Addr, assumed: assumeAlign, context)
}
/// Optimize the alignment based on an integer literal
/// ```
/// %align = integer_literal $Builtin.Int64, 16
/// %1 = builtin "assumeAlignment"(%ptr, %align)
/// %2 = pointer_to_address %1 to [align=1] $*T
/// ```
/// ->
/// ```
/// %2 = pointer_to_address %ptr to [align=16] $*T
/// ```
private func optimizeConstantAlignment(
of ptr2Addr: PointerToAddressInst,
assumed assumeAlign: BuiltinInst,
_ context: SimplifyContext
) -> Bool {
guard let alignLiteral = assumeAlign.arguments[1] as? IntegerLiteralInst,
let assumedAlignment = alignLiteral.value
else {
return false
}
ptr2Addr.operand.set(to: assumeAlign.arguments[0], context)
if assumedAlignment == 0 {
// A zero alignment means that the pointer is aligned to the natural alignment of the address type.
ptr2Addr.set(alignment: nil, context)
} else {
if let oldAlignment = ptr2Addr.alignment, assumedAlignment <= oldAlignment {
// Avoid decreasing the alignment, which would be a pessimisation.
return true
}
ptr2Addr.set(alignment: assumedAlignment, context)
}
return true
}
/// Remove the alignment attribute if the alignment is assumed to be the natural alignment of the address type.
/// ```
/// %align = builtin "alignof"<T>(%0 : $@thin T.Type)
/// %1 = builtin "assumeAlignment"(%ptr, %align)
/// %2 = pointer_to_address %1 to [align=1] $*T
/// ```
/// ->
/// ```
/// %2 = pointer_to_address %ptr to $*T
/// ```
private func optimizeTypeAlignment(
of ptr2Addr: PointerToAddressInst,
assumed assumeAlign: BuiltinInst,
_ context: SimplifyContext
) -> Bool {
guard let alignOf = assumeAlign.arguments[1].lookThroughIntCasts as? BuiltinInst, alignOf.id == .Alignof,
alignOf.alignOrStrideType == ptr2Addr.type.objectType
else {
return false
}
let pointer = assumeAlign.arguments[0]
ptr2Addr.set(alignment: nil, context)
ptr2Addr.operand.set(to: pointer, context)
return true
}
private extension PointerToAddressInst {
/// Checks if the `pointer_to_address` has uses outside the scope of the `baseAddress`.
/// In such a case removing the `address_to_pointer`-`pointer_to_address` pair would result in
/// invalid SIL. For example:
/// ```
/// %1 = alloc_stack $T
/// %2 = address_to_pointer %1
/// dealloc_stack %1
/// %3 = pointer_to_address %2
/// %4 = load %3
/// ```
/// or
/// ```
/// %1 = begin_borrow %0
/// %2 = ref_element_addr %1, #C.x
/// %3 = address_to_pointer %2
/// end_borrow %1
/// %4 = pointer_to_address %3
/// %5 = load %4
/// ```
func hasIllegalUsesAfterLifetime(of baseAddress: AddressToPointerInst, _ context: SimplifyContext) -> Bool {
var lifetimeFrontier = InstructionSet(context)
defer { lifetimeFrontier.deinitialize() }
switch baseAddress.address.accessBase.addEndLifetimeUses(to: &lifetimeFrontier, context) {
case .unknownLifetime:
return true
case .unlimitedLifetime:
return false
case .limitedLifetime:
var addressUses = AddressUses(of: self, context)
defer { addressUses.deinitialize() }
return addressUses.hasUsesOutside(of: lifetimeFrontier, beginInstruction: baseAddress)
}
}
}
private extension AccessBase {
func addEndLifetimeUses(to frontier: inout InstructionSet, _ context: SimplifyContext) -> Result {
switch self {
case .stack(let allocStack):
frontier.insert(contentsOf: allocStack.deallocations)
return .limitedLifetime
case .global, .argument, .pointer:
return .unlimitedLifetime
case .storeBorrow(let storeBorrow):
frontier.insert(contentsOf: storeBorrow.endBorrows)
return .limitedLifetime
default:
guard let ref = reference else {
return .unknownLifetime
}
switch ref.ownership {
case .owned:
frontier.insert(contentsOf: ref.uses.endingLifetime.users)
return .limitedLifetime
case .guaranteed:
for borrowIntroducer in ref.getBorrowIntroducers(context) {
frontier.insert(contentsOf: borrowIntroducer.scopeEndingOperands.users)
}
return .limitedLifetime
case .none:
// Not in an OSSA function.
return .unlimitedLifetime
case .unowned:
return .unknownLifetime
}
}
}
enum Result {
case unknownLifetime, unlimitedLifetime, limitedLifetime
}
}
private struct AddressUses : AddressDefUseWalker {
var users: InstructionWorklist
init(of address: Value, _ context: SimplifyContext) {
users = InstructionWorklist(context)
_ = walkDownUses(ofAddress: address, path: UnusedWalkingPath())
}
mutating func deinitialize() {
users.deinitialize()
}
mutating func leafUse(address: Operand, path: UnusedWalkingPath) -> WalkResult {
users.pushIfNotVisited(address.instruction)
return .continueWalk
}
mutating func hasUsesOutside(of lifetimeFrontier: InstructionSet, beginInstruction: Instruction) -> Bool {
while let inst = users.pop() {
if lifetimeFrontier.contains(inst) {
return true
}
users.pushPredecessors(of: inst, ignoring: beginInstruction)
}
return false
}
}
private extension Value {
var lookThroughIntCasts: Value {
guard let builtin = self as? BuiltinInst else {
return self
}
switch builtin.id {
case .ZExtOrBitCast, .SExtOrBitCast, .TruncOrBitCast:
return builtin.arguments[0].lookThroughIntCasts
default:
return self
}
}
var lookThroughTruncOrBitCast: Value {
if let truncOrBitCast = self as? BuiltinInst, truncOrBitCast.id == .TruncOrBitCast {
return truncOrBitCast.arguments[0]
}
return self
}
}
private extension BuiltinInst {
var indexAndStrideOfMultiplication : (index: Value, strideType: Type)? {
assert(id == .SMulOver)
if let strideOf = arguments[0].lookThroughIntCasts as? BuiltinInst, strideOf.id == .Strideof {
return (index: arguments[1], strideType: strideOf.alignOrStrideType)
}
if let strideOf = arguments[1].lookThroughIntCasts as? BuiltinInst, strideOf.id == .Strideof {
return (index: arguments[0], strideType: strideOf.alignOrStrideType)
}
return nil
}
var alignOrStrideType: Type {
substitutionMap.replacementTypes[0].loweredType(in: parentFunction)
}
}
private extension Builder {
func createCastIfNeeded(of index: Value, toIndexTypeOf indexRawPtr: IndexRawPointerInst) -> Value {
if let truncOrBitCast = indexRawPtr.index as? BuiltinInst {
assert(truncOrBitCast.id == .TruncOrBitCast)
return createBuiltin(name: truncOrBitCast.name, type: truncOrBitCast.type, arguments: [index])
}
return index
}
}

View File

@@ -117,6 +117,7 @@ private func registerSwiftPasses() {
registerForSILCombine(DestructureTupleInst.self, { run(DestructureTupleInst.self, $0) })
registerForSILCombine(TypeValueInst.self, { run(TypeValueInst.self, $0) })
registerForSILCombine(ClassifyBridgeObjectInst.self, { run(ClassifyBridgeObjectInst.self, $0) })
registerForSILCombine(PointerToAddressInst.self, { run(PointerToAddressInst.self, $0) })
registerForSILCombine(UncheckedEnumDataInst.self, { run(UncheckedEnumDataInst.self, $0) })
// Test passes

View File

@@ -533,6 +533,7 @@ SWIFT_SILCOMBINE_PASS(CopyValueInst)
SWIFT_SILCOMBINE_PASS(DestroyValueInst)
SWIFT_SILCOMBINE_PASS(DestructureStructInst)
SWIFT_SILCOMBINE_PASS(DestructureTupleInst)
SWIFT_SILCOMBINE_PASS(PointerToAddressInst)
SWIFT_SILCOMBINE_PASS(TypeValueInst)
SWIFT_SILCOMBINE_PASS(UncheckedEnumDataInst)

View File

@@ -48,7 +48,6 @@ namespace {
SILValue visitEnumInst(EnumInst *EI);
SILValue visitSelectEnumInst(SelectEnumInst *SEI);
SILValue visitAddressToPointerInst(AddressToPointerInst *ATPI);
SILValue visitPointerToAddressInst(PointerToAddressInst *PTAI);
SILValue visitRefToRawPointerInst(RefToRawPointerInst *RRPI);
SILValue
visitUnconditionalCheckedCastInst(UnconditionalCheckedCastInst *UCCI);
@@ -286,16 +285,6 @@ SILValue InstSimplifier::visitAddressToPointerInst(AddressToPointerInst *ATPI) {
return SILValue();
}
SILValue InstSimplifier::visitPointerToAddressInst(PointerToAddressInst *PTAI) {
// If this address is not strict, then it cannot be replaced by an address
// that may be strict.
if (auto *ATPI = dyn_cast<AddressToPointerInst>(PTAI->getOperand()))
if (ATPI->getOperand()->getType() == PTAI->getType() && PTAI->isStrict())
return ATPI->getOperand();
return SILValue();
}
SILValue InstSimplifier::visitRefToRawPointerInst(RefToRawPointerInst *RefToRaw) {
// Perform the following simplification:
//

View File

@@ -257,7 +257,6 @@ public:
SILInstruction *visitAllocStackInst(AllocStackInst *AS);
SILInstruction *visitSwitchEnumAddrInst(SwitchEnumAddrInst *SEAI);
SILInstruction *visitInjectEnumAddrInst(InjectEnumAddrInst *IEAI);
SILInstruction *visitPointerToAddressInst(PointerToAddressInst *PTAI);
SILInstruction *visitUncheckedAddrCastInst(UncheckedAddrCastInst *UADCI);
SILInstruction *visitUncheckedRefCastInst(UncheckedRefCastInst *URCI);
SILInstruction *visitEndCOWMutationInst(EndCOWMutationInst *URCI);
@@ -365,8 +364,6 @@ public:
SingleValueInstruction *user, SingleValueInstruction *value,
function_ref<SILValue()> newValueGenerator);
SILInstruction *optimizeAlignment(PointerToAddressInst *ptrAdrInst);
InstModCallbacks &getInstModCallbacks() { return deleter.getCallbacks(); }
private:

View File

@@ -219,258 +219,6 @@ SILInstruction *SILCombiner::visitUpcastInst(UpcastInst *uci) {
return nullptr;
}
// Optimize Builtin.assumeAlignment -> pointer_to_address
//
// Case #1. Literal zero = natural alignment
// %1 = integer_literal $Builtin.Int64, 0
// %2 = builtin "assumeAlignment"
// (%0 : $Builtin.RawPointer, %1 : $Builtin.Int64) : $Builtin.RawPointer
// %3 = pointer_to_address %2 : $Builtin.RawPointer to [align=1] $*Int
//
// Erases the `pointer_to_address` `[align=]` attribute:
//
// Case #2. Literal nonzero = forced alignment.
//
// %1 = integer_literal $Builtin.Int64, 16
// %2 = builtin "assumeAlignment"
// (%0 : $Builtin.RawPointer, %1 : $Builtin.Int64) : $Builtin.RawPointer
// %3 = pointer_to_address %2 : $Builtin.RawPointer to [align=1] $*Int
//
// Promotes the `pointer_to_address` `[align=]` attribute to a higher value.
//
// Case #3. Folded dynamic alignment
//
// %1 = builtin "alignof"<T>(%0 : $@thin T.Type) : $Builtin.Word
// %2 = builtin "assumeAlignment"
// (%0 : $Builtin.RawPointer, %1 : $Builtin.Int64) : $Builtin.RawPointer
// %3 = pointer_to_address %2 : $Builtin.RawPointer to [align=1] $*T
//
// Erases the `pointer_to_address` `[align=]` attribute.
SILInstruction *
SILCombiner::optimizeAlignment(PointerToAddressInst *ptrAdrInst) {
if (!ptrAdrInst->alignment())
return nullptr;
llvm::Align oldAlign = ptrAdrInst->alignment().valueOrOne();
// TODO: stripCasts(ptrAdrInst->getOperand()) can be used to find the Builtin,
// but then the Builtin could not be trivially removed. Ideally,
// Builtin.assume will be the immediate operand so it can be removed in the
// common case.
BuiltinInst *assumeAlign = dyn_cast<BuiltinInst>(ptrAdrInst->getOperand());
if (!assumeAlign
|| assumeAlign->getBuiltinKind() != BuiltinValueKind::AssumeAlignment) {
return nullptr;
}
SILValue ptrSrc = assumeAlign->getArguments()[0];
SILValue alignOper = assumeAlign->getArguments()[1];
if (auto *integerInst = dyn_cast<IntegerLiteralInst>(alignOper)) {
llvm::MaybeAlign newAlign(integerInst->getValue().getLimitedValue());
if (newAlign && newAlign.valueOrOne() <= oldAlign)
return nullptr;
// Case #1: the pointer is assumed naturally aligned
//
// Or Case #2: the pointer is assumed to have non-zero alignment greater
// than its current alignment.
//
// In either case, rewrite the address alignment with the assumed alignment,
// and bypass the Builtin.assumeAlign.
return Builder.createPointerToAddress(
ptrAdrInst->getLoc(), ptrSrc, ptrAdrInst->getType(),
ptrAdrInst->isStrict(), ptrAdrInst->isInvariant(), newAlign);
}
// Handle possible 32-bit sign-extension.
SILValue extendedAlignment;
if (match(alignOper,
m_ApplyInst(BuiltinValueKind::SExtOrBitCast,
m_ApplyInst(BuiltinValueKind::TruncOrBitCast,
m_SILValue(extendedAlignment))))) {
alignOper = extendedAlignment;
}
if (match(alignOper,
m_ApplyInst(BuiltinValueKind::Alignof))) {
CanType formalType = cast<BuiltinInst>(alignOper)->getSubstitutions()
.getReplacementTypes()[0]->getReducedType(
ptrAdrInst->getFunction()->getGenericSignature());
SILType instanceType = ptrAdrInst->getFunction()->getLoweredType(
Lowering::AbstractionPattern::getOpaque(), formalType);
if (instanceType.getAddressType() != ptrAdrInst->getType())
return nullptr;
// Case #3: the alignOf type matches the address type. Convert to a
// naturally aligned pointer by erasing alignment and bypassing the
// Builtin.assumeAlign.
return Builder.createPointerToAddress(
ptrAdrInst->getLoc(), ptrSrc, ptrAdrInst->getType(),
ptrAdrInst->isStrict(), ptrAdrInst->isInvariant());
}
return nullptr;
}
SILInstruction *
SILCombiner::
visitPointerToAddressInst(PointerToAddressInst *PTAI) {
auto *F = PTAI->getFunction();
Builder.setCurrentDebugScope(PTAI->getDebugScope());
// If we reach this point, we know that the types must be different since
// otherwise simplifyInstruction would have handled the identity case. This is
// always legal to do since address-to-pointer pointer-to-address implies
// layout compatibility.
//
// (pointer-to-address strict (address-to-pointer %x))
// -> (unchecked_addr_cast %x)
if (PTAI->isStrict()) {
// We can not perform this optimization with ownership until we are able to
// handle issues around interior pointers and expanding borrow scopes.
if (auto *ATPI = dyn_cast<AddressToPointerInst>(PTAI->getOperand())) {
if (!hasOwnership()) {
return Builder.createUncheckedAddrCast(PTAI->getLoc(),
ATPI->getOperand(),
PTAI->getType());
}
OwnershipRAUWHelper helper(ownershipFixupContext, PTAI,
ATPI->getOperand());
if (helper) {
auto replacement = helper.prepareReplacement();
auto *newInst = Builder.createUncheckedAddrCast(
PTAI->getLoc(), replacement, PTAI->getType());
helper.perform(newInst);
return nullptr;
}
}
}
// The rest of these canonicalizations optimize the code around
// pointer_to_address by leaving in a pointer_to_address, meaning that we do not
// need to worry about moving addresses out of interior pointer scopes.
// Turn this also into an index_addr. We generate this pattern after switching
// the Word type to an explicit Int32 or Int64 in the stdlib.
//
// %101 = builtin "strideof"<Int>(%84 : $@thick Int.Type) :
// $Builtin.Word
// %102 = builtin "zextOrBitCast_Word_Int64"(%101 : $Builtin.Word) :
// $Builtin.Int64
// %111 = builtin "smul_with_overflow_Int64"(%108 : $Builtin.Int64,
// %102 : $Builtin.Int64, %20 : $Builtin.Int1) :
// $(Builtin.Int64, Builtin.Int1)
// %112 = tuple_extract %111 : $(Builtin.Int64, Builtin.Int1), 0
// %113 = builtin "truncOrBitCast_Int64_Word"(%112 : $Builtin.Int64) :
// $Builtin.Word
// %114 = index_raw_pointer %100 : $Builtin.RawPointer, %113 : $Builtin.Word
// %115 = pointer_to_address %114 : $Builtin.RawPointer to [strict] $*Int
//
// This is safe for ownership since our final SIL still has a
// pointer_to_address meaning that we do not need to worry about interior
// pointers.
SILValue Distance;
SILValue TruncOrBitCast;
MetatypeInst *Metatype;
IndexRawPointerInst *IndexRawPtr;
BuiltinInst *StrideMul;
if (match(
PTAI->getOperand(),
m_IndexRawPointerInst(IndexRawPtr))) {
SILValue Ptr = IndexRawPtr->getOperand(0);
SILValue TruncOrBitCast = IndexRawPtr->getOperand(1);
if (match(TruncOrBitCast, m_ApplyInst(BuiltinValueKind::TruncOrBitCast,
m_TupleExtractOperation(
m_BuiltinInst(StrideMul), 0)))) {
if (match(StrideMul,
m_ApplyInst(
BuiltinValueKind::SMulOver, m_SILValue(Distance),
m_ApplyInst(BuiltinValueKind::ZExtOrBitCast,
m_ApplyInst(BuiltinValueKind::Strideof,
m_MetatypeInst(Metatype))))) ||
match(StrideMul,
m_ApplyInst(
BuiltinValueKind::SMulOver,
m_ApplyInst(BuiltinValueKind::ZExtOrBitCast,
m_ApplyInst(BuiltinValueKind::Strideof,
m_MetatypeInst(Metatype))),
m_SILValue(Distance)))) {
SILType InstanceType =
F->getLoweredType(Metatype->getType()
.castTo<MetatypeType>().getInstanceType());
auto *Trunc = cast<BuiltinInst>(TruncOrBitCast);
// Make sure that the type of the metatype matches the type that we are
// casting to so we stride by the correct amount.
if (InstanceType.getAddressType() != PTAI->getType()) {
return nullptr;
}
auto *NewPTAI = Builder.createPointerToAddress(PTAI->getLoc(), Ptr,
PTAI->getType(),
PTAI->isStrict(),
PTAI->isInvariant());
auto DistanceAsWord = Builder.createBuiltin(
PTAI->getLoc(), Trunc->getName(), Trunc->getType(), {}, Distance);
return Builder.createIndexAddr(PTAI->getLoc(), NewPTAI, DistanceAsWord,
/*needsStackProtection=*/ false);
}
}
}
// Turn:
//
// %stride = Builtin.strideof(T) * %distance
// %ptr' = index_raw_pointer %ptr, %stride
// %result = pointer_to_address %ptr, [strict] $T'
//
// To:
//
// %addr = pointer_to_address %ptr, [strict] $T
// %result = index_addr %addr, %distance
//
// This is safe for ownership since our final SIL still has a
// pointer_to_address meaning that we do not need to worry about interior
// pointers.
BuiltinInst *Bytes = nullptr;
if (match(PTAI->getOperand(),
m_IndexRawPointerInst(
m_ValueBase(),
m_TupleExtractOperation(m_BuiltinInst(Bytes), 0)))) {
assert(Bytes != nullptr &&
"Bytes should have been assigned a non-null value");
if (match(Bytes, m_ApplyInst(BuiltinValueKind::SMulOver, m_ValueBase(),
m_ApplyInst(BuiltinValueKind::Strideof,
m_MetatypeInst(Metatype)),
m_ValueBase()))) {
SILType InstanceType =
F->getLoweredType(Metatype->getType()
.castTo<MetatypeType>().getInstanceType());
// Make sure that the type of the metatype matches the type that we are
// casting to so we stride by the correct amount.
if (InstanceType.getAddressType() != PTAI->getType())
return nullptr;
auto IRPI = cast<IndexRawPointerInst>(PTAI->getOperand());
SILValue Ptr = IRPI->getOperand(0);
SILValue Distance = Bytes->getArguments()[0];
auto *NewPTAI =
Builder.createPointerToAddress(PTAI->getLoc(), Ptr, PTAI->getType(),
PTAI->isStrict(), PTAI->isInvariant());
return Builder.createIndexAddr(PTAI->getLoc(), NewPTAI, Distance,
/*needsStackProtection=*/ false);
}
}
return optimizeAlignment(PTAI);
}
SILInstruction *
SILCombiner::visitUncheckedAddrCastInst(UncheckedAddrCastInst *UADCI) {
// These are always safe to perform due to interior pointer ownership

View File

@@ -3,8 +3,7 @@
// Check that accessing an imported enum doesn't trigger stack protection.
// CHECK-LABEL: sil @$s4test6testityyF : $@convention(thin) () -> () {
// CHECK: address_to_pointer %{{[0-9]+}}
// CHECK: address_to_pointer %{{[0-9]+}}
// CHECK-NOT: stack_protection
// CHECK: } // end sil function '$s4test6testityyF'
public func testit() {
var s = S()

View File

@@ -717,13 +717,11 @@ bb0(%0 : @owned $Klass):
return %3 : $Klass
}
// In this case, we need to perform the interior pointer lifetime extension.
//
// CHECK-LABEL: sil [ossa] @interior_pointer_lifetime_extension_int_ptr_need_lifetime_ext : $@convention(thin) (@owned Klass) -> @owned Klass {
// CHECK-NOT: address_to_pointer
// CHECK-NOT: pointer_to_address
// CHECK: } // end sil function 'interior_pointer_lifetime_extension_int_ptr_need_lifetime_ext'
sil [ossa] @interior_pointer_lifetime_extension_int_ptr_need_lifetime_ext : $@convention(thin) (@owned Klass) -> @owned Klass {
// CHECK-LABEL: sil [ossa] @invalid_use_after_lifetime1 :
// CHECK: address_to_pointer
// CHECK: pointer_to_address
// CHECK: } // end sil function 'invalid_use_after_lifetime1'
sil [ossa] @invalid_use_after_lifetime1 : $@convention(thin) (@owned Klass) -> @owned Klass {
bb0(%0 : @owned $Klass):
%0a = begin_borrow %0 : $Klass
%0b = ref_element_addr %0a : $Klass, #Klass.field
@@ -735,11 +733,11 @@ bb0(%0 : @owned $Klass):
return %3 : $Klass
}
// CHECK-LABEL: sil [ossa] @interior_pointer_lifetime_extension_int_ptr_need_lifetime_ext_with_proj : $@convention(thin) (@owned Klass) -> @owned Builtin.NativeObject {
// CHECK-NOT: address_to_pointer
// CHECK-NOT: pointer_to_address
// CHECK: } // end sil function 'interior_pointer_lifetime_extension_int_ptr_need_lifetime_ext_with_proj'
sil [ossa] @interior_pointer_lifetime_extension_int_ptr_need_lifetime_ext_with_proj : $@convention(thin) (@owned Klass) -> @owned Builtin.NativeObject {
// CHECK-LABEL: sil [ossa] @invalid_use_after_lifetime2 :
// CHECK: address_to_pointer
// CHECK: pointer_to_address
// CHECK: } // end sil function 'invalid_use_after_lifetime2'
sil [ossa] @invalid_use_after_lifetime2 : $@convention(thin) (@owned Klass) -> @owned Builtin.NativeObject {
bb0(%0 : @owned $Klass):
%0a = begin_borrow %0 : $Klass
%0b = ref_element_addr %0a : $Klass, #Klass.structField
@@ -752,32 +750,12 @@ bb0(%0 : @owned $Klass):
return %3 : $Builtin.NativeObject
}
// Make sure we inserted everything in the right places rather than using the
// ownership verifier.
//
// CHECK-LABEL: sil [ossa] @interior_pointer_lifetime_extension_int_ptr_need_lifetime_ext_with_proj_2 : $@convention(thin) (@owned Klass) -> @owned Builtin.NativeObject {
// CHECK: bb0([[ARG:%.*]] : @owned
// CHECK-NEXT: br bb1
//
// CHECK: bb1:
// CHECK-NEXT: br bb2
//
// CHECK: bb2:
// CHECK-NEXT: [[BORROWED_ARG:%.*]] = begin_borrow [[ARG]]
// CHECK-NEXT: [[NEW_INT_PTR:%.*]] = ref_element_addr [[BORROWED_ARG]]
// CHECK-NEXT: [[NEW_GEP:%.*]] = struct_element_addr [[NEW_INT_PTR]]
// CHECK-NEXT: br bb3
//
// CHECK: bb3:
// CHECK-NEXT: br bb4
//
// CHECK: bb4:
// CHECK-NEXT: [[RESULT:%.*]] = load [copy] [[NEW_GEP]] :
// CHECK-NEXT: end_borrow [[BORROWED_ARG]]
// CHECK-NEXT: destroy_value [[ARG]]
// CHECK-NEXT: return [[RESULT]]
// CHECK-NEXT: } // end sil function 'interior_pointer_lifetime_extension_int_ptr_need_lifetime_ext_with_proj_2'
sil [ossa] @interior_pointer_lifetime_extension_int_ptr_need_lifetime_ext_with_proj_2 : $@convention(thin) (@owned Klass) -> @owned Builtin.NativeObject {
// CHECK-LABEL: sil [ossa] @invalid_use_after_lifetime3 :
// CHECK: bb2:
// CHECK-NEXT: address_to_pointer
// CHECK-NEXT: pointer_to_address
// CHECK: } // end sil function 'invalid_use_after_lifetime3'
sil [ossa] @invalid_use_after_lifetime3 : $@convention(thin) (@owned Klass) -> @owned Builtin.NativeObject {
bb0(%0 : @owned $Klass):
%0a = begin_borrow %0 : $Klass
br bb1
@@ -802,41 +780,12 @@ bb4:
return %3 : $Builtin.NativeObject
}
// Make sure we inserted everything in the right places rather than using the
// ownership verifier given we can't eliminate the underlying RAUW.
//
// CHECK-LABEL: sil [ossa] @interior_pointer_lifetime_extension_int_ptr_need_lifetime_ext_with_proj_3 : $@convention(thin) (@owned Klass) -> @owned Builtin.NativeObject {
// CHECK: bb0([[ARG:%.*]] : @owned
// CHECK-NEXT: [[ORIGINAL_BORROW:%.*]] = begin_borrow [[ARG]]
// CHECK-NEXT: [[COPIED_ARG:%.*]] = copy_value [[ORIGINAL_BORROW]]
// CHECK-NEXT: br bb1
//
// CHECK: bb1:
// CHECK-NEXT: [[OLD_INT_PTR:%.*]] = ref_element_addr
// CHECK-NEXT: [[OLD_GEP:%.*]] = struct_element_addr [[OLD_INT_PTR]]
// CHECK-NEXT: br bb2
//
// CHECK: bb2:
// CHECK-NEXT: // function_ref
// CHECK-NEXT: [[USER:%.*]] = function_ref @
// CHECK-NEXT: apply [[USER]]([[OLD_GEP]])
// CHECK-NEXT: [[BORROWED_COPIED_ARG:%.*]] = begin_borrow [[COPIED_ARG]]
// CHECK-NEXT: [[NEW_INT_PTR:%.*]] = ref_element_addr [[BORROWED_COPIED_ARG]]
// CHECK-NEXT: [[NEW_GEP:%.*]] = struct_element_addr [[NEW_INT_PTR]]
// CHECK-NEXT: br bb3
//
// CHECK: bb3:
// CHECK-NEXT: end_borrow [[ORIGINAL_BORROW]]
// CHECK-NEXT: br bb4
//
// CHECK: bb4:
// CHECK-NEXT: [[RESULT:%.*]] = load [copy] [[NEW_GEP]]
// CHECK-NEXT: end_borrow [[BORROWED_COPIED_ARG]]
// CHECK-NEXT: destroy_value [[COPIED_ARG]]
// CHECK-NEXT: destroy_value [[ARG]]
// CHECK-NEXT: return [[RESULT]]
// CHECK-NEXT: } // end sil function 'interior_pointer_lifetime_extension_int_ptr_need_lifetime_ext_with_proj_3'
sil [ossa] @interior_pointer_lifetime_extension_int_ptr_need_lifetime_ext_with_proj_3 : $@convention(thin) (@owned Klass) -> @owned Builtin.NativeObject {
// CHECK-LABEL: sil [ossa] @invalid_use_after_lifetime4 :
// CHECK: apply
// CHECK-NEXT: address_to_pointer
// CHECK-NEXT: pointer_to_address
// CHECK: } // end sil function 'invalid_use_after_lifetime4'
sil [ossa] @invalid_use_after_lifetime4 : $@convention(thin) (@owned Klass) -> @owned Builtin.NativeObject {
bb0(%0 : @owned $Klass):
%0a = begin_borrow %0 : $Klass
br bb1
@@ -863,11 +812,12 @@ bb4:
return %3 : $Builtin.NativeObject
}
// CHECK-LABEL: sil [ossa] @interior_pointer_lifetime_extension_int_ptr_need_lifetime_ext_loop_1 : $@convention(thin) (@owned Klass) -> @owned Builtin.NativeObject {
// CHECK-NOT: address_to_pointer
// CHECK-NOT: pointer_to_address
// CHECK: } // end sil function 'interior_pointer_lifetime_extension_int_ptr_need_lifetime_ext_loop_1'
sil [ossa] @interior_pointer_lifetime_extension_int_ptr_need_lifetime_ext_loop_1 : $@convention(thin) (@owned Klass) -> @owned Builtin.NativeObject {
// CHECK-LABEL: sil [ossa] @invalid_use_after_lifetime5 :
// CHECK: apply
// CHECK-NEXT: address_to_pointer
// CHECK-NEXT: pointer_to_address
// CHECK: } // end sil function 'invalid_use_after_lifetime5'
sil [ossa] @invalid_use_after_lifetime5 : $@convention(thin) (@owned Klass) -> @owned Builtin.NativeObject {
bb0(%0 : @owned $Klass):
%0a = begin_borrow %0 : $Klass
br bb1
@@ -878,23 +828,23 @@ bb1:
br bb2
bb2:
br bbLoop
br bb3
bbLoop:
bb3:
%f = function_ref @inguaranteed_nativeobject_user : $@convention(thin) (@in_guaranteed Builtin.NativeObject) -> ()
apply %f(%0c) : $@convention(thin) (@in_guaranteed Builtin.NativeObject) -> ()
%1 = address_to_pointer %0c : $*Builtin.NativeObject to $Builtin.RawPointer
%2 = pointer_to_address %1 : $Builtin.RawPointer to [strict] $*Builtin.NativeObject
cond_br undef, bbBackEdge, bb3
bbBackEdge:
br bbLoop
bb3:
end_borrow %0a : $Klass
br bb4
cond_br undef, bb4, bb5
bb4:
br bb3
bb5:
end_borrow %0a : $Klass
br bb6
bb6:
%3 = load [copy] %2 : $*Builtin.NativeObject
destroy_value %0 : $Klass
return %3 : $Builtin.NativeObject
@@ -915,37 +865,38 @@ bb1:
br bb2
bb2:
br bbLoop
br bb3
bbLoop:
bb3:
%f = function_ref @inguaranteed_nativeobject_user : $@convention(thin) (@in_guaranteed Builtin.NativeObject) -> ()
apply %f(%0c) : $@convention(thin) (@in_guaranteed Builtin.NativeObject) -> ()
%1 = address_to_pointer %0c : $*Builtin.NativeObject to $Builtin.RawPointer
%2 = pointer_to_address %1 : $Builtin.RawPointer to [strict] $*Builtin.NativeObject
br bbLoop2
bbLoop2:
%3 = load [copy] %2 : $*Builtin.NativeObject
cond_br undef, bbBackEdge, bb3
bbBackEdge:
destroy_value %3 : $Builtin.NativeObject
br bbLoop
bb3:
end_borrow %0a : $Klass
br bb4
bb4:
%3 = load [copy] %2 : $*Builtin.NativeObject
cond_br undef, bb5, bb6
bb5:
destroy_value %3 : $Builtin.NativeObject
br bb3
bb6:
end_borrow %0a : $Klass
br bb7
bb7:
destroy_value %0 : $Klass
return %3 : $Builtin.NativeObject
}
// CHECK-LABEL: sil [ossa] @interior_pointer_lifetime_extension_int_ptr_need_lifetime_ext_loop_3 : $@convention(thin) (@owned Klass) -> @owned Builtin.NativeObject {
// CHECK-NOT: address_to_pointer
// CHECK-NOT: pointer_to_address
// CHECK: } // end sil function 'interior_pointer_lifetime_extension_int_ptr_need_lifetime_ext_loop_3'
sil [ossa] @interior_pointer_lifetime_extension_int_ptr_need_lifetime_ext_loop_3 : $@convention(thin) (@owned Klass) -> @owned Builtin.NativeObject {
// CHECK-LABEL: sil [ossa] @invalid_use_after_lifetime6 :
// CHECK: apply
// CHECK-NEXT: address_to_pointer
// CHECK-NEXT: pointer_to_address
// CHECK: } // end sil function 'invalid_use_after_lifetime6'
sil [ossa] @invalid_use_after_lifetime6 : $@convention(thin) (@owned Klass) -> @owned Builtin.NativeObject {
bb0(%0 : @owned $Klass):
%0a = begin_borrow %0 : $Klass
br bb1
@@ -954,34 +905,34 @@ bb1:
br bb2
bb2:
br bbLoop(%0a : $Klass, undef : $Klass)
br bb3(%0a : $Klass, undef : $Klass)
bbLoop(%arg : @guaranteed $Klass, %base : @owned $Klass):
bb3(%arg : @guaranteed $Klass, %base : @owned $Klass):
%0b = ref_element_addr %arg : $Klass, #Klass.structField
%0c = struct_element_addr %0b : $*NativeObjectWrapper, #NativeObjectWrapper.obj
br bbLoop2
br bb4
bbLoop2:
bb4:
%f = function_ref @inguaranteed_nativeobject_user : $@convention(thin) (@in_guaranteed Builtin.NativeObject) -> ()
apply %f(%0c) : $@convention(thin) (@in_guaranteed Builtin.NativeObject) -> ()
%1 = address_to_pointer %0c : $*Builtin.NativeObject to $Builtin.RawPointer
%2 = pointer_to_address %1 : $Builtin.RawPointer to [strict] $*Builtin.NativeObject
br bbLoop3
br bb5
bbLoop3:
bb5:
end_borrow %arg : $Klass
destroy_value %base : $Klass
%3 = load [copy] %2 : $*Builtin.NativeObject
cond_br undef, bbBackEdge, bb3
cond_br undef, bb6, bb7
bbBackEdge:
bb6:
destroy_value %3 : $Builtin.NativeObject
br bbLoop(undef : $Klass, undef : $Klass)
br bb3(undef : $Klass, undef : $Klass)
bb3:
br bb4
bb7:
br bb8
bb4:
bb8:
destroy_value %0 : $Klass
return %3 : $Builtin.NativeObject
}

View File

@@ -217,8 +217,6 @@ bb0(%0 : @owned $Optional<Klass>):
//
// CHECK-LABEL: sil @test_zero_alignment : $@convention(thin) <T> (Builtin.RawPointer, @thick T.Type) -> @out T {
// CHECK: bb0(%0 : $*T, %1 : $Builtin.RawPointer, %2 : $@thick T.Type):
// CHECK-NOT: integer_literal
// CHECK-NOT: builtin "assumeAlignment"
// CHECK: [[PTR:%.*]] = pointer_to_address %1 : $Builtin.RawPointer to $*T
// CHECK: copy_addr [[PTR]] to [init] %0 : $*T
// CHECK-LABEL: } // end sil function 'test_zero_alignment'
@@ -246,8 +244,6 @@ bb0(%0 : $*T, %1 : $Builtin.RawPointer, %2 : $@thick T.Type):
//
// CHECK-LABEL: sil @test_nonzero_alignment : $@convention(thin) <T> (Builtin.RawPointer, @thick T.Type) -> @out T {
// CHECK: bb0(%0 : $*T, %1 : $Builtin.RawPointer, %2 : $@thick T.Type):
// CHECK-NOT: integer_literal
// CHECK-NOT: builtin "assumeAlignment"
// CHECK: [[PTR:%.*]] = pointer_to_address %1 : $Builtin.RawPointer to [align=8] $*T
// CHECK: copy_addr [[PTR]] to [init] %0 : $*T
// CHECK-LABEL: } // end sil function 'test_nonzero_alignment'

View File

@@ -27,10 +27,9 @@ class KlassWithTailAllocatedElems {
// CHECK: [[PROJ:%.*]] = project_box [[BORROW]]
// CHECK-NOT: pointer_to_address
// CHECK-NOT: address_to_pointer
// CHECK: [[CAST_RESULT:%.*]] = unchecked_addr_cast [[PROJ]]
// CHECK-NOT: pointer_to_address
// CHECK-NOT: address_to_pointer
// CHECK: load [trivial] [[CAST_RESULT]]
// CHECK: load [trivial] [[PROJ]]
// CHECK: end_borrow [[BORROW]]
// CHECK: destroy_value [[BOX]]
// CHECK-LABEL: } // end sil function 'unchecked_addr_cast_borrowed_box'

View File

@@ -5152,9 +5152,9 @@ bb0:
%borrow = begin_borrow %8 : $KlassWithTailAllocatedElems
%addr = ref_tail_addr %borrow : $KlassWithTailAllocatedElems, $Builtin.NativeObject
%1 = address_to_pointer %addr : $*Builtin.NativeObject to $Builtin.RawPointer
end_borrow %borrow : $KlassWithTailAllocatedElems
%2 = pointer_to_address %1 : $Builtin.RawPointer to [strict] $*Builtin.Word
%3 = load [trivial] %2 : $*Builtin.Word
end_borrow %borrow : $KlassWithTailAllocatedElems
destroy_value %8 : $KlassWithTailAllocatedElems
return %3 : $Builtin.Word
}
@@ -5277,16 +5277,13 @@ bb0(%0 : $UnsafePointer<AnyObject>):
}
// CHECK-LABEL: sil [ossa] @test_pointer_to_address : $@convention(thin) (@owned Klass, Builtin.Word) -> @owned AnyObject {
// CHECK: bb0(%0 : @owned $Klass, %1 : $Builtin.Word):
// CHECK: [[CP1:%.*]] = copy_value %0 : $Klass
// CHECK: [[BORROW:%.*]] = begin_borrow [[CP1]] : $Klass
// CHECK: ref_tail_addr [[BORROW]] : $Klass, $AnyObject
// CHECK: index_addr %{{.*}} : $*AnyObject, %1 : $Builtin.Word
// CHECK: index_addr %{{.*}} : $*AnyObject, %1 : $Builtin.Word
// CHECK: load [copy] %{{.*}} : $*AnyObject
// CHECK: end_borrow [[BORROW]] : $Klass
// CHECK: destroy_value [[CP1]] : $Klass
// CHECK: fix_lifetime %0 : $Klass
// CHECK: [[P:%.*]] = address_to_pointer
// CHECK: end_borrow
// CHECK: [[A:%.*]] = pointer_to_address [[P]]
// CHECK: [[I:%.*]] = index_addr [[A]]
// CHECK-NOT: address_to_pointer
// CHECK-NOT: pointer_to_address
// CHECK: load [copy] [[I]]
// CHECK-LABEL: } // end sil function 'test_pointer_to_address'
sil [ossa] @test_pointer_to_address : $@convention(thin) (@owned Klass, Builtin.Word) -> @owned AnyObject {
bb0(%0 : @owned $Klass, %1 : $Builtin.Word):
@@ -5306,17 +5303,13 @@ bb0(%0 : @owned $Klass, %1 : $Builtin.Word):
}
// CHECK_COPYPROP-LABEL: sil [ossa] @test_pointer_to_address_copyprop : {{.*}} {
// CHECK_COPYPROP: bb0([[WORD:%[^,]+]] :
// CHECK_COPYPROP: [[INSTANCE:%[^,]+]] = apply
// CHECK_COPYPROP-NOT: copy_value
// CHECK_COPYPROP: [[BORROW:%.*]] = begin_borrow [[INSTANCE]] : $Klass
// CHECK_COPYPROP: ref_tail_addr [[BORROW]] : $Klass, $AnyObject
// CHECK_COPYPROP: index_addr %{{.*}} : $*AnyObject, [[WORD]] : $Builtin.Word
// CHECK_COPYPROP: index_addr %{{.*}} : $*AnyObject, [[WORD]] : $Builtin.Word
// CHECK_COPYPROP: load [copy] %{{.*}} : $*AnyObject
// CHECK_COPYPROP: end_borrow [[BORROW]] : $Klass
// CHECK_COPYPROP: fix_lifetime [[INSTANCE]] : $Klass
// CHECK_COPYPROP: destroy_value [[INSTANCE]] : $Klass
// CHECK_COPYPROP: [[P:%.*]] = address_to_pointer
// CHECK_COPYPROP: end_borrow
// CHECK_COPYPROP: [[A:%.*]] = pointer_to_address [[P]]
// CHECK_COPYPROP: [[I:%.*]] = index_addr [[A]]
// CHECK_COPYPROP-NOT: address_to_pointer
// CHECK_COPYPROP-NOT: pointer_to_address
// CHECK_COPYPROP: load [copy] [[I]]
// CHECK_COPYPROP-LABEL: } // end sil function 'test_pointer_to_address_copyprop'
sil [ossa] @test_pointer_to_address_copyprop : $@convention(thin) (Builtin.Word) -> @owned AnyObject {
bb0(%1 : $Builtin.Word):

View File

@@ -25,10 +25,9 @@ bb0(%0 : $*Int):
}
// CHECK-LABEL: sil @mismatching_type :
// CHECK: address_to_pointer
// CHECK: [[A:%.*]] = pointer_to_address
// CHECK: [[L:%.*]] = load [[A]]
// CHECK: return [[L]]
// CHECK: %1 = unchecked_addr_cast %0
// CHECK: %2 = load %1
// CHECK: return %2
// CHECK: } // end sil function 'mismatching_type'
sil @mismatching_type : $@convention(thin) (@in Int) -> Bool {
bb0(%0 : $*Int):
@@ -63,9 +62,56 @@ bb0(%0 : @guaranteed $C):
%1 = begin_borrow %0 : $C
%2 = ref_tail_addr %1: $C, $Int
%3 = address_to_pointer %2 : $*Int to $Builtin.RawPointer
%5 = pointer_to_address %3 : $Builtin.RawPointer to [strict] $*Int
end_borrow %1 : $C
%6 = load [trivial] %5 : $*Int
return %6 : $Int
}
// CHECK-LABEL: sil [ossa] @borrow_no_escape :
// CHECK-NOT: address_to_pointer
// CHECK-NOT: pointer_to_address
// CHECK: } // end sil function 'borrow_no_escape'
sil [ossa] @borrow_no_escape : $@convention(thin) (@guaranteed C) -> Int {
bb0(%0 : @guaranteed $C):
%1 = begin_borrow %0 : $C
%2 = ref_tail_addr %1: $C, $Int
%3 = address_to_pointer %2 : $*Int to $Builtin.RawPointer
%5 = pointer_to_address %3 : $Builtin.RawPointer to [strict] $*Int
%6 = load [trivial] %5 : $*Int
end_borrow %1 : $C
return %6 : $Int
}
// CHECK-LABEL: sil [ossa] @stack_escape :
// CHECK: address_to_pointer
// CHECK: %4 = pointer_to_address
// CHECK: load [trivial] %4
// CHECK: } // end sil function 'stack_escape'
sil [ossa] @stack_escape : $@convention(thin) (Int) -> Int {
bb0(%0 : $Int):
%1 = alloc_stack $Int
store %0 to [trivial] %1
%3 = address_to_pointer %1 to $Builtin.RawPointer
%4 = pointer_to_address %3 to [strict] $*Int
dealloc_stack %1
%6 = load [trivial] %4 : $*Int
return %6 : $Int
}
// CHECK-LABEL: sil [ossa] @stack_no_escape :
// CHECK-NOT: address_to_pointer
// CHECK-NOT: pointer_to_address
// CHECK: load [trivial] %1
// CHECK: } // end sil function 'stack_no_escape'
sil [ossa] @stack_no_escape : $@convention(thin) (Int) -> Int {
bb0(%0 : $Int):
%1 = alloc_stack $Int
store %0 to [trivial] %1
%3 = address_to_pointer %1 to $Builtin.RawPointer
%4 = pointer_to_address %3 to [strict] $*Int
%6 = load [trivial] %4 : $*Int
dealloc_stack %1
return %6 : $Int
}
@@ -90,3 +136,136 @@ bb0:
return %6 : $Int
}
// CHECK-LABEL: sil [ossa] @const_alignemnt :
// CHECK: pointer_to_address %1 to [align=8] $*T
// CHECK: } // end sil function 'const_alignemnt'
sil [ossa] @const_alignemnt : $@convention(thin) <T> (Builtin.RawPointer, @thick T.Type) -> @out T {
bb0(%0 : $*T, %1 : $Builtin.RawPointer, %2 : $@thick T.Type):
%3 = integer_literal $Builtin.Word, 8
%4 = builtin "assumeAlignment"(%1, %3) : $Builtin.RawPointer
%5 = pointer_to_address %4 to [align=1] $*T
copy_addr %5 to [init] %0
%10 = tuple ()
return %10
}
// CHECK-LABEL: sil [ossa] @zero_alignemnt :
// CHECK: pointer_to_address %1 to $*T
// CHECK: } // end sil function 'zero_alignemnt'
sil [ossa] @zero_alignemnt : $@convention(thin) <T> (Builtin.RawPointer, @thick T.Type) -> @out T {
bb0(%0 : $*T, %1 : $Builtin.RawPointer, %2 : $@thick T.Type):
%3 = integer_literal $Builtin.Word, 0
%4 = builtin "assumeAlignment"(%1, %3) : $Builtin.RawPointer
%5 = pointer_to_address %4 to [align=1] $*T
copy_addr %5 to [init] %0
%10 = tuple ()
return %10
}
// CHECK-LABEL: sil [ossa] @type_alignment :
// CHECK: pointer_to_address %1 to $*T
// CHECK: } // end sil function 'type_alignment'
sil [ossa] @type_alignment : $@convention(thin) <T> (Builtin.RawPointer, @thick T.Type) -> @out T {
bb0(%0 : $*T, %1 : $Builtin.RawPointer, %2 : $@thick T.Type):
%3 = metatype $@thick T.Type
%4 = builtin "alignof"<T>(%3) : $Builtin.Word
%5 = builtin "sextOrBitCast_Word_Int64"(%4) : $Builtin.Int64
%6 = builtin "truncOrBitCast_Int64_Word"(%5) : $Builtin.Word
%7 = builtin "assumeAlignment"(%1, %6) : $Builtin.RawPointer
%8 = pointer_to_address %7 to [align=1] $*T
copy_addr %8 to [init] %0
%10 = tuple ()
return %10
}
// CHECK-LABEL: sil [ossa] @indexrawpointer_to_indexaddr :
// CHECK: %2 = pointer_to_address %0
// CHECK-NEXT: %3 = index_addr %2, %1
// CHECK-NEXT: load [trivial] %3
// CHECK: } // end sil function 'indexrawpointer_to_indexaddr'
sil [ossa] @indexrawpointer_to_indexaddr : $@convention(thin) (Builtin.RawPointer, Builtin.Word) -> Int8 {
bb0(%0 : $Builtin.RawPointer, %1 : $Builtin.Word):
%3 = metatype $@thick Int8.Type
%4 = builtin "strideof"<Int8>(%3) : $Builtin.Word
%6 = integer_literal $Builtin.Int1, -1
%7 = builtin "smul_with_overflow_Word"(%4, %1, %6) : $(Builtin.Word, Builtin.Int1)
%8 = tuple_extract %7, 0
%9 = index_raw_pointer %0, %8
%10 = pointer_to_address %9 to [strict] $*Int8
%11 = load [trivial] %10
return %11
}
// CHECK-LABEL: sil [ossa] @indexrawpointer_to_indexaddr_strideof :
// CHECK: %2 = pointer_to_address %0
// CHECK-NEXT: %3 = index_addr %2, %1
// CHECK-NEXT: load [trivial] %3
// CHECK: } // end sil function 'indexrawpointer_to_indexaddr_strideof'
sil [ossa] @indexrawpointer_to_indexaddr_strideof : $@convention(thin) (Builtin.RawPointer, Builtin.Word) -> Int8 {
bb0(%0 : $Builtin.RawPointer, %1 : $Builtin.Word):
%3 = metatype $@thick Int8.Type
%4 = builtin "strideof"<Int8>(%3) : $Builtin.Word
%6 = integer_literal $Builtin.Int1, -1
%7 = builtin "smul_with_overflow_Word"(%4, %1, %6) : $(Builtin.Word, Builtin.Int1)
%8 = tuple_extract %7, 0
%9 = index_raw_pointer %0, %8
%10 = pointer_to_address %9 to [strict] $*Int8
%11 = load [trivial] %10
return %11
}
// CHECK-LABEL: sil [ossa] @indexrawpointer_to_indexaddr_mismatched_metatype :
// CHECK-NOT: index_addr
// CHECK: index_raw_pointer
// CHECK-NOT: index_addr
// CHECK: } // end sil function 'indexrawpointer_to_indexaddr_mismatched_metatype'
sil [ossa] @indexrawpointer_to_indexaddr_mismatched_metatype : $@convention(thin) (Builtin.RawPointer, Builtin.Word) -> Int32 {
bb0(%0 : $Builtin.RawPointer, %1 : $Builtin.Word):
%3 = metatype $@thick Int8.Type
%4 = builtin "strideof"<Int8>(%3) : $Builtin.Word
%6 = integer_literal $Builtin.Int1, -1
%7 = builtin "smul_with_overflow_Word"(%4, %1, %6) : $(Builtin.Word, Builtin.Int1)
%8 = tuple_extract %7, 0
%9 = index_raw_pointer %0, %8
%10 = pointer_to_address %9 to [strict] $*Int32
%11 = load [trivial] %10
return %11
}
// CHECK-LABEL: sil [ossa] @indexrawpointer_to_indexaddr_with_casts :
// CHECK: %2 = pointer_to_address %0
// CHECK-NEXT: %3 = builtin "truncOrBitCast_Int64_Word"(%1)
// CHECK-NEXT: %4 = index_addr %2, %3
// CHECK-NEXT: load [trivial] %4
// CHECK: } // end sil function 'indexrawpointer_to_indexaddr_with_casts'
sil [ossa] @indexrawpointer_to_indexaddr_with_casts : $@convention(thin) (Builtin.RawPointer, Builtin.Int64) -> Int32 {
bb0(%0 : $Builtin.RawPointer, %1: $Builtin.Int64):
%2 = integer_literal $Builtin.Int1, -1
%3 = metatype $@thick Int32.Type
%4 = builtin "strideof"<Int32>(%3) : $Builtin.Word
%5 = builtin "zextOrBitCast_Word_Int64"(%4) : $Builtin.Int64
%6 = builtin "smul_with_overflow_Int64"(%1, %5, %2) : $(Builtin.Int64, Builtin.Int1)
%7 = tuple_extract %6, 0
%8 = builtin "truncOrBitCast_Int64_Word"(%7) : $Builtin.Word
%9 = index_raw_pointer %0, %8
%10 = pointer_to_address %9 to [strict] $*Int32
%11 = load [trivial] %10
return %11
}
// CHECK-LABEL: sil [ossa] @unchecked_ownership_conversion :
// CHECK: %4 = ref_tail_addr
// CHECK-NEXT: %5 = load [trivial] %4
// CHECK: } // end sil function 'unchecked_ownership_conversion'
sil [ossa] @unchecked_ownership_conversion : $@convention(thin) (Unmanaged<C>) -> Int {
bb0(%0 : $Unmanaged<C>):
%1 = struct_extract %0, #Unmanaged._value
%2 = unmanaged_to_ref %1 to $C
%3 = unchecked_ownership_conversion %2, @unowned to @guaranteed
%4 = ref_tail_addr %3, $*Int
%5 = address_to_pointer %4 to $Builtin.RawPointer
%6 = pointer_to_address %5 to [strict] $*Int
%7 = load [trivial] %6
end_borrow %3
return %7
}