[IRGen] Make pointers to accessor functions in layout strings relative (#64155)

* [IRGen] Make pointers to accessor functions in layout strings relative

rdar://106319336

Pointers embedded in static layout strings should always be relative, so layout strings can reside in read-only memory.

* Properly handle reference storage ownership

* Pass layout tag and metadata / type layout pointers separately

* Layout string instantiation fully working

* Fix cases where the hasLayoutString flag was not set when it should have been

* Update include/swift/ABI/Metadata.h
Dario Rexin
2023-03-17 09:02:51 -07:00
committed by GitHub
parent e8be44d14e
commit 2f8c1a402c
13 changed files with 528 additions and 525 deletions
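
The gist of the change, as a minimal sketch (the helper and its names are hypothetical; the real decoding is in getResilientTypeMetadata in BytecodeLayouts.cpp further down): rather than storing an absolute accessor address in the layout string, IRGen emits the signed distance from the slot to the accessor, and the runtime adds the slot's own address back to recover a callable pointer. Because the stored value is position-independent, the string needs no load-time relocation and can live in read-only memory.

#include <cstdint>
#include <cstring>

using MetadataAccessor = void *(*)(const void *const *);

// Hypothetical helper: resolve a relative accessor reference stored at
// layoutStr + offset. The emitter wrote `accessor - slotAddress` there,
// so adding the slot address back yields the absolute function pointer.
static MetadataAccessor resolveRelativeAccessor(const uint8_t *layoutStr,
                                                size_t offset) {
  const uint8_t *slot = layoutStr + offset; // address the offset is relative to
  int32_t relative;
  std::memcpy(&relative, slot, sizeof(relative)); // signed 32-bit delta
  return (MetadataAccessor)((uintptr_t)slot + (intptr_t)relative);
}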


@@ -648,7 +648,8 @@ SWIFT_RUNTIME_EXPORT
void swift_initStructMetadataWithLayoutString(StructMetadata *self,
StructLayoutFlags flags,
size_t numFields,
const Metadata *const *fieldTypes,
const uint8_t *const *fieldTypes,
const uint8_t *fieldTags,
uint32_t *fieldOffsets);
/// Allocate the metadata for a class and copy fields from the given pattern.


@@ -1260,7 +1260,8 @@ FUNCTION(InitStructMetadataWithLayoutString,
swift_initStructMetadataWithLayoutString, C_CC, AlwaysAvailable,
RETURNS(VoidTy),
ARGS(TypeMetadataPtrTy, SizeTy, SizeTy,
TypeMetadataPtrPtrTy,
Int8PtrPtrTy->getPointerTo(0),
Int8PtrTy,
Int32Ty->getPointerTo()),
ATTRS(NoUnwind),
EFFECT(MetaData))


@@ -760,7 +760,7 @@ namespace {
case ReferenceCounting::Bridge: kind = ScalarKind::BridgeReference; break; \
case ReferenceCounting::Error: kind = ScalarKind::ErrorReference; break; \
case ReferenceCounting::None: kind = ScalarKind::TriviallyDestroyable; break; \
case ReferenceCounting::Custom: kind = ScalarKind::UnknownReference; break; \
case ReferenceCounting::Custom: kind = ScalarKind::CustomReference; break; \
} \
return IGM.typeLayoutCache.getOrCreateScalarEntry(*this, T, kind); \
} \


@@ -2585,12 +2585,31 @@ void irgen::emitLazyTypeContextDescriptor(IRGenModule &IGM,
RequireMetadata_t requireMetadata) {
eraseExistingTypeContextDescriptor(IGM, type);
bool hasLayoutString = false;
auto lowered = getLoweredTypeInPrimaryContext(
IGM, type->getDeclaredType()->getCanonicalType());
auto &ti = IGM.getTypeInfo(lowered);
auto *typeLayoutEntry =
ti.buildTypeLayoutEntry(IGM, lowered, /*useStructLayouts*/ true);
if (IGM.Context.LangOpts.hasFeature(Feature::LayoutStringValueWitnesses)) {
auto genericSig =
lowered.getNominalOrBoundGenericNominal()->getGenericSignature();
hasLayoutString = !!typeLayoutEntry->layoutString(IGM, genericSig);
}
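// With the instantiation feature enabled below, the flag is additionally set
// for struct types whose layout string can only be produced when metadata is
// instantiated at runtime: foreign types, types needing singleton metadata
// initialization, and generic contexts without a fixed layout.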
if (auto sd = dyn_cast<StructDecl>(type)) {
if (IGM.Context.LangOpts.hasFeature(Feature::LayoutStringValueWitnessesInstantiation)) {
hasLayoutString |= requiresForeignTypeMetadata(type) ||
needsSingletonMetadataInitialization(IGM, type) ||
(type->isGenericContext() && !isa<FixedTypeInfo>(ti));
}
StructContextDescriptorBuilder(IGM, sd, requireMetadata,
/*hasLayoutString*/ false).emit();
hasLayoutString).emit();
} else if (auto ed = dyn_cast<EnumDecl>(type)) {
EnumContextDescriptorBuilder(IGM, ed, requireMetadata,
/*hasLayoutString*/ false)
hasLayoutString)
.emit();
} else if (auto cd = dyn_cast<ClassDecl>(type)) {
ClassContextDescriptorBuilder(IGM, cd, requireMetadata).emit();
@@ -2873,8 +2892,8 @@ static void emitInitializeFieldOffsetVector(IRGenFunction &IGF,
}
static void emitInitializeFieldOffsetVectorWithLayoutString(
IRGenFunction &IGF, SILType T, llvm::Value *metadata, bool isVWTMutable,
MetadataDependencyCollector *collector) {
IRGenFunction &IGF, SILType T, llvm::Value *metadata,
bool isVWTMutable, MetadataDependencyCollector *collector) {
auto &IGM = IGF.IGM;
assert(IGM.Context.LangOpts.hasFeature(
Feature::LayoutStringValueWitnessesInstantiation));
@@ -2892,38 +2911,68 @@ static void emitInitializeFieldOffsetVectorWithLayoutString(
// Fill out an array with the field type metadata records.
Address fieldsMetadata =
IGF.createAlloca(llvm::ArrayType::get(IGM.TypeMetadataPtrTy, numFields),
IGF.createAlloca(llvm::ArrayType::get(IGM.Int8PtrPtrTy, numFields),
IGM.getPointerAlignment(), "fieldsMetadata");
IGF.Builder.CreateLifetimeStart(fieldsMetadata,
IGM.getPointerSize() * numFields);
fieldsMetadata = IGF.Builder.CreateStructGEP(fieldsMetadata, 0, Size(0));
Address fieldTags =
IGF.createAlloca(llvm::ArrayType::get(IGM.Int8Ty, numFields),
Alignment(1), "fieldTags");
IGF.Builder.CreateLifetimeStart(fieldTags, Size(numFields));
fieldTags = IGF.Builder.CreateStructGEP(fieldTags, 0, Size(0));
unsigned index = 0;
forEachField(IGM, target, [&](Field field) {
assert(field.isConcrete() &&
"initializing offset vector for type with missing member?");
SILType propTy = field.getType(IGM, T);
llvm::Value *fieldMetatype;
llvm::Value *fieldTag;
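// Field tag encoding (decoded again in swift_initStructMetadataWithLayoutString):
// 0x0 = plain metadata pointer; 0x1/0x2 = unowned (pointer-sized/other size);
// 0x3/0x4 = weak (pointer-sized/other size); 0x5/0x6 = unmanaged. Tags 0x1-0x4
// turn into ref counting ops at instantiation time, while 0x5/0x6 only
// contribute size and alignment.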
if (auto ownership = propTy.getReferenceStorageOwnership()) {
auto &ti = IGF.getTypeInfo(propTy.getObjectType());
auto *fixedTI = dyn_cast<FixedTypeInfo>(&ti);
assert(fixedTI && "Reference should have fixed layout");
auto fixedSize = fixedTI->getFixedSize();
fieldMetatype = emitTypeLayoutRef(IGF, propTy, collector);
switch (*ownership) {
case ReferenceOwnership::Unowned:
fieldTag = llvm::Constant::getIntegerValue(
IGM.Int8Ty, APInt(IGM.Int8Ty->getBitWidth(),
fixedSize == IGM.getPointerSize() ? 0x1 : 0x2));
break;
case ReferenceOwnership::Weak:
fieldMetatype = llvm::Constant::getIntegerValue(
IGM.TypeMetadataPtrTy, APInt(IGM.IntPtrTy->getBitWidth(), 0x7));
fieldTag = llvm::Constant::getIntegerValue(
IGM.Int8Ty, APInt(IGM.Int8Ty->getBitWidth(),
fixedSize == IGM.getPointerSize() ? 0x3 : 0x4));
break;
case ReferenceOwnership::Unmanaged:
fieldTag = llvm::Constant::getIntegerValue(
IGM.Int8Ty, APInt(IGM.Int8Ty->getBitWidth(),
fixedSize == IGM.getPointerSize() ? 0x5 : 0x6));
break;
case ReferenceOwnership::Strong:
case ReferenceOwnership::Unowned:
case ReferenceOwnership::Unmanaged:
llvm_unreachable("Unmanaged reference should have been lowered");
llvm_unreachable("Strong reference should have been lowered");
break;
}
} else {
fieldTag = llvm::Constant::getIntegerValue(
IGM.Int8Ty, APInt(IGM.Int8Ty->getBitWidth(), 0x0));
auto request = DynamicMetadataRequest::getNonBlocking(
MetadataState::LayoutComplete, collector);
fieldMetatype = IGF.emitTypeMetadataRefForLayout(propTy, request);
fieldMetatype = IGF.Builder.CreateBitCast(fieldMetatype, IGM.Int8PtrPtrTy);
}
Address fieldTagAddr = IGF.Builder.CreateConstArrayGEP(
fieldTags, index, Size::forBits(IGM.Int8Ty->getBitWidth()));
IGF.Builder.CreateStore(fieldTag, fieldTagAddr);
Address fieldMetatypeAddr = IGF.Builder.CreateConstArrayGEP(
fieldsMetadata, index, IGM.getPointerSize());
IGF.Builder.CreateStore(fieldMetatype, fieldMetatypeAddr);
++index;
});
assert(index == numFields);
@@ -2937,8 +2986,10 @@ static void emitInitializeFieldOffsetVectorWithLayoutString(
IGF.Builder.CreateCall(
IGM.getInitStructMetadataWithLayoutStringFunctionPointer(),
{metadata, IGM.getSize(Size(uintptr_t(flags))), numFieldsV,
fieldsMetadata.getAddress(), fieldVector});
fieldsMetadata.getAddress(), fieldTags.getAddress(), fieldVector});
IGF.Builder.CreateLifetimeEnd(fieldTags,
IGM.getPointerSize() * numFields);
IGF.Builder.CreateLifetimeEnd(fieldsMetadata,
IGM.getPointerSize() * numFields);
}
@@ -5133,8 +5184,10 @@ namespace {
return false;
}
return !!getLayoutString() ||
IGM.Context.LangOpts.hasFeature(
Feature::LayoutStringValueWitnessesInstantiation);
(IGM.Context.LangOpts.hasFeature(
Feature::LayoutStringValueWitnessesInstantiation) &&
(HasDependentVWT || HasDependentMetadata) &&
!isa<FixedTypeInfo>(IGM.getTypeInfo(getLoweredType())));
}
llvm::Constant *emitNominalTypeDescriptor() {


@@ -731,6 +731,9 @@ namespace {
std::vector<TypeLayoutEntry *> fields;
for (auto &field : getFields()) {
auto fieldTy = field.getType(IGM, T);
if (!fieldTy) {
return IGM.typeLayoutCache.getOrCreateTypeInfoBasedEntry(*this, T);
}
fields.push_back(
field.getTypeInfo().buildTypeLayoutEntry(IGM, fieldTy, useStructLayouts));
}


@@ -35,6 +35,25 @@ using namespace irgen;
namespace swift {
namespace irgen {
enum class LayoutStringFlags : uint64_t {
Empty = 0,
// TODO: Track other useful information that can be used to optimize layout
// strings, like the different reference kinds contained in the string or the
// number of ref counting operations (maybe up to 4), so we can use witness
// functions optimized for these cases.
HasRelativePointers = (1ULL << 63),
};
inline bool operator&(LayoutStringFlags a, LayoutStringFlags b) {
return (uint64_t(a) & uint64_t(b)) != 0;
}
inline LayoutStringFlags operator|(LayoutStringFlags a, LayoutStringFlags b) {
return LayoutStringFlags(uint64_t(a) | uint64_t(b));
}
inline LayoutStringFlags &operator|=(LayoutStringFlags &a, LayoutStringFlags b) {
return a = (a | b);
}
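// Intended use of the operators above (illustrative):
//   LayoutStringFlags flags = LayoutStringFlags::Empty;
//   flags |= LayoutStringFlags::HasRelativePointers;
//   if (flags & LayoutStringFlags::HasRelativePointers) { /* resolve */ }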
class LayoutStringBuilder {
public:
enum class RefCountingKind : uint8_t {
@@ -51,17 +70,13 @@ public:
ObjC = 0x0a,
Custom = 0x0b,
Metatype = 0x0c,
Generic = 0x0d,
// reserved
// Metatype = 0x0c,
Existential = 0x0e,
Resilient = 0x0f,
SinglePayloadEnum = 0x10,
Alignment = 0x7d,
DynamicAlignmentStart = 0x7e,
DynamicAlignmentEnd = 0x7f,
Skip = 0x80,
// We may use the MSB as a flag that a count follows,
// so all following values are reserved
@@ -69,6 +84,7 @@ public:
};
private:
struct RefCounting {
RefCountingKind kind;
union {
@@ -106,7 +122,6 @@ private:
};
std::vector<RefCounting> refCountings;
bool isInstantiationPattern = false;
public:
LayoutStringBuilder() = default;
@@ -141,73 +156,35 @@ public:
refCountings.push_back(op);
}
void addResilientRefCount(llvm::Function *metaTypeRef) {
RefCounting op;
op.kind = RefCountingKind::Resilient;
op.metaTypeRef = metaTypeRef;
refCountings.push_back(op);
isInstantiationPattern = true;
}
public:
void addGenericRefCount(uint32_t genericIdx) {
RefCounting op;
op.kind = RefCountingKind::Generic;
op.genericIdx = genericIdx;
refCountings.push_back(op);
isInstantiationPattern = true;
}
void addAlignment(uint64_t alignment) {
RefCounting op;
op.kind = RefCountingKind::Alignment;
op.alignment = alignment;
refCountings.push_back(op);
}
void startDynamicAlignment() {
RefCounting op;
op.kind = RefCountingKind::DynamicAlignmentStart;
refCountings.push_back(op);
}
void endDynamicAlignment() {
RefCounting op;
op.kind = RefCountingKind::DynamicAlignmentEnd;
refCountings.push_back(op);
}
private:
void generateStatic(IRGenModule &IGM, ConstantStructBuilder &B) const {
void result(IRGenModule &IGM, ConstantStructBuilder &B) const {
auto flagsPlaceholder = B.addPlaceholderWithSize(IGM.Int64Ty);
auto sizePlaceholder = B.addPlaceholderWithSize(IGM.SizeTy);
size_t skip = 0;
size_t refCountBytes = 0;
LayoutStringFlags flags = LayoutStringFlags::Empty;
for (auto &refCounting : refCountings) {
switch (refCounting.kind) {
case RefCountingKind::Skip:
skip += refCounting.size;
break;
case RefCountingKind::Alignment: {
break;
}
case RefCountingKind::Resilient: {
uint64_t op = (static_cast<uint64_t>(refCounting.kind) << 56) | skip;
B.addInt64(op);
B.add(refCounting.metaTypeRef);
// We are not using a compact pointer here, because when instantiating
// a layout string, we determine the size of the new string by the
// size of its parts. On instantiation we also resolve this pointer
// to a full metadata pointer, which always has full pointer size.
B.addRelativeOffset(IGM.IntPtrTy, refCounting.metaTypeRef);
refCountBytes += sizeof(uint64_t) + IGM.getPointerSize().getValue();
flags |= LayoutStringFlags::HasRelativePointers;
skip = 0;
break;
}
case RefCountingKind::DynamicAlignmentStart:
case RefCountingKind::DynamicAlignmentEnd:
case RefCountingKind::Generic: {
llvm_unreachable("Found dynamic operations in a static layout");
break;
}
default: {
uint64_t op = (static_cast<uint64_t>(refCounting.kind) << 56) | skip;
B.addInt64(op);
@@ -220,133 +197,11 @@ private:
}
// size of ref counting ops in bytes
B.fillPlaceholderWithInt(flagsPlaceholder, IGM.Int64Ty,
static_cast<uint64_t>(flags));
B.fillPlaceholderWithInt(sizePlaceholder, IGM.SizeTy, refCountBytes);
B.addInt64(skip);
}
void generateDynamic(IRGenModule &IGM, ConstantStructBuilder &B) const {
auto sizePlaceholder = B.addPlaceholderWithSize(IGM.SizeTy);
size_t instCopyBytes = 0;
std::vector<GenericInstOp> genericInstOps;
size_t skip = 0;
size_t refCountBytes = 0;
for (auto &refCounting : refCountings) {
switch (refCounting.kind) {
case RefCountingKind::Skip:
skip += refCounting.size;
break;
case RefCountingKind::Alignment: {
auto alignmentMask = refCounting.alignment - 1;
skip += alignmentMask;
skip &= ~alignmentMask;
break;
}
case RefCountingKind::DynamicAlignmentStart:
case RefCountingKind::DynamicAlignmentEnd: {
// GenericInstOp op;
// op.type = GenericInstOp::Type::DynamicAlignment;
// op.alignment = { skip, refCounting.metaTypeRef };
// genericInstOps.push_back(op);
// skip = 0;
break;
}
case RefCountingKind::Generic: {
if (instCopyBytes > 0) {
genericInstOps.push_back({GenericInstOp::Type::Copy, {instCopyBytes}});
instCopyBytes = 0;
}
GenericInstOp op;
op.type = GenericInstOp::Type::Param;
op.generic = {skip, refCounting.genericIdx};
genericInstOps.push_back(op);
skip = 0;
break;
}
case RefCountingKind::Resilient: {
if (instCopyBytes > 0) {
genericInstOps.push_back(
{GenericInstOp::Type::Copy, {instCopyBytes}});
instCopyBytes = 0;
}
GenericInstOp op;
op.type = GenericInstOp::Type::Resilient;
op.resilient = {skip, refCounting.metaTypeRef};
genericInstOps.push_back(op);
skip = 0;
break;
}
default: {
uint64_t op = (static_cast<uint64_t>(refCounting.kind) << 56) | skip;
B.addInt64(op);
instCopyBytes += 8;
refCountBytes += 8;
skip = refCounting.size;
break;
}
}
}
// size of ref counting ops in bytes
B.fillPlaceholderWithInt(sizePlaceholder, IGM.SizeTy, refCountBytes);
B.addInt64(skip);
if (!genericInstOps.empty()) {
if (instCopyBytes > 0) {
genericInstOps.push_back({GenericInstOp::Type::Copy, {instCopyBytes}});
}
for (auto &genOp : genericInstOps) {
switch (genOp.type) {
case GenericInstOp::Type::Copy: {
uint64_t op = ((uint64_t)genOp.type << 56) | genOp.size;
B.addInt64(op);
break;
}
case GenericInstOp::Type::Param: {
uint64_t op = ((uint64_t)genOp.type << 56) | genOp.generic.offset;
B.addInt64(op);
B.addInt32(genOp.generic.idx);
break;
}
case GenericInstOp::Type::Resilient: {
uint64_t op = ((uint64_t)genOp.type << 56) | genOp.resilient.offset;
B.addInt64(op);
B.addCompactFunctionReference(genOp.resilient.metaTypeRef);
break;
}
case GenericInstOp::Type::DynamicAlignment: {
uint64_t op = ((uint64_t)genOp.type << 56) | genOp.alignment.offset;
B.addInt64(op);
B.addCompactFunctionReference(genOp.alignment.metaTypeRef);
break;
}
}
}
}
}
public:
void result(IRGenModule &IGM, ConstantStructBuilder &B) const {
if (isInstantiationPattern) {
generateDynamic(IGM, B);
} else {
generateStatic(IGM, B);
}
// NUL terminator
B.addInt64(0);
@@ -372,7 +227,7 @@ ScalarKind swift::irgen::refcountingToScalarKind(ReferenceCounting refCounting)
case ReferenceCounting::None:
return ScalarKind::TriviallyDestroyable;
case ReferenceCounting::Custom:
return ScalarKind::UnknownReference;
return ScalarKind::CustomReference;
}
}
@@ -411,6 +266,7 @@ static std::string scalarToString(ScalarKind kind) {
case ScalarKind::BlockStorage: return "BlockStorage";
case ScalarKind::ThickFunc: return "ThickFunc";
case ScalarKind::ExistentialReference: return "ExistentialReference";
case ScalarKind::CustomReference: return "Custom";
}
}
@@ -1250,6 +1106,8 @@ bool ScalarTypeLayoutEntry::refCountString(IRGenModule &IGM,
case ScalarKind::ExistentialReference:
B.addRefCount(LayoutStringBuilder::RefCountingKind::Existential, size);
break;
case ScalarKind::CustomReference:
return false;
default:
llvm_unreachable("Unsupported ScalarKind");
}
@@ -1359,6 +1217,9 @@ void ScalarTypeLayoutEntry::destroy(IRGenFunction &IGF, Address addr) const {
emitDestroyBoxedOpaqueExistentialBuffer(IGF, representative, addr);
return;
}
case ScalarKind::CustomReference: {
typeInfo.destroy(IGF, addr, representative, true);
}
}
}
@@ -1663,10 +1524,12 @@ AlignedGroupEntry::layoutString(IRGenModule &IGM,
"type_layout_string", genericSig.getCanonicalSignature(),
ty.getASTType()->mapTypeOutOfContext()->getCanonicalType());
_layoutString = SB.finishAndCreateGlobal(symbolName, IGM.getPointerAlignment(),
/*constant*/ true);
auto *global = SB.finishAndCreateGlobal(symbolName, IGM.getPointerAlignment(),
/*constant*/ true);
IGM.setTrueConstGlobal(global);
_layoutString = global;
return *_layoutString;
return global;
}
bool AlignedGroupEntry::refCountString(IRGenModule &IGM, LayoutStringBuilder &B,
@@ -2041,37 +1904,12 @@ ArchetypeLayoutEntry::isBitwiseTakable(IRGenFunction &IGF) const {
llvm::Constant *
ArchetypeLayoutEntry::layoutString(IRGenModule &IGM,
GenericSignature genericSig) const {
if (_layoutString) {
return *_layoutString;
}
LayoutStringBuilder B{};
if (!refCountString(IGM, B, genericSig)) {
return *(_layoutString = llvm::Optional<llvm::Constant *>(nullptr));
}
ConstantInitBuilder IB(IGM);
auto SB = IB.beginStruct();
SB.setPacked(true);
B.result(IGM, SB);
_layoutString = SB.finishAndCreateGlobal("", IGM.getPointerAlignment(),
/*constant*/ true);
return *_layoutString;
return nullptr;
}
bool ArchetypeLayoutEntry::refCountString(IRGenModule &IGM,
LayoutStringBuilder &B,
GenericSignature genericSig) const {
auto archetypeType = dyn_cast<ArchetypeType>(archetype.getASTType());
auto params = archetypeType->getGenericEnvironment()->getGenericParams();
for (auto param : params) {
B.addGenericRefCount(param->getIndex());
}
return false;
}
@@ -2215,9 +2053,13 @@ EnumTypeLayoutEntry::layoutString(IRGenModule &IGM,
"type_layout_string", genericSig.getCanonicalSignature(),
ty.getASTType()->mapTypeOutOfContext()->getCanonicalType());
_layoutString = SB.finishAndCreateGlobal(symbolName, IGM.getPointerAlignment(),
auto *global = SB.finishAndCreateGlobal(symbolName, IGM.getPointerAlignment(),
/*constant*/ true);
return *_layoutString;
IGM.setTrueConstGlobal(global);
_layoutString = global;
return global;
}
}
}
@@ -2225,6 +2067,8 @@ EnumTypeLayoutEntry::layoutString(IRGenModule &IGM,
bool EnumTypeLayoutEntry::refCountString(IRGenModule &IGM,
LayoutStringBuilder &B,
GenericSignature genericSig) const {
if (!isFixedSize(IGM)) return false;
switch (copyDestroyKind(IGM)) {
case CopyDestroyStrategy::TriviallyDestroyable: {
auto size = fixedSize(IGM);
@@ -2236,11 +2080,6 @@ bool EnumTypeLayoutEntry::refCountString(IRGenModule &IGM,
case CopyDestroyStrategy::ForwardToPayload:
return cases[0]->refCountString(IGM, B, genericSig);
case CopyDestroyStrategy::Normal: {
if (!isFixedSize(IGM)) {
// B.addResilientRefCount(accessor);
return false;
}
auto *accessor = createMetatypeAccessorFunction(IGM, ty, genericSig);
B.addFixedEnumRefCount(accessor);
B.addSkip(fixedSize(IGM)->getValue());
@@ -3393,29 +3232,13 @@ ResilientTypeLayoutEntry::isBitwiseTakable(IRGenFunction &IGF) const {
llvm::Constant *
ResilientTypeLayoutEntry::layoutString(IRGenModule &IGM,
GenericSignature genericSig) const {
LayoutStringBuilder B{};
if (!refCountString(IGM, B, genericSig)) {
return nullptr;
}
ConstantInitBuilder IB(IGM);
auto SB = IB.beginStruct();
B.result(IGM, SB);
return SB.finishAndCreateGlobal("", IGM.getPointerAlignment(),
/*constant*/ true);
return nullptr;
}
bool ResilientTypeLayoutEntry::refCountString(
IRGenModule &IGM, LayoutStringBuilder &B,
GenericSignature genericSig) const {
auto *accessor = createMetatypeAccessorFunction(IGM, ty, genericSig);
B.addResilientRefCount(accessor);
return false;
//return true;
}
void ResilientTypeLayoutEntry::computeProperties() {


@@ -51,6 +51,7 @@ enum class ScalarKind : uint8_t {
BlockStorage,
ThickFunc,
ExistentialReference,
CustomReference,
};
/// Convert a ReferenceCounting into the appropriate Scalar reference


@@ -17,8 +17,8 @@
//===----------------------------------------------------------------------===//
#include "BytecodeLayouts.h"
#include "../../public/runtime/WeakReference.h"
#include "../../public/SwiftShims/swift/shims/HeapObject.h"
#include "WeakReference.h"
#include "../SwiftShims/swift/shims/HeapObject.h"
#include "swift/ABI/MetadataValues.h"
#include "swift/ABI/System.h"
#include "swift/Runtime/Error.h"
@@ -35,7 +35,7 @@
using namespace swift;
static const size_t layoutStringHeaderSize = sizeof(size_t);
static const size_t layoutStringHeaderSize = sizeof(uint64_t) + sizeof(size_t);
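// Header layout assumed throughout this file (sketch): a uint64_t flags word,
// followed by a size_t holding the size of the ref counting ops in bytes; the
// ops themselves follow, then a trailing skip count and a 0 terminator word.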
/// Given a pointer and an offset, read the requested data and increment the
/// offset
@@ -59,16 +59,19 @@ Metadata *getExistentialTypeMetadata(OpaqueValue *object) {
typedef Metadata* (*MetadataAccessor)(const Metadata* const *);
const Metadata *getResilientTypeMetadata(const Metadata* metadata, const uint8_t *layoutStr, size_t &offset) {
auto fnPtr = readBytes<uintptr_t>(layoutStr, offset);
const Metadata *getResilientTypeMetadata(const Metadata* metadata,
const uint8_t *layoutStr,
size_t &offset) {
auto absolute = layoutStr + offset;
auto relativeOffset = (uintptr_t)(intptr_t)(int32_t)readBytes<intptr_t>(layoutStr, offset);
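// The emitter reserves a full pointer-sized slot for this offset (see the
// addRelativeOffset call in LayoutStringBuilder) so instantiation can later
// overwrite it in place with a complete metadata pointer; only the low 32
// bits carry the signed relative offset, hence the truncation to int32_t
// and sign extension above.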
MetadataAccessor fn;
#if SWIFT_PTRAUTH
fn = (MetadataAccessor)ptrauth_sign_unauthenticated(
(void *)(fnPtr),
(void *)((uintptr_t)absolute + relativeOffset),
ptrauth_key_function_pointer, 0);
#else
fn = (MetadataAccessor)(fnPtr);
fn = (MetadataAccessor)((uintptr_t)absolute + relativeOffset);
#endif
return fn(metadata->getGenericArgs());
@@ -78,7 +81,6 @@ typedef void (*DestrFn)(void*);
struct DestroyFuncAndMask {
DestrFn fn;
uintptr_t mask;
bool isIndirect;
};
@@ -94,27 +96,27 @@ void existential_destroy(OpaqueValue* object) {
}
const DestroyFuncAndMask destroyTable[] = {
{(DestrFn)&skipDestroy, UINTPTR_MAX, false},
{(DestrFn)&swift_errorRelease, UINTPTR_MAX, true},
{(DestrFn)&swift_release, ~heap_object_abi::SwiftSpareBitsMask, true},
{(DestrFn)&swift_unownedRelease, ~heap_object_abi::SwiftSpareBitsMask, true},
{(DestrFn)&swift_weakDestroy, UINTPTR_MAX, false},
{(DestrFn)&swift_unknownObjectRelease, ~heap_object_abi::SwiftSpareBitsMask, true},
{(DestrFn)&swift_unknownObjectUnownedDestroy, UINTPTR_MAX, false},
{(DestrFn)&swift_unknownObjectWeakDestroy, UINTPTR_MAX, false},
{(DestrFn)&swift_bridgeObjectRelease, ~heap_object_abi::SwiftSpareBitsMask, true},
{(DestrFn)&skipDestroy, false},
{(DestrFn)&swift_errorRelease, true},
{(DestrFn)&swift_release, true},
{(DestrFn)&swift_unownedRelease, true},
{(DestrFn)&swift_weakDestroy, false},
{(DestrFn)&swift_unknownObjectRelease, true},
{(DestrFn)&swift_unknownObjectUnownedDestroy, false},
{(DestrFn)&swift_unknownObjectWeakDestroy, false},
{(DestrFn)&swift_bridgeObjectRelease, true},
#if SWIFT_OBJC_INTEROP
{(DestrFn)&_Block_release, UINTPTR_MAX, true},
{(DestrFn)&swift_unknownObjectRelease, UINTPTR_MAX, true},
{(DestrFn)&_Block_release, true},
{(DestrFn)&swift_unknownObjectRelease, true},
#else
{nullptr, UINTPTR_MAX, true},
{nullptr, UINTPTR_MAX, true},
{nullptr, true},
{nullptr, true},
#endif
// TODO: how to handle Custom?
{nullptr, UINTPTR_MAX, true},
{nullptr, UINTPTR_MAX, true},
{nullptr, UINTPTR_MAX, true},
{(DestrFn)&existential_destroy, UINTPTR_MAX, false},
{nullptr, true},
{nullptr, true},
{nullptr, true},
{(DestrFn)&existential_destroy, false},
};
extern "C" void
@@ -135,8 +137,7 @@ swift_generic_destroy(swift::OpaqueValue *address, const Metadata *metadata) {
if (SWIFT_UNLIKELY(tag == RefCountingKind::End)) {
return;
} else if (SWIFT_UNLIKELY(tag == RefCountingKind::Metatype)) {
auto typePtr = readBytes<uintptr_t>(typeLayout, offset);
auto *type = reinterpret_cast<Metadata*>(typePtr);
auto *type = readBytes<const Metadata*>(typeLayout, offset);
type->vw_destroy((OpaqueValue *)(addr + addrOffset));
} else if (SWIFT_UNLIKELY(tag == RefCountingKind::Resilient)) {
auto *type = getResilientTypeMetadata(metadata, typeLayout, offset);
@@ -155,7 +156,6 @@ swift_generic_destroy(swift::OpaqueValue *address, const Metadata *metadata) {
struct RetainFuncAndMask {
void* fn;
uintptr_t mask;
bool isSingle;
};
@@ -172,35 +172,37 @@ typedef void* (*CopyInitFn)(void*, void*);
void* skipRetain(void* ignore) { return nullptr; }
void* existential_initializeWithCopy(OpaqueValue* dest, OpaqueValue* src) {
auto* metadata = getExistentialTypeMetadata(src);
return metadata->vw_initializeBufferWithCopyOfBuffer((ValueBuffer*)dest, (ValueBuffer*)src);
return metadata->vw_initializeBufferWithCopyOfBuffer((ValueBuffer*)dest,
(ValueBuffer*)src);
}
const RetainFuncAndMask retainTable[] = {
{(void*)&skipRetain, UINTPTR_MAX, true},
{(void*)&swift_errorRetain, UINTPTR_MAX, true},
{(void*)&swift_retain, ~heap_object_abi::SwiftSpareBitsMask, true},
{(void*)&swift_unownedRetain, ~heap_object_abi::SwiftSpareBitsMask, true},
{(void*)&swift_weakCopyInit, UINTPTR_MAX, false},
{(void*)&swift_unknownObjectRetain, ~heap_object_abi::SwiftSpareBitsMask, true},
{(void*)&swift_unknownObjectUnownedCopyInit, UINTPTR_MAX, false},
{(void*)&swift_unknownObjectWeakCopyInit, UINTPTR_MAX, false},
{(void*)&swift_bridgeObjectRetain, ~heap_object_abi::SwiftSpareBitsMask, true},
{(void*)&skipRetain, true},
{(void*)&swift_errorRetain, true},
{(void*)&swift_retain, true},
{(void*)&swift_unownedRetain, true},
{(void*)&swift_weakCopyInit, false},
{(void*)&swift_unknownObjectRetain, true},
{(void*)&swift_unknownObjectUnownedCopyInit, false},
{(void*)&swift_unknownObjectWeakCopyInit, false},
{(void*)&swift_bridgeObjectRetain, true},
#if SWIFT_OBJC_INTEROP
{(void*)&Block_copyForwarder, UINTPTR_MAX, false},
{(void*)&objc_retain, UINTPTR_MAX, true},
{(void*)&Block_copyForwarder, false},
{(void*)&objc_retain, true},
#else
{nullptr, UINTPTR_MAX, true},
{nullptr, UINTPTR_MAX, true},
{nullptr, true},
{nullptr, true},
#endif
// TODO: how to handle Custom?
{nullptr, UINTPTR_MAX, true},
{nullptr, UINTPTR_MAX, true},
{nullptr, UINTPTR_MAX, true},
{(void*)&existential_initializeWithCopy, UINTPTR_MAX, false},
{nullptr, true},
{nullptr, true},
{nullptr, true},
{(void*)&existential_initializeWithCopy, false},
};
extern "C" swift::OpaqueValue *
swift_generic_initWithCopy(swift::OpaqueValue *dest, swift::OpaqueValue *src, const Metadata *metadata) {
swift_generic_initWithCopy(swift::OpaqueValue *dest, swift::OpaqueValue *src,
const Metadata *metadata) {
uintptr_t addrOffset = 0;
const uint8_t *typeLayout = metadata->getLayoutString();
@@ -219,8 +221,7 @@ swift_generic_initWithCopy(swift::OpaqueValue *dest, swift::OpaqueValue *src, co
if (SWIFT_UNLIKELY(tag == RefCountingKind::End)) {
return dest;
} else if (SWIFT_UNLIKELY(tag == RefCountingKind::Metatype)) {
auto typePtr = readBytes<uintptr_t>(typeLayout, offset);
auto *type = reinterpret_cast<Metadata*>(typePtr);
auto *type = readBytes<const Metadata*>(typeLayout, offset);
type->vw_initializeWithCopy((OpaqueValue*)((uintptr_t)dest + addrOffset),
(OpaqueValue*)((uintptr_t)src + addrOffset));
} else if (SWIFT_UNLIKELY(tag == RefCountingKind::Resilient)) {
@@ -232,14 +233,16 @@ swift_generic_initWithCopy(swift::OpaqueValue *dest, swift::OpaqueValue *src, co
if (SWIFT_LIKELY(retainFunc.isSingle)) {
((RetainFn)retainFunc.fn)(*(void**)(((uintptr_t)dest + addrOffset)));
} else {
((CopyInitFn)retainFunc.fn)((void*)((uintptr_t)dest + addrOffset), (void*)((uintptr_t)src + addrOffset));
((CopyInitFn)retainFunc.fn)((void*)((uintptr_t)dest + addrOffset),
(void*)((uintptr_t)src + addrOffset));
}
}
}
}
extern "C" swift::OpaqueValue *
swift_generic_initWithTake(swift::OpaqueValue *dest, swift::OpaqueValue *src, const Metadata *metadata) {
swift_generic_initWithTake(swift::OpaqueValue *dest, swift::OpaqueValue *src,
const Metadata *metadata) {
const uint8_t *typeLayout = metadata->getLayoutString();
size_t size = metadata->vw_size();
@@ -260,23 +263,26 @@ swift_generic_initWithTake(swift::OpaqueValue *dest, swift::OpaqueValue *src, co
switch (tag) {
case RefCountingKind::UnknownWeak:
swift_unknownObjectWeakTakeInit((WeakReference*)((uintptr_t)dest + addrOffset),
(WeakReference*)((uintptr_t)src + addrOffset));
swift_unknownObjectWeakTakeInit(
(WeakReference*)((uintptr_t)dest + addrOffset),
(WeakReference*)((uintptr_t)src + addrOffset));
break;
case RefCountingKind::Metatype: {
auto typePtr = readBytes<uintptr_t>(typeLayout, offset);
auto *type = reinterpret_cast<Metadata*>(typePtr);
auto *type = readBytes<const Metadata*>(typeLayout, offset);
if (SWIFT_UNLIKELY(!type->getValueWitnesses()->isBitwiseTakable())) {
type->vw_initializeWithTake((OpaqueValue*)((uintptr_t)dest + addrOffset),
(OpaqueValue*)((uintptr_t)src + addrOffset));
type->vw_initializeWithTake(
(OpaqueValue*)((uintptr_t)dest + addrOffset),
(OpaqueValue*)((uintptr_t)src + addrOffset));
}
break;
}
case RefCountingKind::Existential: {
auto *type = getExistentialTypeMetadata((OpaqueValue*)((uintptr_t)src + addrOffset));
auto *type = getExistentialTypeMetadata(
(OpaqueValue*)((uintptr_t)src + addrOffset));
if (SWIFT_UNLIKELY(!type->getValueWitnesses()->isBitwiseTakable())) {
type->vw_initializeWithTake((OpaqueValue*)((uintptr_t)dest + addrOffset),
(OpaqueValue*)((uintptr_t)src + addrOffset));
type->vw_initializeWithTake(
(OpaqueValue*)((uintptr_t)dest + addrOffset),
(OpaqueValue*)((uintptr_t)src + addrOffset));
}
break;
}
@@ -299,165 +305,50 @@ swift_generic_initWithTake(swift::OpaqueValue *dest, swift::OpaqueValue *src, co
}
extern "C" swift::OpaqueValue *
swift_generic_assignWithCopy(swift::OpaqueValue *dest, swift::OpaqueValue *src, const Metadata *metadata) {
swift_generic_assignWithCopy(swift::OpaqueValue *dest, swift::OpaqueValue *src,
const Metadata *metadata) {
swift_generic_destroy(dest, metadata);
return swift_generic_initWithCopy(dest, src, metadata);
}
extern "C" swift::OpaqueValue *
swift_generic_assignWithTake(swift::OpaqueValue *dest, swift::OpaqueValue *src, const Metadata *metadata) {
swift_generic_assignWithTake(swift::OpaqueValue *dest, swift::OpaqueValue *src,
const Metadata *metadata) {
swift_generic_destroy(dest, metadata);
return swift_generic_initWithTake(dest, src, metadata);
}
extern "C" void
swift_generic_instantiateLayoutString(const uint8_t* layoutStr,
Metadata* type) {
size_t offset = 0;
const auto refCountSize = readBytes<size_t>(layoutStr, offset);
void swift::swift_resolve_resilientAccessors(
uint8_t *layoutStr, size_t layoutStrOffset, const uint8_t *fieldLayoutStr,
size_t refCountBytes, const Metadata *fieldType) {
size_t i = layoutStringHeaderSize;
while (i < (layoutStringHeaderSize + refCountBytes)) {
size_t currentOffset = i;
uint64_t size = readBytes<uint64_t>(fieldLayoutStr, i);
RefCountingKind tag = (RefCountingKind)(size >> 56);
size &= ~(0xffULL << 56);
const size_t genericDescOffset = layoutStringHeaderSize + refCountSize + sizeof(size_t);
offset = genericDescOffset;
size_t genericRefCountSize = 0;
while (true) {
const auto tagAndOffset = readBytes<uint64_t>(layoutStr, offset);
const auto tag = (uint8_t)(tagAndOffset >> 56);
if (tag == 0) {
switch (tag) {
case RefCountingKind::Resilient: {
auto *type = getResilientTypeMetadata(fieldType, fieldLayoutStr,
i);
uint8_t *curPos = (layoutStr + layoutStrOffset + currentOffset - layoutStringHeaderSize);
*((uint64_t*)curPos) =
(((uint64_t)RefCountingKind::Metatype) << 56) | size;
*((Metadata const* *)(curPos + sizeof(uint64_t))) = type;
break;
}
case RefCountingKind::Metatype:
i += sizeof(uintptr_t);
break;
default:
break;
} else if (tag == 1 || tag == 4) {
continue;
} else {
const Metadata *genericType;
if (tag == 2) {
auto index = readBytes<uint32_t>(layoutStr, offset);
genericType = type->getGenericArgs()[index];
} else {
genericType = getResilientTypeMetadata(type, layoutStr, offset);
}
if (genericType->getTypeContextDescriptor()->hasLayoutString()) {
const uint8_t *genericLayoutStr = genericType->getLayoutString();
size_t countOffset = 0;
genericRefCountSize += readBytes<size_t>(genericLayoutStr, countOffset);
} else if (genericType->isClassObject()) {
genericRefCountSize += sizeof(uint64_t);
} else {
genericRefCountSize += sizeof(uint64_t) + sizeof(uintptr_t);
}
}
}
const auto instancedLayoutStrSize = layoutStringHeaderSize + refCountSize + genericRefCountSize + sizeof(size_t) + 1;
uint8_t *instancedLayoutStr = (uint8_t*)calloc(instancedLayoutStrSize, sizeof(uint8_t));
writeBytes<size_t>(instancedLayoutStr, 0, refCountSize + genericRefCountSize);
offset = genericDescOffset;
size_t layoutStrOffset = layoutStringHeaderSize;
size_t instancedLayoutStrOffset = layoutStringHeaderSize;
size_t skipBytes = 0;
while (true) {
const auto tagAndOffset = readBytes<uint64_t>(layoutStr, offset);
const auto tag = (uint8_t)(tagAndOffset >> 56);
const auto sizeOrOffset = tagAndOffset & ~(0xffULL << 56);
if (tag == 0) {
break;
} else if (tag == 1) {
memcpy((void*)(instancedLayoutStr + instancedLayoutStrOffset), (void*)(layoutStr + layoutStrOffset), sizeOrOffset);
if (skipBytes) {
size_t firstRCOffset = instancedLayoutStrOffset;
auto firstRC = readBytes<uint64_t>(instancedLayoutStr, firstRCOffset);
firstRCOffset = instancedLayoutStrOffset;
firstRC += skipBytes;
writeBytes(instancedLayoutStr, firstRCOffset, firstRC);
skipBytes = 0;
}
layoutStrOffset += sizeOrOffset;
instancedLayoutStrOffset += sizeOrOffset;
} else if (tag == 4) {
auto *alignmentType = getResilientTypeMetadata(type, layoutStr, offset);
auto alignment = alignmentType->vw_alignment();
auto alignmentMask = alignment - 1;
skipBytes += sizeOrOffset;
skipBytes += alignmentMask;
skipBytes &= ~alignmentMask;
} else {
skipBytes += sizeOrOffset;
const Metadata *genericType;
if (tag == 2) {
auto index = readBytes<uint32_t>(layoutStr, offset);
genericType = type->getGenericArgs()[index];
} else {
genericType = getResilientTypeMetadata(type, layoutStr, offset);
}
if (genericType->getTypeContextDescriptor()->hasLayoutString()) {
const uint8_t *genericLayoutStr = genericType->getLayoutString();
size_t countOffset = 0;
auto genericRefCountSize = readBytes<size_t>(genericLayoutStr, countOffset);
if (genericRefCountSize > 0) {
memcpy((void*)(instancedLayoutStr + instancedLayoutStrOffset), (void*)(genericLayoutStr + layoutStringHeaderSize), genericRefCountSize);
if (skipBytes) {
size_t firstRCOffset = instancedLayoutStrOffset;
auto firstRC = readBytes<uint64_t>(instancedLayoutStr, firstRCOffset);
firstRC += skipBytes;
writeBytes(instancedLayoutStr, firstRCOffset, firstRC);
skipBytes = 0;
}
instancedLayoutStrOffset += genericRefCountSize;
size_t trailingBytesOffset = layoutStringHeaderSize + genericRefCountSize;
skipBytes += readBytes<size_t>(genericLayoutStr, trailingBytesOffset);
}
} else if (genericType->isClassObject()) {
uint64_t op = static_cast<uint64_t>(RefCountingKind::Unknown) << 56;
op |= (skipBytes & ~(0xffULL << 56));
writeBytes<uint64_t>(instancedLayoutStr, instancedLayoutStrOffset, op);
instancedLayoutStrOffset += sizeof(uint64_t);
skipBytes = sizeof(uintptr_t);
} else {
const ValueWitnessTable *vwt = genericType->getValueWitnesses();
if (vwt->isPOD()) {
skipBytes += vwt->getSize();
continue;
}
uint64_t op = static_cast<uint64_t>(RefCountingKind::Metatype) << 56;
op |= (skipBytes & ~(0xffULL << 56));
writeBytes<uint64_t>(instancedLayoutStr, instancedLayoutStrOffset, op);
instancedLayoutStrOffset += sizeof(uint64_t);
writeBytes<uintptr_t>(instancedLayoutStr, instancedLayoutStrOffset, reinterpret_cast<uintptr_t>(genericType));
instancedLayoutStrOffset += sizeof(uintptr_t);
skipBytes = 0;
}
}
};
// TODO: this should not really happen once we instantiate resilient types
if (instancedLayoutStrOffset == layoutStringHeaderSize) {
free(instancedLayoutStr);
type->setLayoutString(layoutStr);
return;
}
size_t trailingBytesOffset = layoutStringHeaderSize + refCountSize;
skipBytes += readBytes<uint64_t>(layoutStr, trailingBytesOffset);
if (skipBytes > 0) {
writeBytes<size_t>(instancedLayoutStr, layoutStringHeaderSize + refCountSize + genericRefCountSize, skipBytes);
}
type->setLayoutString(instancedLayoutStr);
}
extern "C"
void swift_generic_instantiateLayoutString(const uint8_t* layoutStr,
Metadata* type) {
type->setLayoutString(layoutStr);
}


@@ -49,15 +49,6 @@ enum class RefCountingKind : uint8_t {
// Reserved: 0x81 - 0xFF
};
// The implementation of this should be provided by the stdlib when we link this
// into an executable/library.
SWIFT_RUNTIME_EXPORT
SWIFT_CC(swift)
const Metadata *swift_getTypeByMangledNameInContext(
const char *typeNameStart, size_t typeNameLength,
const TargetContextDescriptor<InProcess> *context,
const void *const *genericArgs);
SWIFT_RUNTIME_EXPORT
void swift_generic_destroy(swift::OpaqueValue *address, const Metadata *metadata);
SWIFT_RUNTIME_EXPORT
@@ -69,7 +60,9 @@ swift::OpaqueValue *swift_generic_initWithCopy(swift::OpaqueValue *dest, swift::
SWIFT_RUNTIME_EXPORT
swift::OpaqueValue *swift_generic_initWithTake(swift::OpaqueValue *dest, swift::OpaqueValue *src, const Metadata *metadata);
SWIFT_RUNTIME_EXPORT
void swift_generic_instantiateLayoutString(const uint8_t* layoutStr, Metadata* type);
void swift_generic_instantiateLayoutString(const uint8_t *layoutStr, Metadata *type);
void swift_resolve_resilientAccessors(uint8_t *layoutStr, size_t layoutStrOffset, const uint8_t *fieldLayoutStr, size_t refCountBytes, const Metadata *fieldType);
} // namespace swift
#endif // SWIFT_BYTECODE_LAYOUTS_H


@@ -2012,7 +2012,7 @@ static void performBasicLayout(TypeLayout &layout,
auto &elt = elements[i];
// Lay out this element.
const TypeLayout *eltLayout = getLayout(elt);
const TypeLayout *eltLayout = getLayout(i, elt);
size = roundUpToAlignMask(size, eltLayout->flags.getAlignmentMask());
// Report this record to the functor.
@@ -2067,7 +2067,7 @@ void swift::swift_getTupleTypeLayout(TypeLayout *result,
*result = TypeLayout();
unsigned numExtraInhabitants = 0;
performBasicLayout(*result, elements, flags.getNumElements(),
[](const TypeLayout *elt) { return elt; },
[](size_t i, const TypeLayout *elt) { return elt; },
[elementOffsets, &numExtraInhabitants]
(size_t i, const TypeLayout *elt, size_t offset) {
if (elementOffsets)
@@ -2202,7 +2202,7 @@ TupleCacheEntry::tryInitialize(Metadata *metadata,
// Perform basic layout on the tuple.
auto layout = getInitialLayoutForValueType();
performBasicLayout(layout, Data.getElements(), Data.NumElements,
[](const TupleTypeMetadata::Element &elt) {
[](size_t i, const TupleTypeMetadata::Element &elt) {
return elt.getTypeLayout();
},
[](size_t i, TupleTypeMetadata::Element &elt, size_t offset) {
@@ -2604,7 +2604,7 @@ void swift::swift_initStructMetadata(StructMetadata *structType,
auto layout = getInitialLayoutForValueType();
performBasicLayout(
layout, fieldTypes, numFields,
[&](const TypeLayout *fieldType) { return fieldType; },
[&](size_t i, const TypeLayout *fieldType) { return fieldType; },
[&](size_t i, const TypeLayout *fieldType, uint32_t offset) {
assignUnlessEqual(fieldOffsets[i], offset);
});
@@ -2629,22 +2629,41 @@ void swift::swift_initStructMetadata(StructMetadata *structType,
vwtable->publishLayout(layout);
}
const TypeLayout unknownWeakTypeLayout =
TypeLayout(sizeof(uint8_t *), alignof(uint8_t *), {}, 0);
enum LayoutStringFlags : uint64_t {
Empty = 0,
// TODO: Track other useful information that can be used to optimize layout
// strings, like the different reference kinds contained in the string or the
// number of ref counting operations (maybe up to 4), so we can use witness
// functions optimized for these cases.
HasRelativePointers = (1ULL << 63),
};
inline bool operator&(LayoutStringFlags a, LayoutStringFlags b) {
return (uint64_t(a) & uint64_t(b)) != 0;
}
inline LayoutStringFlags operator|(LayoutStringFlags a, LayoutStringFlags b) {
return LayoutStringFlags(uint64_t(a) | uint64_t(b));
}
inline LayoutStringFlags &operator|=(LayoutStringFlags &a, LayoutStringFlags b) {
return a = (a | b);
}
void swift::swift_initStructMetadataWithLayoutString(
StructMetadata *structType, StructLayoutFlags layoutFlags, size_t numFields,
const Metadata *const *fieldTypes, uint32_t *fieldOffsets) {
const uint8_t *const *fieldTypes, const uint8_t *fieldTags,
uint32_t *fieldOffsets) {
assert(structType->hasLayoutString());
auto layout = getInitialLayoutForValueType();
performBasicLayout(
layout, fieldTypes, numFields,
[&](const Metadata *fieldType) {
if (((uintptr_t)fieldType) == 0x7) {
return &unknownWeakTypeLayout;
[&](size_t i, const uint8_t *fieldType) {
if (fieldTags[i]) {
return (const TypeLayout*)fieldType;
}
return fieldType->getTypeLayout();
return ((const Metadata*)fieldType)->getTypeLayout();
},
[&](size_t i, const Metadata *fieldType, uint32_t offset) {
[&](size_t i, const uint8_t *fieldType, uint32_t offset) {
assignUnlessEqual(fieldOffsets[i], offset);
});
@@ -2653,89 +2672,182 @@ void swift::swift_initStructMetadataWithLayoutString(
// Compute total combined size of the layout string
size_t refCountBytes = 0;
for (unsigned i = 0; i < numFields; ++i) {
const Metadata *fieldType = fieldTypes[i];
unsigned fieldExtraInhabitantCount =
fieldType->vw_getNumExtraInhabitants();
if (((uintptr_t)fieldType) == 0x7) {
refCountBytes += sizeof(uint64_t);
auto fieldTag = fieldTags[i];
if (fieldTag) {
if (fieldTag <= 0x4) {
refCountBytes += sizeof(uint64_t);
}
const TypeLayout *fieldType = (const TypeLayout*)fieldTypes[i];
unsigned fieldExtraInhabitantCount = fieldType->getNumExtraInhabitants();
if (fieldExtraInhabitantCount > extraInhabitantCount) {
extraInhabitantCount = fieldExtraInhabitantCount;
}
continue;
}
const Metadata *fieldType = (const Metadata*)fieldTypes[i];
unsigned fieldExtraInhabitantCount =
fieldType->vw_getNumExtraInhabitants();
if (fieldExtraInhabitantCount > extraInhabitantCount) {
extraInhabitantCount = fieldExtraInhabitantCount;
}
if (fieldType->getValueWitnesses()->isPOD()) {
if (fieldType->vw_size() == 0) {
continue;
} else if (fieldType->getValueWitnesses()->isPOD()) {
// no extra space required for POD
} else if (fieldType->hasLayoutString()) {
refCountBytes += *(const size_t *)fieldType->getLayoutString();
} else if (fieldType->isClassObject()) {
refCountBytes += *(const size_t *)(fieldType->getLayoutString() +
sizeof(uint64_t));
} else if (fieldType->isClassObject() || fieldType->isAnyExistentialType()) {
refCountBytes += sizeof(uint64_t);
} else {
refCountBytes += sizeof(uint64_t) + sizeof(uintptr_t);
}
}
const size_t fixedLayoutStringSize = sizeof(size_t) + sizeof(uint64_t) * 2;
const size_t layoutStringHeaderSize = sizeof(uint64_t) + sizeof(size_t);
const size_t fixedLayoutStringSize = layoutStringHeaderSize +
sizeof(uint64_t) * 2;
uint8_t *layoutStr = (uint8_t *)malloc(fixedLayoutStringSize + refCountBytes);
uint8_t *layoutStr = (uint8_t *)malloc(fixedLayoutStringSize +
refCountBytes);
((size_t*)layoutStr)[0] = refCountBytes;
*((size_t*)(layoutStr + sizeof(uint64_t))) = refCountBytes;
size_t layoutStrOffset = sizeof(size_t);
size_t offset = 0;
size_t layoutStrOffset = layoutStringHeaderSize;
size_t fullOffset = 0;
size_t previousFieldOffset = 0;
LayoutStringFlags flags = LayoutStringFlags::Empty;
for (unsigned i = 0; i < numFields; ++i) {
const Metadata *fieldType = fieldTypes[i];
size_t unalignedOffset = fullOffset;
if (((uintptr_t)fieldType) == 0x7) {
offset = roundUpToAlignMask(offset, alignof(uint8_t *));
*(uint64_t *)(layoutStr + layoutStrOffset) =
((uint64_t)RefCountingKind::UnknownWeak << 56) | offset;
layoutStrOffset += sizeof(uint64_t);
auto fieldTag = fieldTags[i];
if (fieldTag) {
const TypeLayout *fieldType = (const TypeLayout*)fieldTypes[i];
auto alignmentMask = fieldType->flags.getAlignmentMask();
fullOffset = roundUpToAlignMask(fullOffset, alignmentMask);
if (fieldTag <= 0x4) {
size_t offset = fullOffset - unalignedOffset + previousFieldOffset;
auto tag = fieldTag <= 0x2 ? RefCountingKind::UnknownUnowned :
RefCountingKind::UnknownWeak;
*(uint64_t *)(layoutStr + layoutStrOffset) =
((uint64_t)tag << 56) | offset;
layoutStrOffset += sizeof(uint64_t);
}
fullOffset += fieldType->size;
previousFieldOffset = fieldType->size;
offset = sizeof(uint8_t *);
continue;
}
if (offset) {
uint64_t alignmentMask = fieldType->vw_alignment() - 1;
uint64_t alignedOffset = offset + alignmentMask;
alignedOffset &= ~alignmentMask;
offset = alignedOffset;
}
const Metadata *fieldType = (const Metadata*)fieldTypes[i];
if (fieldType->getValueWitnesses()->isPOD()) {
fullOffset = roundUpToAlignMask(fullOffset, fieldType->vw_alignment() - 1);
size_t offset = fullOffset - unalignedOffset + previousFieldOffset;
if (fieldType->vw_size() == 0) {
continue;
} else if (fieldType->getValueWitnesses()->isPOD()) {
// No need to handle PODs
previousFieldOffset = offset + fieldType->vw_size();
fullOffset += fieldType->vw_size();
} else if (fieldType->hasLayoutString()) {
const uint8_t *fieldLayoutStr = fieldType->getLayoutString();
const size_t refCountBytes = *(const size_t *)fieldLayoutStr;
memcpy(layoutStr + layoutStrOffset, fieldLayoutStr + sizeof(size_t),
refCountBytes);
if (offset) {
*(uint64_t *)(layoutStr + layoutStrOffset + sizeof(size_t)) += offset;
offset = 0;
const LayoutStringFlags fieldFlags =
*(const LayoutStringFlags *)fieldLayoutStr;
const size_t fieldRefCountBytes =
*(const size_t *)(fieldLayoutStr + sizeof(uint64_t));
if (fieldRefCountBytes > 0) {
flags |= fieldFlags;
memcpy(layoutStr + layoutStrOffset, fieldLayoutStr + layoutStringHeaderSize,
fieldRefCountBytes);
if (fieldFlags & LayoutStringFlags::HasRelativePointers) {
swift_resolve_resilientAccessors(layoutStr, layoutStrOffset,
fieldLayoutStr, fieldRefCountBytes,
fieldType);
}
if (offset) {
*(uint64_t *)(layoutStr + layoutStrOffset) += offset;
}
previousFieldOffset = *(const uint64_t*)(fieldLayoutStr + layoutStringHeaderSize + fieldRefCountBytes);
layoutStrOffset += fieldRefCountBytes;
} else {
previousFieldOffset += fieldType->vw_size();
}
layoutStrOffset += refCountBytes;
} else if (fieldType->isClassObject()) {
fullOffset += fieldType->vw_size();
} else if (auto *cls = fieldType->getClassObject()) {
RefCountingKind tag;
if (!cls->isTypeMetadata()) {
#if SWIFT_OBJC_INTEROP
tag = RefCountingKind::ObjC;
#else
tag = RefCountingKind::Unknown;
#endif
} else {
auto *vwt = cls->getValueWitnesses();
if (vwt == &VALUE_WITNESS_SYM(Bo)) {
tag = RefCountingKind::NativeStrong;
} else if (vwt == &VALUE_WITNESS_SYM(BO)) {
#if SWIFT_OBJC_INTEROP
tag = RefCountingKind::ObjC;
#else
tag = RefCountingKind::Unknown;
#endif
} else if (vwt == &VALUE_WITNESS_SYM(Bb)) {
tag = RefCountingKind::Bridge;
} else {
goto metadata;
};
}
*(uint64_t*)(layoutStr + layoutStrOffset) =
((uint64_t)RefCountingKind::Unknown << 56) | offset;
((uint64_t)tag << 56) | offset;
layoutStrOffset += sizeof(uint64_t);
offset = 0;
previousFieldOffset = fieldType->vw_size();
fullOffset += previousFieldOffset;
} else if (fieldType->isAnyExistentialType()) {
auto tag = RefCountingKind::Existential;
*(uint64_t*)(layoutStr + layoutStrOffset) =
((uint64_t)tag << 56) | offset;
layoutStrOffset += sizeof(uint64_t);
previousFieldOffset = fieldType->vw_size();
fullOffset += previousFieldOffset;
} else {
metadata:
*(uint64_t*)(layoutStr + layoutStrOffset) =
((uint64_t)RefCountingKind::Metatype << 56) | offset;
*(uintptr_t*)(layoutStr + layoutStrOffset + sizeof(uint64_t)) = (uintptr_t)fieldType;
*(uintptr_t*)(layoutStr + layoutStrOffset + sizeof(uint64_t)) =
(uintptr_t)fieldType;
layoutStrOffset += sizeof(uint64_t) + sizeof(uintptr_t);
offset = 0;
previousFieldOffset = fieldType->vw_size();
fullOffset += previousFieldOffset;
}
offset += fieldType->vw_size();
}
*(uint64_t *)(layoutStr + layoutStrOffset) = offset;
*(uint64_t *)(layoutStr + layoutStrOffset) = previousFieldOffset;
*(uint64_t *)(layoutStr + layoutStrOffset + sizeof(uint64_t)) = 0;
// we mask out HasRelativePointers, because at this point they have all been
// resolved to metadata pointers
*(uint64_t *)(layoutStr) = ((uint64_t)flags) & ~((uint64_t)LayoutStringFlags::HasRelativePointers);
structType->setLayoutString(layoutStr);
auto *vwtable = getMutableVWTableForInit(structType, layoutFlags);
vwtable->destroy = swift_generic_destroy;
vwtable->initializeWithCopy = swift_generic_initWithCopy;
@@ -2749,8 +2861,6 @@ void swift::swift_initStructMetadataWithLayoutString(
installCommonValueWitnesses(layout, vwtable);
vwtable->publishLayout(layout);
structType->setLayoutString(layoutStr);
}
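// Worked example (illustrative, 64-bit target): for a struct holding a single
// native class reference followed by a trivial Int, the string built above is
// roughly: header { flags = 0, refCountBytes = 8 }, one op word with the
// release kind in the top byte and field offset 0 in the low 56 bits, a
// trailing skip of 8 for the trivial tail, and the terminating 0 word.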
/***************************************************************************/


@@ -153,12 +153,12 @@ CastsTests.test("Cast from Swift existential to Protocol") {
/// test these paths on Linux as well.
protocol P4 {}
CastsTests.test("struct -> Obj-C -> Protocol") {
struct S: P4 {
struct SFUUUHREEEEEFFFF: P4 {
let value: Int
let tracker = LifetimeTracked(13)
}
let a: P4 = S(value: 13)
let a: P4 = SFUUUHREEEEEFFFF(value: 13)
let b = _bridgeAnythingToObjectiveC(a)
let d = b as? Any


@@ -308,6 +308,64 @@ public struct MultiPayloadEnumWrapper {
}
}
public struct ComplexNesting<A, B, C, D> {
let pre: Filler = Filler()
let a: NestedA<A>
let b: NestedB<B>
let c: NestedC<C>
let d: NestedD<D>
struct Filler {
let x: Int16 = 23
let y: Bool = false
}
struct NestedA<T> {
let x: Int = 32
let y: NestedB<T>
let z: Bool = false
init(y: T) {
self.y = NestedB(y: y)
}
}
struct NestedB<T> {
let x: Bool = false
let y: NestedC<T>
let z: Int = 32
init(y: T) {
self.y = NestedC(y: y)
}
}
enum NestedC<T> {
case a(Int, T, Bool)
case b(Int, Bool)
case c
init(y: T) {
self = .a(32, y, false)
}
}
struct NestedD<T> {
let x: Bool = false
let y: T
let z: Int = 32
}
public init(_ a: A, _ b: B, _ c: C, _ d: D) {
self.a = NestedA(y: a)
self.b = NestedB(y: b)
self.c = NestedC(y: c)
self.d = NestedD(y: d)
}
}
@inline(never)
public func testAssign<T>(_ ptr: UnsafeMutablePointer<T>, from x: T) {
ptr.pointee = x


@@ -192,6 +192,34 @@ func testRecursive() {
testRecursive()
func testComplexNesting() {
let ptr = allocateInternalGenericPtr(of: ComplexNesting<Int, TestClass, TestClass, TestClass>.self)
do {
let x = TestClass()
testGenericInit(ptr, to: ComplexNesting<Int, TestClass, TestClass, TestClass>(34, x, x, x))
}
do {
let y = TestClass()
// CHECK: Before deinit
print("Before deinit")
// CHECK-NEXT: TestClass deinitialized!
testGenericAssign(ptr, from: ComplexNesting<Int, TestClass, TestClass, TestClass>(34, y, y, y))
}
// CHECK-NEXT: Before deinit
print("Before deinit")
// CHECK-NEXT: TestClass deinitialized!
testGenericDestroy(ptr, of: ComplexNesting<Int, TestClass, TestClass, TestClass>.self)
ptr.deallocate()
}
testComplexNesting()
enum TestEnum {
case empty
case nonEmpty(TestClass)
@@ -288,3 +316,44 @@ func testGenericResilient() {
}
testGenericResilient()
#if os(macOS)
import Foundation
@objc
final class ObjcClass: NSObject {
deinit {
print("ObjcClass deinitialized!")
}
}
func testGenericObjc() {
let ptr = allocateInternalGenericPtr(of: ObjcClass.self)
do {
let x = ObjcClass()
testGenericInit(ptr, to: x)
}
do {
let y = ObjcClass()
// CHECK-macosx: Before deinit
print("Before deinit")
// CHECK-macosx-NEXT: ObjcClass deinitialized!
testGenericAssign(ptr, from: y)
}
// CHECK-macosx-NEXT: Before deinit
print("Before deinit")
// CHECK-macosx-NEXT: ObjcClass deinitialized!
testGenericDestroy(ptr, of: ObjcClass.self)
ptr.deallocate()
}
testGenericObjc()
#endif