Adopt malloc_type for allocating Swift objects from runtime (#63226)

Adopt malloc_type for allocating Swift objects from runtime

Radar-Id: rdar://98998492
This commit is contained in:
Julian Lettner
2023-02-02 17:08:41 -08:00
committed by GitHub
parent 2b542cba37
commit 5ad0331e55
4 changed files with 149 additions and 4 deletions

View File

@@ -129,6 +129,16 @@
#error Masking ISAs are incompatible with opaque ISAs
#endif
#if defined(__APPLE__) && defined(__LP64__) && __has_include(<malloc_type_private.h>) && SWIFT_STDLIB_HAS_DARWIN_LIBMALLOC
# include <TargetConditionals.h>
# if TARGET_OS_IOS && !TARGET_OS_SIMULATOR
# define SWIFT_STDLIB_HAS_MALLOC_TYPE 1
# endif
#endif
#ifndef SWIFT_STDLIB_HAS_MALLOC_TYPE
# define SWIFT_STDLIB_HAS_MALLOC_TYPE 0
#endif
/// Which bits in the class metadata are used to distinguish Swift classes
/// from ObjC classes?
#ifndef SWIFT_CLASS_IS_SWIFT_MASK

View File

@@ -32,6 +32,11 @@ namespace swift {
SWIFT_EXTERN_C SWIFT_RETURNS_NONNULL SWIFT_NODISCARD SWIFT_RUNTIME_EXPORT_ATTRIBUTE
void *swift_slowAlloc(size_t bytes, size_t alignMask);
using MallocTypeId = unsigned long long;
SWIFT_RETURNS_NONNULL SWIFT_NODISCARD
void *swift_slowAllocTyped(size_t bytes, size_t alignMask, MallocTypeId typeId);
// If the caller cannot promise to zero the object during destruction,
// then call these corresponding APIs:
SWIFT_RUNTIME_EXPORT

View File

@@ -73,7 +73,11 @@ static inline malloc_zone_t *DEFAULT_ZONE() {
// the use of AlignedAlloc. This allows manually allocated memory to always
// be deallocated with AlignedFree without knowledge of its original allocation
// alignment.
//
/// Map an alignment mask to a concrete alignment value.
///
/// An all-ones mask is the sentinel for "unknown alignment" and resolves to
/// the runtime's minimum allocation alignment; any other mask encodes
/// (alignment - 1), so the alignment is mask + 1.
static size_t computeAlignment(size_t alignMask) {
  if (alignMask == ~(size_t(0)))
    return _swift_MinAllocationAlignment;
  return alignMask + 1;
}
// For alignMask > (_minAllocationAlignment-1)
// i.e. alignment == 0 || alignment > _minAllocationAlignment:
// The runtime must use AlignedAlloc, and the standard library must
@@ -93,15 +97,32 @@ void *swift::swift_slowAlloc(size_t size, size_t alignMask) {
p = malloc(size);
#endif
} else {
size_t alignment = (alignMask == ~(size_t(0)))
? _swift_MinAllocationAlignment
: alignMask + 1;
size_t alignment = computeAlignment(alignMask);
p = AlignedAlloc(size, alignment);
}
if (!p) swift::crash("Could not allocate memory.");
return p;
}
/// Typed variant of swift_slowAlloc: performs the same allocation but tags
/// it with `typeId` so the malloc implementation can segregate allocations
/// by type (typed memory operations).
///
/// Falls back to the untyped swift_slowAlloc path when malloc_type support
/// is compiled out, or at runtime on OS versions without the typed entry
/// points.
void *swift::swift_slowAllocTyped(size_t size, size_t alignMask,
                                  MallocTypeId typeId) {
#if SWIFT_STDLIB_HAS_MALLOC_TYPE
  // NOTE(review): 9998 is a placeholder availability version — presumably to
  // be replaced once the OS releases shipping malloc_type are known; confirm.
  if (__builtin_available(macOS 9998, iOS 9998, tvOS 9998, watchOS 9998, *)) {
    void *p;
    // This check also forces "default" alignment to use malloc_memalign().
    if (alignMask <= MALLOC_ALIGN_MASK) {
      p = malloc_type_zone_malloc(DEFAULT_ZONE(), size, typeId);
    } else {
      size_t alignment = computeAlignment(alignMask);
      p = malloc_type_zone_memalign(DEFAULT_ZONE(), alignment, size, typeId);
    }
    // Match swift_slowAlloc: allocation failure is fatal in the runtime.
    if (!p) swift::crash("Could not allocate memory.");
    return p;
  }
#endif
  return swift_slowAlloc(size, alignMask);
}
// Unknown alignment is specified by passing alignMask == ~(size_t(0)), forcing
// the AlignedFree deallocation path for unknown alignment. The memory
// deallocated with unknown alignment must have been allocated with either

View File

@@ -44,6 +44,9 @@
# include "swift/Runtime/ObjCBridge.h"
# include <dlfcn.h>
#endif
#if SWIFT_STDLIB_HAS_MALLOC_TYPE
# include <malloc_type_private.h>
#endif
#include "Leaks.h"
using namespace swift;
@@ -115,12 +118,118 @@ static HeapObject *_swift_tryRetain_(HeapObject *object)
return _ ## name ## _ args; \
} while(0)
#if SWIFT_STDLIB_HAS_MALLOC_TYPE
/// Build a malloc_type_summary_t describing the kind and pointer layout of a
/// heap allocation for the given metadata, so the malloc implementation can
/// classify it for typed memory operations.
static malloc_type_summary_t
computeMallocTypeSummary(const HeapMetadata *heapMetadata) {
  assert(isHeapMetadataKind(heapMetadata->getKind()));
  auto *classMetadata = heapMetadata->getClassObject();
  auto *typeDesc = heapMetadata->getTypeContextDescriptor();
  malloc_type_summary_t summary = {};

  // Objc
  if (classMetadata && classMetadata->isPureObjC()) {
    summary.type_kind = MALLOC_TYPE_KIND_OBJC;
    return summary;
  }

  // Runtime internal and unclassified
  if (!typeDesc) {
    summary.type_kind = MALLOC_TYPE_KIND_CXX;
    return summary;
  }

  // Swift
  summary.type_kind = MALLOC_TYPE_KIND_SWIFT;

  // Scan the field descriptors: any indirect case makes the payload
  // pointer-bearing rather than plain data.
  bool isGenericData = true;
  for (auto &field : *typeDesc->Fields.get()) {
    if (field.isIndirectCase()) {
      isGenericData = false;
      if (field.isVar())
        summary.layout_semantics.data_pointer = true;
      else
        summary.layout_semantics.immutable_pointer = true;
    }
  }

  // FIX: guard against a null classMetadata. Only the pure-ObjC branch above
  // established non-null; non-class heap metadata that carries a type
  // descriptor would otherwise dereference null here.
  if (classMetadata &&
      (classMetadata->Flags & ClassFlags::UsesSwiftRefcounting)) {
    summary.layout_semantics.reference_count = true;
  } else {
    summary.layout_semantics.generic_data = isGenericData;
  }

  return summary;

  // FIXME: these are all the things we are potentially interested in
  // typedef struct {
  //   bool data_pointer : 1;
  //   bool struct_pointer : 1;
  //   bool immutable_pointer : 1;
  //   bool anonymous_pointer : 1;
  //   bool reference_count : 1;
  //   bool resource_handle : 1;
  //   bool spatial_bounds : 1;
  //   bool tainted_data : 1;
  //   bool generic_data : 1;
  //   uint16_t unused : 7;
  // } malloc_type_layout_semantics_t;
}
/// One entry in the metadata-hash -> malloc type-descriptor cache.
///
/// malloc_type_descriptor_t is a union (layout reproduced below): the 64-bit
/// type_id aliases the {hash, summary} pair, so storing `desc` records both
/// the cache key (hash) and the cached value (summary/type_id) in one field.
// union malloc_type_descriptor_t {
//   struct {
//     uint32_t hash;
//     malloc_type_summary_t summary;
//   };
//   malloc_type_id_t type_id;
// };
struct MallocTypeCacheEntry {
  malloc_type_descriptor_t desc;

  // Lookup protocol used by ConcurrentReadableHashMap: entries hash to, and
  // are matched against, the 32-bit key stored in the descriptor.
  friend llvm::hash_code hash_value(const MallocTypeCacheEntry &entry) {
    return hash_value(entry.desc.hash);
  }
  bool matchesKey(uint32_t key) const { return desc.hash == key; }
};

// Process-wide cache of computed malloc type descriptors.
static ConcurrentReadableHashMap<MallocTypeCacheEntry> MallocTypes;
/// Return the malloc type id for a heap metadata pointer, computing and
/// caching its type summary on first use.
///
/// The cache is keyed by a 32-bit fold of the metadata pointer, so distinct
/// metadata may collide on a key; the first descriptor inserted wins.
static malloc_type_id_t getMallocTypeId(const HeapMetadata *heapMetadata) {
  // Fold the 64-bit metadata pointer into the 32-bit cache key.
  uint64_t bits = reinterpret_cast<uint64_t>(heapMetadata);
  uint32_t cacheKey = static_cast<uint32_t>((bits >> 32) ^ bits);

  // Fast path: read-only snapshot lookup.
  {
    auto snapshot = MallocTypes.snapshot();
    if (auto *hit = snapshot.find(cacheKey))
      return hit->desc.type_id;
  }

  // Slow path: compute the summary, then publish it (first writer wins).
  malloc_type_descriptor_t fresh = {};
  fresh.hash = cacheKey;
  fresh.summary = computeMallocTypeSummary(heapMetadata);

  MallocTypes.getOrInsert(
      cacheKey, [fresh](MallocTypeCacheEntry *entry, bool created) {
        if (created)
          entry->desc = fresh;
        return true;
      });

  return fresh.type_id;
}
#endif // SWIFT_STDLIB_HAS_MALLOC_TYPE
static HeapObject *_swift_allocObject_(HeapMetadata const *metadata,
size_t requiredSize,
size_t requiredAlignmentMask) {
assert(isAlignmentMask(requiredAlignmentMask));
#if SWIFT_STDLIB_HAS_MALLOC_TYPE
auto object = reinterpret_cast<HeapObject *>(swift_slowAllocTyped(
requiredSize, requiredAlignmentMask, getMallocTypeId(metadata)));
#else
auto object = reinterpret_cast<HeapObject *>(
swift_slowAlloc(requiredSize, requiredAlignmentMask));
#endif
// NOTE: this relies on the C++17 guaranteed semantics of no null-pointer
// check on the placement new allocator which we have observed on Windows,