[Runtime] Fix unowned refcount overflow to side table during deinit.

On 32-bit, the unowned refcount has 7 inline bits, which overflow into a 31-bit count in the side table. Overflowing the inline count during deinit on an object that didn't already have a side table would crash, because the code assumed that creating a side table during deinit was never allowed.
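
To make the failure mode concrete, the following is a small self-contained model of the scheme described above. Every name in it (InlineState, SideTable, spillToSideTable, the model unownedRetain) is hypothetical and only mimics the control flow, not the runtime's real data layout. The old code effectively behaved as if every caller passed "fail if deiniting", so an unowned-retain overflow inside deinit had no side table to spill into and crashed; the fix lets the unowned-overflow path allocate one:

// Standalone model of "inline unowned count spills to the side table".
// All names here are hypothetical; only the control flow mirrors the fix.
#include <cassert>
#include <cstdint>
#include <cstdio>

struct SideTable {
  uint64_t unownedCount = 0;
};

struct InlineState {
  uint32_t inlineUnowned = 0;   // 7 usable bits on 32-bit targets
  bool isDeiniting = false;
  SideTable *side = nullptr;
};

static const uint32_t maxInlineUnowned = (1u << 7) - 1;   // 127

// Old behavior: always refuse to create a side table once deinit has started.
// New behavior: only weak-reference formation refuses (failIfDeiniting=true);
// the unowned-overflow slow path passes false and may allocate during deinit.
SideTable *spillToSideTable(InlineState &s, bool failIfDeiniting) {
  if (s.side)
    return s.side;
  if (failIfDeiniting && s.isDeiniting)
    return nullptr;
  s.side = new SideTable;
  s.side->unownedCount = s.inlineUnowned;
  return s.side;
}

void unownedRetain(InlineState &s) {
  if (s.side) {
    s.side->unownedCount++;
    return;
  }
  if (s.inlineUnowned == maxInlineUnowned) {
    // The pre-fix equivalent always refused here and got back nullptr in deinit.
    SideTable *side = spillToSideTable(s, /*failIfDeiniting=*/false);
    assert(side && "unowned overflow had nowhere to go");
    side->unownedCount++;
    return;
  }
  s.inlineUnowned++;
}

int main() {
  InlineState s;
  s.isDeiniting = true;   // simulate the overflow happening inside deinit
  for (uint32_t i = 0; i <= maxInlineUnowned; i++)
    unownedRetain(s);
  std::printf("spilled unowned count: %llu\n",
              (unsigned long long)s.side->unownedCount);
  delete s.side;
  return 0;
}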

(64-bit has 31 bits both inline and in the side table, so overflowing the inline count immediately overflows the side table as well; there's no change in behavior there.)
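
For reference, the limits those widths imply, written out as a compilable sketch (the constant names are illustrative, not the runtime's real identifiers; the runtime computes these bounds from its refcount bitfield layout):

#include <cstdint>

// Illustrative constants only; not the runtime's real identifiers.
// 32-bit targets: 7-bit inline unowned count, 31-bit side-table count.
constexpr uint64_t inlineUnownedMax32 = (1ULL << 7) - 1;    // 127
constexpr uint64_t sideUnownedMax     = (1ULL << 31) - 1;   // 2,147,483,647

// 64-bit targets: the inline count is already 31 bits wide, the same as the
// side-table count, so overflowing it overflows the side table too.
constexpr uint64_t inlineUnownedMax64 = (1ULL << 31) - 1;

static_assert(inlineUnownedMax32 < sideUnownedMax,
              "a 32-bit inline overflow still fits in the side table");
static_assert(inlineUnownedMax64 == sideUnownedMax,
              "a 64-bit inline overflow cannot be absorbed by the side table");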

rdar://problem/33765960
Author: Mike Ash
Date:   2018-01-25 22:26:10 -05:00
Parent: 9c37cb8636
Commit: 146833c9b5

4 changed files with 62 additions and 10 deletions

@@ -1265,12 +1265,16 @@ class RefCounts {
   // Return weak reference count.
   // Note that this is not equal to the number of outstanding weak pointers.
   uint32_t getWeakCount() const;
+  HeapObjectSideTableEntry *getSideTable() const {
+    auto bits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
+    return bits.getSideTable();
+  }
   private:
   HeapObject *getHeapObject();
-  HeapObjectSideTableEntry* allocateSideTable();
+  HeapObjectSideTableEntry* allocateSideTable(bool failIfDeiniting);
 };
 typedef RefCounts<InlineRefCountBits> InlineRefCounts;
@@ -1453,6 +1457,10 @@ class HeapObjectSideTableEntry {
   uint32_t getWeakCount() const {
     return refCounts.getWeakCount();
   }
+  HeapObjectSideTableEntry *getSideTable() {
+    return refCounts.getSideTable();
+  }
 };

@@ -89,7 +89,7 @@ template bool RefCounts<SideTableRefCountBits>::tryIncrementAndPinNonAtomicSlow(
 // Returns null if the object is deiniting.
 // SideTableRefCountBits specialization intentionally does not exist.
 template <>
-HeapObjectSideTableEntry* RefCounts<InlineRefCountBits>::allocateSideTable()
+HeapObjectSideTableEntry* RefCounts<InlineRefCountBits>::allocateSideTable(bool failIfDeiniting)
 {
   auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
@@ -98,7 +98,7 @@ HeapObjectSideTableEntry* RefCounts<InlineRefCountBits>::allocateSideTable()
     // Already have a side table. Return it.
     return oldbits.getSideTable();
   }
-  else if (oldbits.getIsDeiniting()) {
+  else if (failIfDeiniting && oldbits.getIsDeiniting()) {
     // Already past the start of deinit. Do nothing.
     return nullptr;
   }
@@ -118,7 +118,7 @@ HeapObjectSideTableEntry* RefCounts<InlineRefCountBits>::allocateSideTable()
       delete side;
       return result;
     }
-    else if (oldbits.getIsDeiniting()) {
+    else if (failIfDeiniting && oldbits.getIsDeiniting()) {
       // Already past the start of deinit. Do nothing.
       return nullptr;
     }
@@ -136,7 +136,7 @@ HeapObjectSideTableEntry* RefCounts<InlineRefCountBits>::allocateSideTable()
 template <>
 HeapObjectSideTableEntry* RefCounts<InlineRefCountBits>::formWeakReference()
 {
-  auto side = allocateSideTable();
+  auto side = allocateSideTable(true);
   if (side)
     return side->incrementWeak();
   else
@@ -145,7 +145,7 @@ HeapObjectSideTableEntry* RefCounts<InlineRefCountBits>::formWeakReference()
 template <typename RefCountBits>
 void RefCounts<RefCountBits>::incrementUnownedSlow(uint32_t n) {
-  auto side = allocateSideTable();
+  auto side = allocateSideTable(false);
   if (side)
     return side->incrementUnowned(n);
   // Overflow but side table allocation failed.

@@ -10,6 +10,8 @@
 //
 //===----------------------------------------------------------------------===//
+#include <functional>
 #include "swift/Runtime/HeapObject.h"
 #include "swift/Runtime/Metadata.h"
 #include "swift/Demangling/ManglingMacros.h"
@@ -42,8 +44,11 @@ struct TestObject : HeapObject {
   // On exit from deinit: is destroyed
   WeakReference *WeakRef;
+  // Callback invoked during the object's deinit.
+  std::function<void()> DeinitCallback;
   TestObject(size_t *addr, size_t value)
-    : Addr(addr), Value(value), CheckLifecycle(false), WeakRef(nullptr)
+    : Addr(addr), Value(value), CheckLifecycle(false), WeakRef(nullptr), DeinitCallback(nullptr)
   { }
 };
@@ -91,8 +96,13 @@ static SWIFT_CC(swift) void deinitTestObject(SWIFT_CONTEXT HeapObject *_object)
     }
   }
+  if (object->DeinitCallback != nullptr) {
+    object->DeinitCallback();
+  }
   *object->Addr = object->Value;
   object->Addr = nullptr;
   object->~TestObject();
   swift_deallocObject(object, sizeof(TestObject), alignof(TestObject) - 1);
 }
@@ -231,8 +241,6 @@ static void unownedReleaseALot(TestObject *object, uint64_t count) {
 }
 // Maximum legal unowned retain count. 31 bits with no implicit +1.
-// (FIXME 32-bit architecture has 7 bit inline count;
-// that bound does not yet have its own tests.)
 const uint64_t maxURC = (1ULL << (32 - 1)) - 1;
 TEST(LongRefcountingTest, unowned_retain_max) {
@@ -1047,3 +1055,39 @@ TEST(LongRefcountingTest, lifecycle_live_deiniting_deinited_freed_with_side_Deat
   EXPECT_UNALLOCATED(side);
   EXPECT_EQ(0, weakValue);
 }
+TEST(LongRefcountingTest, lifecycle_live_deiniting_urc_overflow_to_side) {
+  ::testing::FLAGS_gtest_death_test_style = "threadsafe";
+  uint64_t urcOverflowCount;
+  switch(sizeof(void *)) {
+  // 32-bit has a 7-bit inline refcount that overflows into the side table.
+  case 4: urcOverflowCount = 1 << 7; break;
+  // 64-bit can't store any extra count in the side table, so there's nothing to test.
+  case 8: return;
+  // We should never see any other bitness.
+  default: FAIL(); break;
+  }
+  size_t deinited = 0;
+  auto object = allocTestObject(&deinited, 1);
+  HeapObjectSideTableEntry *side = nullptr;
+  object->DeinitCallback = [&]() {
+    for (uint64_t i = 0; i < urcOverflowCount; i++) {
+      swift_unownedRetain(object);
+    }
+    side = object->refCounts.getSideTable();
+    EXPECT_ALLOCATED(side);
+    for (uint64_t i = 0; i < urcOverflowCount; i++) {
+      swift_unownedRelease(object);
+    }
+  };
+  swift_release(object);
+  EXPECT_UNALLOCATED(object);
+  EXPECT_UNALLOCATED(side);
+}