//===--- RefCount.cpp -----------------------------------------------------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//

#include "swift/Runtime/HeapObject.h"

namespace swift {

template <typename RefCountBits>
void RefCounts<RefCountBits>::incrementSlow(RefCountBits oldbits, uint32_t n) {
  if (oldbits.hasSideTable()) {
    // Out-of-line slow path.
    auto side = oldbits.getSideTable();
    side->incrementStrong(n);
  } else {
    // Retain count overflow.
    swift::swift_abortRetainOverflow();
  }
}
template void RefCounts<InlineRefCountBits>::incrementSlow(InlineRefCountBits oldbits, uint32_t n);
template void RefCounts<SideTableRefCountBits>::incrementSlow(SideTableRefCountBits oldbits, uint32_t n);

template <typename RefCountBits>
void RefCounts<RefCountBits>::incrementNonAtomicSlow(RefCountBits oldbits, uint32_t n) {
  if (oldbits.hasSideTable()) {
    // Out-of-line slow path.
    auto side = oldbits.getSideTable();
    side->incrementStrong(n);  // FIXME: can there be a nonatomic impl?
  } else {
    swift::swift_abortRetainOverflow();
  }
}
template void RefCounts<InlineRefCountBits>::incrementNonAtomicSlow(InlineRefCountBits oldbits, uint32_t n);
template void RefCounts<SideTableRefCountBits>::incrementNonAtomicSlow(SideTableRefCountBits oldbits, uint32_t n);

template <typename RefCountBits>
bool RefCounts<RefCountBits>::tryIncrementSlow(RefCountBits oldbits) {
  if (oldbits.hasSideTable())
    return oldbits.getSideTable()->tryIncrement();
  else
    swift::swift_abortRetainOverflow();
}
template bool RefCounts<InlineRefCountBits>::tryIncrementSlow(InlineRefCountBits oldbits);
template bool RefCounts<SideTableRefCountBits>::tryIncrementSlow(SideTableRefCountBits oldbits);

template <typename RefCountBits>
bool RefCounts<RefCountBits>::tryIncrementNonAtomicSlow(RefCountBits oldbits) {
  if (oldbits.hasSideTable())
    return oldbits.getSideTable()->tryIncrementNonAtomic();
  else
    swift::swift_abortRetainOverflow();
}
template bool RefCounts<InlineRefCountBits>::tryIncrementNonAtomicSlow(InlineRefCountBits oldbits);
template bool RefCounts<SideTableRefCountBits>::tryIncrementNonAtomicSlow(SideTableRefCountBits oldbits);

template <typename RefCountBits>
bool RefCounts<RefCountBits>::tryIncrementAndPinSlow(RefCountBits oldbits) {
  if (oldbits.hasSideTable())
    return oldbits.getSideTable()->tryIncrementAndPin();
  else
    swift::swift_abortRetainOverflow();
}
template bool RefCounts<InlineRefCountBits>::tryIncrementAndPinSlow(InlineRefCountBits oldbits);
template bool RefCounts<SideTableRefCountBits>::tryIncrementAndPinSlow(SideTableRefCountBits oldbits);

template <typename RefCountBits>
bool RefCounts<RefCountBits>::tryIncrementAndPinNonAtomicSlow(RefCountBits oldbits) {
  if (oldbits.hasSideTable())
    return oldbits.getSideTable()->tryIncrementAndPinNonAtomic();
  else
    swift::swift_abortRetainOverflow();
}
template bool RefCounts<InlineRefCountBits>::tryIncrementAndPinNonAtomicSlow(InlineRefCountBits oldbits);
template bool RefCounts<SideTableRefCountBits>::tryIncrementAndPinNonAtomicSlow(SideTableRefCountBits oldbits);

// Return an object's side table, allocating it if necessary.
// Returns null if the object is deiniting.
// SideTableRefCountBits specialization intentionally does not exist.
template <>
HeapObjectSideTableEntry* RefCounts<InlineRefCountBits>::allocateSideTable()
{
  auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);

  // Preflight failures before allocating a new side table.
  if (oldbits.hasSideTable()) {
    // Already have a side table. Return it.
    return oldbits.getSideTable();
  } else if (oldbits.getIsDeiniting()) {
    // Already past the start of deinit. Do nothing.
    return nullptr;
  }

  // Preflight passed. Allocate a side table.
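  // The new table is published below with a compare-and-swap; if another
  // thread installed a side table in the meantime, the entry allocated here
  // is deleted and the existing one is returned instead.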
  // FIXME: custom side table allocator
  HeapObjectSideTableEntry *side =
      new HeapObjectSideTableEntry(getHeapObject());

  auto newbits = InlineRefCountBits(side);

  do {
    if (oldbits.hasSideTable()) {
      // Already have a side table. Return it and delete ours.
      // Read before delete to streamline barriers.
      auto result = oldbits.getSideTable();
      delete side;
      return result;
    } else if (oldbits.getIsDeiniting()) {
      // Already past the start of deinit. Do nothing.
      return nullptr;
    }

    side->initRefCounts(oldbits);

  } while (!refCounts.compare_exchange_weak(oldbits, newbits,
                                            std::memory_order_release,
                                            std::memory_order_relaxed));
  return side;
}

// SideTableRefCountBits specialization intentionally does not exist.
template <>
HeapObjectSideTableEntry* RefCounts<InlineRefCountBits>::formWeakReference()
{
  auto side = allocateSideTable();
  if (side)
    return side->incrementWeak();
  else
    return nullptr;
}

} // namespace swift