From 8f35a3eff7cfb5a931b542b6cfb7d609ed3b2194 Mon Sep 17 00:00:00 2001 From: Erik Eckstein Date: Fri, 24 Aug 2018 12:53:00 -0700 Subject: [PATCH] runtime: remove pinning in reference counting and pinning runtime entry points rdar://problem/35401813 --- docs/Runtime.md | 6 - include/swift/Runtime/HeapObject.h | 45 --- include/swift/Runtime/RuntimeFunctions.def | 58 ---- stdlib/public/SwiftShims/RefCount.h | 270 ++++-------------- stdlib/public/runtime/HeapObject.cpp | 40 --- stdlib/public/runtime/RefCount.cpp | 21 -- .../runtime/RuntimeInvocationsTracking.def | 4 - stdlib/public/runtime/SwiftObject.mm | 56 ---- unittests/runtime/Refcounting.cpp | 86 ------ 9 files changed, 49 insertions(+), 537 deletions(-) diff --git a/docs/Runtime.md b/docs/Runtime.md index 4ae81b47fc20f..2af89aa7da813 100644 --- a/docs/Runtime.md +++ b/docs/Runtime.md @@ -108,7 +108,6 @@ Returns a random number. Only used by allocation profiling tools. 000000000001cee0 T _swift_release_n 000000000001ce30 T _swift_retain 000000000001ce50 T _swift_retain_n -000000000001d140 T _swift_tryPin 000000000001d240 T _swift_tryRetain 0000000000027b10 T _swift_unknownObjectRelease 0000000000027a70 T _swift_unknownObjectRelease_n @@ -140,7 +139,6 @@ Returns a random number. Only used by allocation profiling tools. 000000000001d2b0 T _swift_unownedRetainStrong 000000000001d310 T _swift_unownedRetainStrongAndRelease 000000000001d060 T _swift_unownedRetain_n -000000000001d1b0 T _swift_unpin 000000000001ca20 T _swift_verifyEndOfLifetime 000000000001d680 T _swift_weakAssign 000000000001d830 T _swift_weakCopyAssign @@ -154,10 +152,6 @@ Returns a random number. Only used by allocation profiling tools. 000000000002afe0 T _swift_isUniquelyReferencedNonObjC 000000000002af50 T _swift_isUniquelyReferencedNonObjC_nonNull 000000000002b060 T _swift_isUniquelyReferencedNonObjC_nonNull_bridgeObject -000000000002b200 T _swift_isUniquelyReferencedOrPinnedNonObjC_nonNull -000000000002b130 T _swift_isUniquelyReferencedOrPinnedNonObjC_nonNull_bridgeObject -000000000002b2f0 T _swift_isUniquelyReferencedOrPinned_native -000000000002b290 T _swift_isUniquelyReferencedOrPinned_nonNull_native 000000000002af00 T _swift_isUniquelyReferenced_native 000000000002aea0 T _swift_isUniquelyReferenced_nonNull_native 00000000000????? T _swift_setDeallocating diff --git a/include/swift/Runtime/HeapObject.h b/include/swift/Runtime/HeapObject.h index 0d8dc9f387310..1eb4f823feb14 100644 --- a/include/swift/Runtime/HeapObject.h +++ b/include/swift/Runtime/HeapObject.h @@ -168,29 +168,6 @@ HeapObject *swift_tryRetain(HeapObject *object); SWIFT_RUNTIME_EXPORT bool swift_isDeallocating(HeapObject *object); -/// Attempts to atomically pin an object and increment its reference -/// count. Returns nil if the object was already pinned. -/// -/// The standard protocol is that the caller is responsible for -/// calling swift_unpin on the return value. -/// -/// The object reference may not be nil. -SWIFT_RUNTIME_EXPORT -HeapObject *swift_tryPin(HeapObject *object); - -SWIFT_RUNTIME_EXPORT -HeapObject *swift_nonatomic_tryPin(HeapObject *object); - -/// Given that an object is pinned, atomically unpin it and decrement -/// the reference count. -/// -/// The object reference may be nil (to simplify the protocol). -SWIFT_RUNTIME_EXPORT -void swift_unpin(HeapObject *object); - -SWIFT_RUNTIME_EXPORT -void swift_nonatomic_unpin(HeapObject *object); - /// Atomically decrements the retain count of an object. 
If the /// retain count reaches zero, the object is destroyed as follows: /// @@ -244,44 +221,22 @@ bool swift_isUniquelyReferencedNonObjC(const void *); SWIFT_RUNTIME_EXPORT bool swift_isUniquelyReferencedNonObjC_nonNull(const void *); -/// Is this non-null pointer a reference to an object that uses Swift -/// reference counting and is either uniquely referenced or pinned? -SWIFT_RUNTIME_EXPORT -bool swift_isUniquelyReferencedOrPinnedNonObjC_nonNull(const void *); - /// Is this non-null BridgeObject a unique reference to an object /// that uses Swift reference counting? SWIFT_RUNTIME_EXPORT bool swift_isUniquelyReferencedNonObjC_nonNull_bridgeObject( uintptr_t bits); -/// Is this non-null BridgeObject a unique or pinned reference to an -/// object that uses Swift reference counting? -SWIFT_RUNTIME_EXPORT -bool swift_isUniquelyReferencedOrPinnedNonObjC_nonNull_bridgeObject( - uintptr_t bits); - /// Is this native Swift pointer a non-null unique reference to /// an object? SWIFT_RUNTIME_EXPORT bool swift_isUniquelyReferenced_native(const struct HeapObject *); -/// Is this native Swift pointer a non-null unique or pinned reference -/// to an object? -SWIFT_RUNTIME_EXPORT -bool swift_isUniquelyReferencedOrPinned_native(const struct HeapObject *); - /// Is this non-null native Swift pointer a unique reference to /// an object? SWIFT_RUNTIME_EXPORT bool swift_isUniquelyReferenced_nonNull_native(const struct HeapObject *); -/// Does this non-null native Swift pointer refer to an object that -/// is either uniquely referenced or pinned? -SWIFT_RUNTIME_EXPORT -bool swift_isUniquelyReferencedOrPinned_nonNull_native( - const struct HeapObject *); - /// Is this native Swift pointer non-null and has a reference count greater than /// one. /// This runtime call will print an error message with file name and location if diff --git a/include/swift/Runtime/RuntimeFunctions.def b/include/swift/Runtime/RuntimeFunctions.def index 5c687982fd3de..43833f6791c2e 100644 --- a/include/swift/Runtime/RuntimeFunctions.def +++ b/include/swift/Runtime/RuntimeFunctions.def @@ -258,12 +258,6 @@ FUNCTION(NativeNonAtomicStrongRetain, swift_nonatomic_retain, C_CC, ARGS(RefCountedPtrTy), ATTRS(NoUnwind, FirstParamReturned)) -// void *swift_tryPin(void *ptr); -FUNCTION(NativeTryPin, swift_tryPin, C_CC, - RETURNS(RefCountedPtrTy), - ARGS(RefCountedPtrTy), - ATTRS(NoUnwind)) - // void swift_nonatomic_release(void *ptr); FUNCTION(NativeNonAtomicStrongRelease, swift_nonatomic_release, C_CC, RETURNS(VoidTy), @@ -276,30 +270,12 @@ FUNCTION(NativeTryRetain, swift_tryRetain, C_CC, ARGS(RefCountedPtrTy), ATTRS(NoUnwind)) -// void swift_unpin(void *ptr); -FUNCTION(NativeUnpin, swift_unpin, C_CC, - RETURNS(VoidTy), - ARGS(RefCountedPtrTy), - ATTRS(NoUnwind)) - // bool swift_isDeallocating(void *ptr); FUNCTION(IsDeallocating, swift_isDeallocating, C_CC, RETURNS(Int1Ty), ARGS(RefCountedPtrTy), ATTRS(NoUnwind, ZExt)) -// void *swift_nonatomic_tryPin(void *ptr); -FUNCTION(NonAtomicNativeTryPin, swift_nonatomic_tryPin, C_CC, - RETURNS(RefCountedPtrTy), - ARGS(RefCountedPtrTy), - ATTRS(NoUnwind)) - -// void swift_nonatomic_unpin(void *ptr); -FUNCTION(NonAtomicNativeUnpin, swift_nonatomic_unpin, C_CC, - RETURNS(VoidTy), - ARGS(RefCountedPtrTy), - ATTRS(NoUnwind)) - // void *swift_unknownObjectRetain(void *ptr); FUNCTION(UnknownObjectRetain, swift_unknownObjectRetain, C_CC, RETURNS(UnknownRefCountedPtrTy), @@ -459,14 +435,6 @@ FUNCTION(IsUniquelyReferencedNonObjC_nonNull, ARGS(UnknownRefCountedPtrTy), ATTRS(NoUnwind, ZExt)) -// bool 
swift_isUniquelyReferencedOrPinnedNonObjC_nonNull(const void *);
-FUNCTION(IsUniquelyReferencedOrPinnedNonObjC_nonNull,
-         swift_isUniquelyReferencedOrPinnedNonObjC_nonNull,
-         C_CC,
-         RETURNS(Int1Ty),
-         ARGS(UnknownRefCountedPtrTy),
-         ATTRS(NoUnwind, ZExt))
-
 // bool swift_isUniquelyReferencedNonObjC_nonNull_bridgeObject(
 //   uintptr_t bits);
 FUNCTION(IsUniquelyReferencedNonObjC_nonNull_bridgeObject,
@@ -476,15 +444,6 @@ FUNCTION(IsUniquelyReferencedNonObjC_nonNull_bridgeObject,
          ARGS(BridgeObjectPtrTy),
          ATTRS(NoUnwind, ZExt))
 
-// bool swift_isUniquelyReferencedOrPinnedNonObjC_nonNull_bridgeObject(
-//   uintptr_t bits);
-FUNCTION(IsUniquelyReferencedOrPinnedNonObjC_nonNull_bridgeObject,
-         swift_isUniquelyReferencedOrPinnedNonObjC_nonNull_bridgeObject,
-         C_CC,
-         RETURNS(Int1Ty),
-         ARGS(BridgeObjectPtrTy),
-         ATTRS(NoUnwind, ZExt))
-
 // bool swift_isUniquelyReferenced_native(const struct HeapObject *);
 FUNCTION(IsUniquelyReferenced_native, swift_isUniquelyReferenced_native,
          C_CC,
@@ -492,14 +451,6 @@ FUNCTION(IsUniquelyReferenced_native, swift_isUniquelyReferenced_native,
          ARGS(RefCountedPtrTy),
          ATTRS(NoUnwind, ZExt))
 
-// bool swift_isUniquelyReferencedOrPinned_native(const struct HeapObject *);
-FUNCTION(IsUniquelyReferencedOrPinned_native,
-         swift_isUniquelyReferencedOrPinned_native,
-         C_CC,
-         RETURNS(Int1Ty),
-         ARGS(RefCountedPtrTy),
-         ATTRS(NoUnwind, ZExt))
-
 // bool swift_isUniquelyReferenced_nonNull_native(const struct HeapObject *);
 FUNCTION(IsUniquelyReferenced_nonNull_native,
          swift_isUniquelyReferenced_nonNull_native,
@@ -508,15 +459,6 @@ FUNCTION(IsUniquelyReferenced_nonNull_native,
          ARGS(RefCountedPtrTy),
          ATTRS(NoUnwind, ZExt))
 
-// bool swift_isUniquelyReferencedOrPinned_nonNull_native(
-//                                                  const struct HeapObject *);
-FUNCTION(IsUniquelyReferencedOrPinned_nonNull_native,
-         swift_isUniquelyReferencedOrPinned_nonNull_native,
-         C_CC,
-         RETURNS(Int1Ty),
-         ARGS(RefCountedPtrTy),
-         ATTRS(NoUnwind, ZExt))
-
 // bool swift_isEscapingClosureAtFileLocation(const struct HeapObject *object,
 //                                            const unsigned char *filename,
 //                                            int32_t filenameLength,
diff --git a/stdlib/public/SwiftShims/RefCount.h b/stdlib/public/SwiftShims/RefCount.h
index e8a07ac9f5bc7..170137115bc04 100644
--- a/stdlib/public/SwiftShims/RefCount.h
+++ b/stdlib/public/SwiftShims/RefCount.h
@@ -191,8 +191,6 @@ namespace swift {
 // RefCountNotInline: refcount stored in an object's side table entry
 enum RefCountInlinedness { RefCountNotInline = false, RefCountIsInline = true };
 
-enum ClearPinnedFlag { DontClearPinnedFlag = false, DoClearPinnedFlag = true };
-
 enum PerformDeinit { DontPerformDeinit = false, DoPerformDeinit = true };
 
 
@@ -240,11 +238,14 @@ struct RefCountBitOffsets;
 // 32-bit out of line
 template <> struct RefCountBitOffsets<8> {
-  static const size_t IsPinnedShift = 0;
-  static const size_t IsPinnedBitCount = 1;
-  static const uint64_t IsPinnedMask = maskForField(IsPinned);
-
-  static const size_t UnownedRefCountShift = shiftAfterField(IsPinned);
+  // We reserve 1 bit (which we will likely be using in the future) to make
+  // the unowned field 31 bits. The reason is that unowned overflow checking
+  // does not work with 32 bits in the current implementation.
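// An editorial aside, not part of the patch: a minimal sketch of why a
// 31-bit unowned field is easier to overflow-check than a full 32-bit one,
// given that the refcount accessors traffic in uint32_t. The kSketch* and
// sketch* names are invented for this illustration.
#include <cstdint>

const uint32_t kSketchUnownedBitCount = 31;
const uint32_t kSketchUnownedMax =
    (uint32_t(1) << kSketchUnownedBitCount) - 1;

// Returns false when adding `inc` would overflow the 31-bit field.
inline bool sketchCanIncrementUnowned(uint32_t count, uint32_t inc) {
  // count <= kSketchUnownedMax < UINT32_MAX, so widening to 64 bits makes
  // the comparison immune to wraparound; with a full 32-bit field the
  // wrapped sum itself could look like a valid count.
  return uint64_t(count) + inc <= kSketchUnownedMax;
}
// The reserved bit and the narrowed field declared just below implement
// that tradeoff in the real layout.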
+  static const size_t ReservedShift = 0;
+  static const size_t ReservedBitCount = 1;
+  static const uint64_t ReservedMask = maskForField(Reserved);
+
+  static const size_t UnownedRefCountShift = shiftAfterField(Reserved);
   static const size_t UnownedRefCountBitCount = 31;
   static const uint64_t UnownedRefCountMask = maskForField(UnownedRefCount);
 
@@ -273,12 +274,12 @@ struct RefCountBitOffsets<8> {
 // 32-bit inline
 template <>
 struct RefCountBitOffsets<4> {
-  static const size_t IsPinnedShift = 0;
-  static const size_t IsPinnedBitCount = 1;
-  static const uint32_t IsPinnedMask = maskForField(IsPinned);
+  static const size_t ReservedShift = 0;
+  static const size_t ReservedBitCount = 0;
+  static const uint32_t ReservedMask = maskForField(Reserved);
 
-  static const size_t UnownedRefCountShift = shiftAfterField(IsPinned);
-  static const size_t UnownedRefCountBitCount = 7;
+  static const size_t UnownedRefCountShift = shiftAfterField(Reserved);
+  static const size_t UnownedRefCountBitCount = 8;
   static const uint32_t UnownedRefCountMask = maskForField(UnownedRefCount);
 
   static const size_t IsDeinitingShift = shiftAfterField(UnownedRefCount);
@@ -313,7 +314,7 @@ struct RefCountBitOffsets<4> {
   static_assert(SideTableBitCount + SideTableMarkBitCount +
                 UseSlowRCBitCount == sizeof(bits)*8,
                "wrong bit count for RefCountBits side table encoding");
-  static_assert(UnownedRefCountBitCount + IsPinnedBitCount +
+  static_assert(UnownedRefCountBitCount + ReservedBitCount +
                 IsDeinitingBitCount + StrongExtraRefCountBitCount +
                 UseSlowRCBitCount == sizeof(bits)*8,
                 "wrong bit count for RefCountBits refcount encoding");
@@ -405,7 +406,6 @@ class RefCountBitsT {
     // this is out-of-line and not the same layout as inline newbits.
     // Copy field-by-field.
     copyFieldFrom(newbits, UnownedRefCount);
-    copyFieldFrom(newbits, IsPinned);
     copyFieldFrom(newbits, IsDeiniting);
     copyFieldFrom(newbits, StrongExtraRefCount);
     copyFieldFrom(newbits, UseSlowRC);
@@ -439,12 +439,6 @@ class RefCountBitsT {
     return uint32_t(getField(UnownedRefCount));
   }
 
-  LLVM_ATTRIBUTE_ALWAYS_INLINE
-  bool getIsPinned() const {
-    assert(!hasSideTable());
-    return bool(getField(IsPinned));
-  }
-
   LLVM_ATTRIBUTE_ALWAYS_INLINE
   bool getIsDeiniting() const {
     assert(!hasSideTable());
@@ -481,12 +475,6 @@ class RefCountBitsT {
     setField(UnownedRefCount, value);
   }
 
-  LLVM_ATTRIBUTE_ALWAYS_INLINE
-  void setIsPinned(bool value) {
-    assert(!hasSideTable());
-    setField(IsPinned, value);
-  }
-
   LLVM_ATTRIBUTE_ALWAYS_INLINE
   void setIsDeiniting(bool value) {
     assert(!hasSideTable());
@@ -514,17 +502,12 @@ class RefCountBitsT {
   // Returns false if the decrement should fall back to some slow path
   // (for example, because UseSlowRC is set
   // or because the refcount is now zero and should deinit).
-  template <ClearPinnedFlag clearPinnedFlag>
   LLVM_NODISCARD LLVM_ATTRIBUTE_ALWAYS_INLINE
   bool decrementStrongExtraRefCount(uint32_t dec) {
 #ifndef NDEBUG
     if (!hasSideTable()) {
       // Can't check these assertions with side table present.
 
-      // clearPinnedFlag assumes the flag is already set.
-      if (clearPinnedFlag)
-        assert(getIsPinned() && "unpinning reference that was not pinned");
-
       if (getIsDeiniting())
         assert(getStrongExtraRefCount() >= dec &&
                "releasing reference whose refcount is already zero");
@@ -534,11 +517,8 @@ class RefCountBitsT {
     }
 #endif
 
-    BitsType unpin = (clearPinnedFlag
-                      ? (BitsType(1) << Offsets::IsPinnedShift)
-                      : 0);
     // This deliberately underflows by borrowing from the UseSlowRC field.
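// An editorial aside, not part of the patch: the borrow trick named in the
// comment above, reduced to a toy 64-bit word. The strong extra refcount
// sits just below the top bit (shift 33 mirrors Reserved(1) + Unowned(31) +
// IsDeiniting(1) from the 64-bit inline layout), and the top bit doubles as
// UseSlowRC and as the sign of the signed view. The kSketch* and sketch*
// names are invented here.
#include <cstdint>

const int kSketchStrongShift = 33;

// Returns true on the fast path; false means the subtraction borrowed into
// the top bit (underflow) or UseSlowRC was already set beforehand.
inline bool sketchDecrementStrong(uint64_t &bits, uint32_t dec) {
  bits -= uint64_t(dec) << kSketchStrongShift;
  return int64_t(bits) >= 0;
}
// Example: with an extra strong count of 0, subtracting 1 << 33 borrows all
// the way into bit 63, the signed view turns negative, and the caller falls
// back to the slow path. The real subtraction follows below.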
- bits -= unpin + (BitsType(dec) << Offsets::StrongExtraRefCountShift); + bits -= BitsType(dec) << Offsets::StrongExtraRefCountShift; return (SignedBitsType(bits) >= 0); } @@ -557,14 +537,13 @@ class RefCountBitsT { LLVM_ATTRIBUTE_ALWAYS_INLINE bool isUniquelyReferenced() { - static_assert(Offsets::IsPinnedBitCount + + static_assert(Offsets::ReservedBitCount + Offsets::UnownedRefCountBitCount + Offsets::IsDeinitingBitCount + Offsets::StrongExtraRefCountBitCount + Offsets::UseSlowRCBitCount == sizeof(bits)*8, "inspect isUniquelyReferenced after adding fields"); - // isPinned: don't care // Unowned: don't care (FIXME: should care and redo initForNotFreeing) // IsDeiniting: false // StrongExtra: 0 @@ -575,53 +554,6 @@ class RefCountBitsT { !getUseSlowRC() && !getIsDeiniting() && getStrongExtraRefCount() == 0; } - LLVM_ATTRIBUTE_ALWAYS_INLINE - bool isUniquelyReferencedOrPinned() { - static_assert(Offsets::IsPinnedBitCount + - Offsets::UnownedRefCountBitCount + - Offsets::IsDeinitingBitCount + - Offsets::StrongExtraRefCountBitCount + - Offsets::UseSlowRCBitCount == sizeof(bits)*8, - "inspect isUniquelyReferencedOrPinned after adding fields"); - - // isPinned: don't care - // Unowned: don't care (FIXME: should care and redo initForNotFreeing) - // IsDeiniting: false - // isPinned/StrongExtra: true/any OR false/0 - // UseSlowRC: false - - // Compiler is not clever enough to optimize this. - // return (isUniquelyReferenced() || - // (!getUseSlowRC() && !getIsDeiniting() && getIsPinned())); - - // Bit twiddling solution: - // 1. Define the fields in this order: - // bits that must be zero when not pinned | bits to ignore | IsPinned - // 2. Rotate IsPinned into the sign bit: - // IsPinned | bits that must be zero when not pinned | bits to ignore - // 3. Perform a signed comparison against X = (1 << count of ignored bits). - // IsPinned makes the value negative and thus less than X. - // Zero in the must-be-zero bits makes the value less than X. - // Non-zero and not pinned makes the value greater or equal to X. - - // Count the ignored fields. - constexpr auto ignoredBitsCount = - Offsets::UnownedRefCountBitCount + Offsets::IsDeinitingBitCount; - // Make sure all fields are positioned as expected. - // -1 compensates for the rotation. - static_assert(Offsets::IsPinnedShift == 0, "IsPinned must be the LSB bit"); - static_assert( - shiftAfterField(Offsets::UnownedRefCount)-1 <= ignoredBitsCount && - shiftAfterField(Offsets::IsDeiniting)-1 <= ignoredBitsCount && - Offsets::StrongExtraRefCountShift-1 >= ignoredBitsCount && - Offsets::UseSlowRCShift-1 >= ignoredBitsCount, - "refcount bit layout incorrect for isUniquelyReferencedOrPinned"); - - BitsType X = BitsType(1) << ignoredBitsCount; - BitsType rotatedBits = ((bits >> 1) | (bits << (8*sizeof(bits) - 1))); - return SignedBitsType(rotatedBits) < SignedBitsType(X); - } - LLVM_ATTRIBUTE_ALWAYS_INLINE BitsType getBitsValue() { return bits; @@ -720,12 +652,6 @@ class RefCounts { LLVM_ATTRIBUTE_NOINLINE void incrementNonAtomicSlow(RefCountBits oldbits, uint32_t inc); - LLVM_ATTRIBUTE_NOINLINE - bool tryIncrementAndPinSlow(RefCountBits oldbits); - - LLVM_ATTRIBUTE_NOINLINE - bool tryIncrementAndPinNonAtomicSlow(RefCountBits); - LLVM_ATTRIBUTE_NOINLINE bool tryIncrementSlow(RefCountBits oldbits); @@ -787,51 +713,6 @@ class RefCounts { refCounts.store(newbits, std::memory_order_relaxed); } - // Try to simultaneously set the pinned flag and increment the - // reference count. If the flag is already set, don't increment the - // reference count. 
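// An editorial aside, not part of the patch: the strictly-nested caller
// protocol described above, shown with the runtime entry points this patch
// removes. Per the HeapObject.h contracts, swift_tryPin returns the object
// when it set the flag and null when an outer scope already holds the pin,
// and swift_unpin tolerates null. sketchMutateInPlace is invented here.
static void sketchMutateInPlace(HeapObject *buffer) {
  HeapObject *pinned = swift_tryPin(buffer);  // +1 ref iff the flag was clear
  // ... mutate in place; a nested sketchMutateInPlace(buffer) gets null back
  // and therefore leaves the outer scope's pin alone ...
  swift_unpin(pinned);  // drops the ref and clears the flag; no-op on null
}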
-  //
-  // This is only a sensible protocol for strictly-nested modifications.
-  //
-  // Returns true if the flag was set by this operation.
-  //
-  // Postcondition: the flag is set.
-  bool tryIncrementAndPin() {
-    auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    RefCountBits newbits;
-    do {
-      // If the flag is already set, just fail.
-      if (!oldbits.hasSideTable() && oldbits.getIsPinned())
-        return false;
-
-      // Try to simultaneously set the flag and increment the reference count.
-      newbits = oldbits;
-      newbits.setIsPinned(true);
-      bool fast = newbits.incrementStrongExtraRefCount(1);
-      if (!fast)
-        return tryIncrementAndPinSlow(oldbits);
-    } while (!refCounts.compare_exchange_weak(oldbits, newbits,
-                                              std::memory_order_relaxed));
-    return true;
-  }
-
-  bool tryIncrementAndPinNonAtomic() {
-    auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-
-    // If the flag is already set, just fail.
-    if (!oldbits.hasSideTable() && oldbits.getIsPinned())
-      return false;
-
-    // Try to simultaneously set the flag and increment the reference count.
-    auto newbits = oldbits;
-    newbits.setIsPinned(true);
-    bool fast = newbits.incrementStrongExtraRefCount(1);
-    if (!fast)
-      return tryIncrementAndPinNonAtomicSlow(oldbits);
-    refCounts.store(newbits, std::memory_order_relaxed);
-    return true;
-  }
-
   // Increment the reference count, unless the object is deiniting.
   bool tryIncrement() {
     auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
@@ -862,40 +743,26 @@ class RefCounts {
     return true;
   }
 
-  // Simultaneously clear the pinned flag and decrement the reference
-  // count. Call _swift_release_dealloc() if the reference count goes to zero.
-  //
-  // Precondition: the pinned flag is set.
-  LLVM_ATTRIBUTE_ALWAYS_INLINE
-  void decrementAndUnpinAndMaybeDeinit() {
-    doDecrement<DoClearPinnedFlag, DoPerformDeinit>(1);
-  }
-
-  LLVM_ATTRIBUTE_ALWAYS_INLINE
-  void decrementAndUnpinAndMaybeDeinitNonAtomic() {
-    doDecrementNonAtomic<DoClearPinnedFlag, DoPerformDeinit>(1);
-  }
-
   // Decrement the reference count.
   // Return true if the caller should now deinit the object.
   LLVM_ATTRIBUTE_ALWAYS_INLINE
   bool decrementShouldDeinit(uint32_t dec) {
-    return doDecrement<DontClearPinnedFlag, DontPerformDeinit>(dec);
+    return doDecrement<DontPerformDeinit>(dec);
   }
 
   LLVM_ATTRIBUTE_ALWAYS_INLINE
   bool decrementShouldDeinitNonAtomic(uint32_t dec) {
-    return doDecrementNonAtomic<DontClearPinnedFlag, DontPerformDeinit>(dec);
+    return doDecrementNonAtomic<DontPerformDeinit>(dec);
   }
 
   LLVM_ATTRIBUTE_ALWAYS_INLINE
   void decrementAndMaybeDeinit(uint32_t dec) {
-    doDecrement<DontClearPinnedFlag, DoPerformDeinit>(dec);
+    doDecrement<DoPerformDeinit>(dec);
   }
 
   LLVM_ATTRIBUTE_ALWAYS_INLINE
   void decrementAndMaybeDeinitNonAtomic(uint32_t dec) {
-    doDecrementNonAtomic<DontClearPinnedFlag, DoPerformDeinit>(dec);
+    doDecrementNonAtomic<DoPerformDeinit>(dec);
   }
 
   // Non-atomically release the last strong reference and mark the
@@ -936,27 +803,6 @@ class RefCounts {
     return bits.isUniquelyReferenced();
   }
 
-  // Return whether the reference count is exactly 1 or the pin flag
-  // is set. Once deinit begins the reference count is undefined.
-  bool isUniquelyReferencedOrPinned() const {
-    auto bits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
-    // FIXME: implement side table path if useful
-    // In the meantime we don't check it here.
-    // bits.isUniquelyReferencedOrPinned() checks it too,
-    // and the compiler optimizer does better if this check is not here.
-    // if (bits.hasSideTable())
-    //   return false;
-
-    assert(!bits.getIsDeiniting());
-
-    // bits.isUniquelyReferencedOrPinned() also checks the side table bit
-    // and this path is optimized better if we don't check it here first.
-    if (bits.isUniquelyReferencedOrPinned()) return true;
-    if (!bits.hasSideTable())
-      return false;
-    return bits.getSideTable()->isUniquelyReferencedOrPinned();
-  }
-
   // Return true if the object has started deiniting.
   bool isDeiniting() const {
     auto bits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
@@ -997,17 +843,17 @@ class RefCounts {
 
   // Second slow path of doDecrement, where the
   // object may have a side table entry.
-  template <ClearPinnedFlag clearPinnedFlag, PerformDeinit performDeinit>
+  template <PerformDeinit performDeinit>
   bool doDecrementSideTable(RefCountBits oldbits, uint32_t dec);
 
   // Second slow path of doDecrementNonAtomic, where the
   // object may have a side table entry.
-  template <ClearPinnedFlag clearPinnedFlag, PerformDeinit performDeinit>
+  template <PerformDeinit performDeinit>
   bool doDecrementNonAtomicSideTable(RefCountBits oldbits, uint32_t dec);
 
   // First slow path of doDecrement, where the object may need to be deinited.
   // Side table is handled in the second slow path, doDecrementSideTable().
-  template <ClearPinnedFlag clearPinnedFlag, PerformDeinit performDeinit>
+  template <PerformDeinit performDeinit>
   bool doDecrementSlow(RefCountBits oldbits, uint32_t dec) {
     RefCountBits newbits;
 
@@ -1016,15 +862,14 @@
       newbits = oldbits;
       bool fast =
-        newbits.template decrementStrongExtraRefCount<clearPinnedFlag>(dec);
+        newbits.decrementStrongExtraRefCount(dec);
       if (fast) {
         // Decrement completed normally. New refcount is not zero.
         deinitNow = false;
       }
       else if (oldbits.hasSideTable()) {
         // Decrement failed because we're on some other slow path.
-        return doDecrementSideTable<clearPinnedFlag, performDeinit>(oldbits, dec);
+        return doDecrementSideTable<performDeinit>(oldbits, dec);
       }
       else {
         // Decrement underflowed. Begin deinit.
@@ -1034,8 +879,6 @@ class RefCounts {
         newbits = oldbits;  // Undo failed decrement of newbits.
         newbits.setStrongExtraRefCount(0);
         newbits.setIsDeiniting(true);
-        if (clearPinnedFlag)
-          newbits.setIsPinned(false);
       }
     } while (!refCounts.compare_exchange_weak(oldbits, newbits,
                                               std::memory_order_release,
@@ -1050,21 +893,20 @@ class RefCounts {
 
   // First slow path of doDecrementNonAtomic, where the object may need to be deinited.
   // Side table is handled in the second slow path, doDecrementNonAtomicSideTable().
-  template <ClearPinnedFlag clearPinnedFlag, PerformDeinit performDeinit>
+  template <PerformDeinit performDeinit>
   bool doDecrementNonAtomicSlow(RefCountBits oldbits, uint32_t dec) {
     bool deinitNow;
     auto newbits = oldbits;
 
     bool fast =
-      newbits.template decrementStrongExtraRefCount<clearPinnedFlag>(dec);
+      newbits.decrementStrongExtraRefCount(dec);
     if (fast) {
       // Decrement completed normally. New refcount is not zero.
       deinitNow = false;
     }
     else if (oldbits.hasSideTable()) {
       // Decrement failed because we're on some other slow path.
-      return doDecrementNonAtomicSideTable<clearPinnedFlag, performDeinit>(oldbits, dec);
+      return doDecrementNonAtomicSideTable<performDeinit>(oldbits, dec);
    }
    else {
      // Decrement underflowed. Begin deinit.
@@ -1074,8 +916,6 @@
       newbits = oldbits;  // Undo failed decrement of newbits.
       newbits.setStrongExtraRefCount(0);
       newbits.setIsDeiniting(true);
-      if (clearPinnedFlag)
-        newbits.setIsPinned(false);
     }
     refCounts.store(newbits, std::memory_order_relaxed);
     if (performDeinit && deinitNow) {
@@ -1091,7 +931,7 @@ class RefCounts {
   //
   // Deinit is optionally handled directly instead of always deferring to
   // the caller because the compiler can optimize this arrangement better.
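// An editorial aside, not part of the patch: the two calling conventions
// that the PerformDeinit parameter produces. The sketchRelease* wrappers
// are invented; DoPerformDeinit and DontPerformDeinit are the enum values
// declared earlier in this header.
static void sketchRelease(RefCounts<InlineRefCountBits> &rc) {
  rc.doDecrement<DoPerformDeinit>(1);           // deinit handled inside
}
static bool sketchReleaseDeferred(RefCounts<InlineRefCountBits> &rc) {
  return rc.doDecrement<DontPerformDeinit>(1);  // true => caller deinits
}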
-  template <ClearPinnedFlag clearPinnedFlag, PerformDeinit performDeinit>
+  template <PerformDeinit performDeinit>
   bool doDecrement(uint32_t dec) {
     auto oldbits = refCounts.load(SWIFT_MEMORY_ORDER_CONSUME);
     RefCountBits newbits;
 
     do {
       newbits = oldbits;
       bool fast =
-        newbits.template decrementStrongExtraRefCount<clearPinnedFlag>(dec);
+        newbits.decrementStrongExtraRefCount(dec);
       if (!fast)
         // Slow paths include side table; deinit; underflow
-        return doDecrementSlow<clearPinnedFlag, performDeinit>(oldbits, dec);
+        return doDecrementSlow<performDeinit>(oldbits, dec);
     } while (!refCounts.compare_exchange_weak(oldbits, newbits,
                                               std::memory_order_release,
                                               std::memory_order_relaxed));
@@ -1111,7 +951,7 @@ class RefCounts {
   }
 
   // This is independently specialized below for inline and out-of-line use.
-  template <ClearPinnedFlag clearPinnedFlag, PerformDeinit performDeinit>
+  template <PerformDeinit performDeinit>
   bool doDecrementNonAtomic(uint32_t dec);
 
 
@@ -1325,18 +1165,18 @@ class HeapObjectSideTableEntry {
     refCounts.increment(inc);
   }
 
-  template <ClearPinnedFlag clearPinnedFlag, PerformDeinit performDeinit>
+  template <PerformDeinit performDeinit>
   bool decrementStrong(uint32_t dec) {
-    return refCounts.doDecrement<clearPinnedFlag, performDeinit>(dec);
+    return refCounts.doDecrement<performDeinit>(dec);
   }
 
-  template <ClearPinnedFlag clearPinnedFlag, PerformDeinit performDeinit>
+  template <PerformDeinit performDeinit>
   bool decrementNonAtomicStrong(uint32_t dec) {
-    return refCounts.doDecrementNonAtomic<clearPinnedFlag, performDeinit>(dec);
+    return refCounts.doDecrementNonAtomic<performDeinit>(dec);
   }
 
   void decrementFromOneNonAtomic() {
-    decrementNonAtomicStrong<DontClearPinnedFlag, DontPerformDeinit>(1);
+    decrementNonAtomicStrong<DontPerformDeinit>(1);
   }
 
   bool isDeiniting() const {
@@ -1347,18 +1187,10 @@ class HeapObjectSideTableEntry {
     return refCounts.tryIncrement();
   }
 
-  bool tryIncrementAndPin() {
-    return refCounts.tryIncrementAndPin();
-  }
-
   bool tryIncrementNonAtomic() {
     return refCounts.tryIncrementNonAtomic();
   }
 
-  bool tryIncrementAndPinNonAtomic() {
-    return refCounts.tryIncrementAndPinNonAtomic();
-  }
-
   // Return weak reference count.
   // Note that this is not equal to the number of outstanding weak pointers.
   uint32_t getCount() const {
@@ -1369,10 +1201,6 @@ class HeapObjectSideTableEntry {
     return refCounts.isUniquelyReferenced();
   }
 
-  bool isUniquelyReferencedOrPinned() const {
-    return refCounts.isUniquelyReferencedOrPinned();
-  }
-
   // UNOWNED
 
   void incrementUnowned(uint32_t inc) {
@@ -1464,7 +1292,7 @@ class HeapObjectSideTableEntry {
 // Inline version of non-atomic strong decrement.
 // This version can actually be non-atomic.
 template <>
-template <ClearPinnedFlag clearPinnedFlag, PerformDeinit performDeinit>
+template <PerformDeinit performDeinit>
 LLVM_ATTRIBUTE_ALWAYS_INLINE
 inline bool RefCounts<InlineRefCountBits>::doDecrementNonAtomic(uint32_t dec) {
@@ -1481,12 +1309,12 @@ inline bool RefCounts<InlineRefCountBits>::doDecrementNonAtomic(uint32_t dec) {
 
   // Use slow path if we can't guarantee atomicity.
   if (oldbits.hasSideTable() || oldbits.getUnownedRefCount() != 1)
-    return doDecrementNonAtomicSlow<clearPinnedFlag, performDeinit>(oldbits, dec);
+    return doDecrementNonAtomicSlow<performDeinit>(oldbits, dec);
 
   auto newbits = oldbits;
-  bool fast = newbits.decrementStrongExtraRefCount<clearPinnedFlag>(dec);
+  bool fast = newbits.decrementStrongExtraRefCount(dec);
   if (!fast)
-    return doDecrementNonAtomicSlow<clearPinnedFlag, performDeinit>(oldbits, dec);
+    return doDecrementNonAtomicSlow<performDeinit>(oldbits, dec);
 
   refCounts.store(newbits, std::memory_order_relaxed);
   return false;  // don't deinit
@@ -1496,31 +1324,31 @@ inline bool RefCounts<InlineRefCountBits>::doDecrementNonAtomic(uint32_t dec) {
 
 // This version needs to be atomic because of the
 // threat of concurrent read of a weak reference.
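// An editorial aside, not part of the patch: the race that forces the
// atomic implementation. A plain load/modify/store pair on the side table
// entry, say
//
//   auto bits = refCounts.load(std::memory_order_relaxed);
//   bits.decrementStrongExtraRefCount(1);              // thread A
//   refCounts.store(bits, std::memory_order_relaxed);
//
// can lose an update: between A's load and store, thread B may chase a weak
// reference into the same entry and try to retain, and B's CAS-based
// increment is then silently overwritten by A's store. The specialization
// below therefore forwards to the CAS-based doDecrement.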
template <>
-template <ClearPinnedFlag clearPinnedFlag, PerformDeinit performDeinit>
+template <PerformDeinit performDeinit>
 inline bool RefCounts<SideTableRefCountBits>::
 doDecrementNonAtomic(uint32_t dec) {
-  return doDecrement<clearPinnedFlag, performDeinit>(dec);
+  return doDecrement<performDeinit>(dec);
 }
 
 template <>
-template <ClearPinnedFlag clearPinnedFlag, PerformDeinit performDeinit>
+template <PerformDeinit performDeinit>
 inline bool RefCounts<InlineRefCountBits>::
 doDecrementSideTable(InlineRefCountBits oldbits, uint32_t dec) {
   auto side = oldbits.getSideTable();
-  return side->decrementStrong<clearPinnedFlag, performDeinit>(dec);
+  return side->decrementStrong<performDeinit>(dec);
 }
 
 template <>
-template <ClearPinnedFlag clearPinnedFlag, PerformDeinit performDeinit>
+template <PerformDeinit performDeinit>
 inline bool RefCounts<InlineRefCountBits>::
 doDecrementNonAtomicSideTable(InlineRefCountBits oldbits, uint32_t dec) {
   auto side = oldbits.getSideTable();
-  return side->decrementNonAtomicStrong<clearPinnedFlag, performDeinit>(dec);
+  return side->decrementNonAtomicStrong<performDeinit>(dec);
 }
 
 template <>
-template <ClearPinnedFlag clearPinnedFlag, PerformDeinit performDeinit>
+template <PerformDeinit performDeinit>
 inline bool RefCounts<SideTableRefCountBits>::
 doDecrementSideTable(SideTableRefCountBits oldbits, uint32_t dec) {
   swift::crash("side table refcount must not have "
@@ -1528,7 +1356,7 @@ doDecrementSideTable(SideTableRefCountBits oldbits, uint32_t dec) {
 }
 
 template <>
-template <ClearPinnedFlag clearPinnedFlag, PerformDeinit performDeinit>
+template <PerformDeinit performDeinit>
 inline bool RefCounts<SideTableRefCountBits>::
 doDecrementNonAtomicSideTable(SideTableRefCountBits oldbits, uint32_t dec) {
   swift::crash("side table refcount must not have "
diff --git a/stdlib/public/runtime/HeapObject.cpp b/stdlib/public/runtime/HeapObject.cpp
index 6f70e4c916b26..32a9246d65971 100644
--- a/stdlib/public/runtime/HeapObject.cpp
+++ b/stdlib/public/runtime/HeapObject.cpp
@@ -478,46 +478,6 @@ void swift::swift_nonatomic_unownedRelease_n(HeapObject *object, int n) {
   }
 }
 
-HeapObject *swift::swift_tryPin(HeapObject *object) {
-  SWIFT_RT_TRACK_INVOCATION(object, swift_tryPin);
-  assert(isValidPointerForNativeRetain(object));
-
-  // Try to set the flag. If this succeeds, the caller will be
-  // responsible for clearing it.
-  if (object->refCounts.tryIncrementAndPin())
-    return object;
-
-  // If setting the flag failed, it's because it was already set.
-  // Return nil so that the object will be deallocated later.
-  return nullptr;
-}
-
-void swift::swift_unpin(HeapObject *object) {
-  SWIFT_RT_TRACK_INVOCATION(object, swift_unpin);
-  if (isValidPointerForNativeRetain(object))
-    object->refCounts.decrementAndUnpinAndMaybeDeinit();
-}
-
-HeapObject *swift::swift_nonatomic_tryPin(HeapObject *object) {
-  SWIFT_RT_TRACK_INVOCATION(object, swift_nonatomic_tryPin);
-  assert(object);
-
-  // Try to set the flag. If this succeeds, the caller will be
-  // responsible for clearing it.
-  if (object->refCounts.tryIncrementAndPinNonAtomic())
-    return object;
-
-  // If setting the flag failed, it's because it was already set.
-  // Return nil so that the object will be deallocated later.
-  return nullptr;
-}
-
-void swift::swift_nonatomic_unpin(HeapObject *object) {
-  SWIFT_RT_TRACK_INVOCATION(object, swift_nonatomic_unpin);
-  if (isValidPointerForNativeRetain(object))
-    object->refCounts.decrementAndUnpinAndMaybeDeinitNonAtomic();
-}
-
 HeapObject *swift::swift_tryRetain(HeapObject *object) {
   return _swift_tryRetain(object);
 }
diff --git a/stdlib/public/runtime/RefCount.cpp b/stdlib/public/runtime/RefCount.cpp
index 07e2476766985..48cc7751a0e14 100644
--- a/stdlib/public/runtime/RefCount.cpp
+++ b/stdlib/public/runtime/RefCount.cpp
@@ -64,27 +64,6 @@ bool RefCounts<RefCountBits>::tryIncrementNonAtomicSlow(RefCountBits oldbits) {
 template bool RefCounts<InlineRefCountBits>::tryIncrementNonAtomicSlow(InlineRefCountBits oldbits);
 template bool RefCounts<SideTableRefCountBits>::tryIncrementNonAtomicSlow(SideTableRefCountBits oldbits);
 
-template <typename RefCountBits>
-bool RefCounts<RefCountBits>::tryIncrementAndPinSlow(RefCountBits oldbits) {
-  if (oldbits.hasSideTable())
-    return oldbits.getSideTable()->tryIncrementAndPin();
-  else
-    swift::swift_abortRetainOverflow();
-}
-template bool RefCounts<InlineRefCountBits>::tryIncrementAndPinSlow(InlineRefCountBits oldbits);
-template bool RefCounts<SideTableRefCountBits>::tryIncrementAndPinSlow(SideTableRefCountBits oldbits);
-
-template <typename RefCountBits>
-bool RefCounts<RefCountBits>::tryIncrementAndPinNonAtomicSlow(RefCountBits oldbits) {
-  if (oldbits.hasSideTable())
-    return oldbits.getSideTable()->tryIncrementAndPinNonAtomic();
-  else
-    swift::swift_abortRetainOverflow();
-}
-template bool RefCounts<InlineRefCountBits>::tryIncrementAndPinNonAtomicSlow(InlineRefCountBits oldbits);
-template bool RefCounts<SideTableRefCountBits>::tryIncrementAndPinNonAtomicSlow(SideTableRefCountBits oldbits);
-
-
 // Return an object's side table, allocating it if necessary.
 // Returns null if the object is deiniting.
 // SideTableRefCountBits specialization intentionally does not exist.
diff --git a/stdlib/public/runtime/RuntimeInvocationsTracking.def b/stdlib/public/runtime/RuntimeInvocationsTracking.def
index 9e636f54e290c..a01146b18c7b9 100644
--- a/stdlib/public/runtime/RuntimeInvocationsTracking.def
+++ b/stdlib/public/runtime/RuntimeInvocationsTracking.def
@@ -45,10 +45,6 @@ FUNCTION_TO_TRACK(swift_allocObject)
 FUNCTION_TO_TRACK(swift_deallocObject)
 FUNCTION_TO_TRACK(swift_initStackObject)
 FUNCTION_TO_TRACK(swift_initStaticObject)
-FUNCTION_TO_TRACK(swift_tryPin)
-FUNCTION_TO_TRACK(swift_unpin)
-FUNCTION_TO_TRACK(swift_nonatomic_tryPin)
-FUNCTION_TO_TRACK(swift_nonatomic_unpin)
 FUNCTION_TO_TRACK(swift_tryRetain)
 FUNCTION_TO_TRACK(swift_tryRelease)
 FUNCTION_TO_TRACK(swift_unownedRetainStrong)
diff --git a/stdlib/public/runtime/SwiftObject.mm b/stdlib/public/runtime/SwiftObject.mm
index e0f16f9801fed..f7137bd04236a 100644
--- a/stdlib/public/runtime/SwiftObject.mm
+++ b/stdlib/public/runtime/SwiftObject.mm
@@ -1356,52 +1356,6 @@ static bool usesNativeSwiftReferenceCounting_nonNull(
 #endif
 }
 
-/// Return true if the given bits of a Builtin.BridgeObject refer to a
-/// native swift object whose strong reference count is 1.
-bool swift::swift_isUniquelyReferencedOrPinnedNonObjC_nonNull_bridgeObject(
-  uintptr_t bits
-) {
-  auto bridgeObject = (void*)bits;
-
-  if (isObjCTaggedPointer(bridgeObject))
-    return false;
-
-  const auto object = toPlainObject_unTagged_bridgeObject(bridgeObject);
-
-  // Note: we could just return false if all spare bits are set,
-  // but in that case the cost of a deeper check for a unique native
-  // object is going to be a negligible cost for a possible big win.
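// An editorial aside, not part of the patch: the BridgeObject decoding the
// removed function relies on, under an assumed spare-bits mask; the real
// masks and predicates live elsewhere in SwiftObject.mm. The sketch* name
// is invented here.
static const HeapObject *sketchUntagBridgeObject(uintptr_t bits,
                                                 uintptr_t spareBitsMask) {
  // Masking off the spare bits recovers the plain object pointer. A set
  // spare bit alone does not prove the referent is non-native, which is why
  // the code above keeps going and pays for the deeper uniqueness check.
  return reinterpret_cast<const HeapObject *>(bits & ~spareBitsMask);
}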
-#if SWIFT_OBJC_INTEROP - if (isNonNative_unTagged_bridgeObject(bridgeObject)) - return swift_isUniquelyReferencedOrPinnedNonObjC_nonNull(object); -#endif - return swift_isUniquelyReferencedOrPinned_nonNull_native( - (const HeapObject *)object); -} - - -/// Given a non-nil object reference, return true if the object is a -/// native swift object and either its strong reference count is 1 or -/// its pinned flag is set. -bool swift::swift_isUniquelyReferencedOrPinnedNonObjC_nonNull( - const void *object) { - assert(object != nullptr); - return -#if SWIFT_OBJC_INTEROP - usesNativeSwiftReferenceCounting_nonNull(object) && -#endif - swift_isUniquelyReferencedOrPinned_nonNull_native( - (const HeapObject*)object); -} - -// Given a non-@objc object reference, return true iff the -// object is non-nil and either has a strong reference count of 1 -// or is pinned. -bool swift::swift_isUniquelyReferencedOrPinned_native(const HeapObject *object){ - return object != nullptr && - swift_isUniquelyReferencedOrPinned_nonNull_native(object); -} - // Given a non-@objc object reference, return true iff the // object is non-nil and has a strong reference count greather than 1 bool swift::swift_isEscapingClosureAtFileLocation(const HeapObject *object, @@ -1447,16 +1401,6 @@ static bool usesNativeSwiftReferenceCounting_nonNull( return isEscaping; } -/// Given a non-nil native swift object reference, return true if -/// either the object has a strong reference count of 1 or its -/// pinned flag is set. -bool swift::swift_isUniquelyReferencedOrPinned_nonNull_native( - const HeapObject *object) { - assert(object != nullptr); - assert(!object->refCounts.isDeiniting()); - return object->refCounts.isUniquelyReferencedOrPinned(); -} - struct ClassExtents { size_t negative; size_t positive; diff --git a/unittests/runtime/Refcounting.cpp b/unittests/runtime/Refcounting.cpp index fe73d234929d3..1198e978f803a 100644 --- a/unittests/runtime/Refcounting.cpp +++ b/unittests/runtime/Refcounting.cpp @@ -66,37 +66,6 @@ TEST(RefcountingTest, retain_release) { EXPECT_EQ(1u, value); } -TEST(RefcountingTest, pin_unpin) { - size_t value = 0; - auto object = allocTestObject(&value, 1); - EXPECT_EQ(0u, value); - auto pinResult = swift_tryPin(object); - EXPECT_EQ(object, pinResult); - EXPECT_EQ(0u, value); - swift_release(object); - EXPECT_EQ(0u, value); - swift_unpin(object); - EXPECT_EQ(1u, value); -} - -TEST(RefcountingTest, pin_pin_unpin_unpin) { - size_t value = 0; - auto object = allocTestObject(&value, 1); - EXPECT_EQ(0u, value); - auto pinResult = swift_tryPin(object); - EXPECT_EQ(object, pinResult); - EXPECT_EQ(0u, value); - auto pinResult2 = swift_tryPin(object); - EXPECT_EQ(nullptr, pinResult2); - EXPECT_EQ(0u, value); - swift_unpin(pinResult2); - EXPECT_EQ(0u, value); - swift_release(object); - EXPECT_EQ(0u, value); - swift_unpin(object); - EXPECT_EQ(1u, value); -} - TEST(RefcountingTest, retain_release_n) { size_t value = 0; auto object = allocTestObject(&value, 1); @@ -186,30 +155,6 @@ TEST(RefcountingTest, isUniquelyReferenced) { EXPECT_EQ(1u, value); } -TEST(RefcountingTest, isUniquelyReferencedOrPinned) { - size_t value = 0; - auto object = allocTestObject(&value, 1); - EXPECT_EQ(0u, value); - // RC 1, unpinned - EXPECT_TRUE(swift_isUniquelyReferencedOrPinned_nonNull_native(object)); - - swift_retain(object); - // RC big, unpinned - EXPECT_FALSE(swift_isUniquelyReferencedOrPinned_nonNull_native(object)); - - swift_tryPin(object); - // RC big, pinned - 
EXPECT_TRUE(swift_isUniquelyReferencedOrPinned_nonNull_native(object)); - - swift_release(object); - // RC 1, pinned - EXPECT_TRUE(swift_isUniquelyReferencedOrPinned_nonNull_native(object)); - - swift_unpin(object); - swift_release(object); - EXPECT_EQ(1u, value); -} - ///////////////////////////////////////// // Non-atomic reference counting tests // ///////////////////////////////////////// @@ -234,37 +179,6 @@ TEST(RefcountingTest, nonatomic_retain_release) { EXPECT_EQ(1u, value); } -TEST(RefcountingTest, nonatomic_pin_unpin) { - size_t value = 0; - auto object = allocTestObject(&value, 1); - EXPECT_EQ(0u, value); - auto pinResult = swift_nonatomic_tryPin(object); - EXPECT_EQ(object, pinResult); - EXPECT_EQ(0u, value); - swift_nonatomic_release(object); - EXPECT_EQ(0u, value); - swift_nonatomic_unpin(object); - EXPECT_EQ(1u, value); -} - -TEST(RefcountingTest, nonatomic_pin_pin_unpin_unpin) { - size_t value = 0; - auto object = allocTestObject(&value, 1); - EXPECT_EQ(0u, value); - auto pinResult = swift_nonatomic_tryPin(object); - EXPECT_EQ(object, pinResult); - EXPECT_EQ(0u, value); - auto pinResult2 = swift_nonatomic_tryPin(object); - EXPECT_EQ(nullptr, pinResult2); - EXPECT_EQ(0u, value); - swift_nonatomic_unpin(pinResult2); - EXPECT_EQ(0u, value); - swift_nonatomic_release(object); - EXPECT_EQ(0u, value); - swift_nonatomic_unpin(object); - EXPECT_EQ(1u, value); -} - TEST(RefcountingTest, nonatomic_retain_release_n) { size_t value = 0; auto object = allocTestObject(&value, 1);