
Commit fdad5af1 authored by Hans Boehm, committed by android-build-merger

Merge "Fix memory order and race bugs in Refbase.h & RefBase.cpp" am: 62212954

am: d657e639

* commit 'd657e639':
  Fix memory order and race bugs in Refbase.h & RefBase.cpp

Change-Id: I79106bb0399e7699d51d526235843504ab52708b
parents 11082d3a d657e639
RefBase.h (+6 −5)
@@ -17,7 +17,7 @@
#ifndef ANDROID_REF_BASE_H
#define ANDROID_REF_BASE_H

-#include <cutils/atomic.h>
+#include <atomic>

#include <stdint.h>
#include <sys/types.h>
@@ -176,16 +176,17 @@ class LightRefBase
public:
    inline LightRefBase() : mCount(0) { }
    inline void incStrong(__attribute__((unused)) const void* id) const {
-        android_atomic_inc(&mCount);
+        mCount.fetch_add(1, std::memory_order_relaxed);
    }
    inline void decStrong(__attribute__((unused)) const void* id) const {
-        if (android_atomic_dec(&mCount) == 1) {
+        if (mCount.fetch_sub(1, std::memory_order_release) == 1) {
+            std::atomic_thread_fence(std::memory_order_acquire);
            delete static_cast<const T*>(this);
        }
    }
    //! DEBUGGING ONLY: Get current strong ref count.
    inline int32_t getStrongCount() const {
-        return mCount;
+        return mCount.load(std::memory_order_relaxed);
    }

    typedef LightRefBase<T> basetype;
@@ -200,7 +201,7 @@ private:
            const void* old_id, const void* new_id) { }

private:
-    mutable volatile int32_t mCount;
+    mutable std::atomic<int32_t> mCount;
};

// This is a wrapper around LightRefBase that simply enforces a virtual
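
A standalone sketch (not AOSP code; names are illustrative) of the counting idiom the hunk above adopts: relaxed increments, a release decrement, and an acquire fence guarding the delete.

#include <atomic>
#include <cstdint>

template <typename T>
class CountedSketch {
public:
    CountedSketch() : mCount(0) { }
    void incRef() const {
        // Relaxed suffices: the caller already holds a reference, so this
        // increment cannot race with the final decrement.
        mCount.fetch_add(1, std::memory_order_relaxed);
    }
    void decRef() const {
        // Release publishes this thread's prior writes to the object...
        if (mCount.fetch_sub(1, std::memory_order_release) == 1) {
            // ...and the acquire fence makes all threads' prior writes
            // visible here, so the delete happens after every access.
            std::atomic_thread_fence(std::memory_order_acquire);
            delete static_cast<const T*>(this);
        }
    }
private:
    mutable std::atomic<int32_t> mCount;
};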
RefBase.cpp (+140 −58)
@@ -27,7 +27,6 @@

#include <utils/RefBase.h>

-#include <utils/Atomic.h>
#include <utils/CallStack.h>
#include <utils/Log.h>
#include <utils/threads.h>
@@ -57,6 +56,68 @@

namespace android {

+// Usage, invariants, etc:
+
+// It is normally OK just to keep weak pointers to an object.  The object will
+// be deallocated by decWeak when the last weak reference disappears.
+// Once a strong reference has been created, the object will disappear once
+// the last strong reference does (decStrong).
+// AttemptIncStrong will succeed if the object has a strong reference, or if it
+// has a weak reference and has never had a strong reference.
+// AttemptIncWeak really does succeed only if there is already a WEAK
+// reference, and thus may fail when attemptIncStrong would succeed.
+// OBJECT_LIFETIME_WEAK changes this behavior to retain the object
+// unconditionally until the last reference of either kind disappears.  The
+// client ensures that the extendObjectLifetime call happens before the dec
+// call that would otherwise have deallocated the object, or before an
+// attemptIncStrong call that might rely on it.  We do not worry about
+// concurrent changes to the object lifetime.
+// mStrong is the strong reference count.  mWeak is the weak reference count.
+// Between calls, and ignoring memory ordering effects, mWeak includes strong
+// references, and is thus >= mStrong.
+//
+// A weakref_impl is allocated as the value of mRefs in a RefBase object on
+// construction.
+// In the OBJECT_LIFETIME_STRONG case, it is deallocated in the RefBase
+// destructor iff the strong reference count was never incremented. The
+// destructor can be invoked either from decStrong, or from decWeak if there
+// was never a strong reference. If the reference count had been incremented,
+// it is deallocated directly in decWeak, and hence still lives as long as
+// the last weak reference.
+// In the OBJECT_LIFETIME_WEAK case, it is always deallocated from the RefBase
+// destructor, which is always invoked by decWeak. DecStrong explicitly avoids
+// the deletion in this case.
+//
+// Memory ordering:
+// The client must ensure that every inc() call, together with all other
+// accesses to the object, happens before the corresponding dec() call.
+//
+// We try to keep memory ordering constraints on atomics as weak as possible,
+// since memory fences or ordered memory accesses are likely to be a major
+// performance cost for this code. All accesses to mStrong, mWeak, and mFlags
+// explicitly relax memory ordering in some way.
+//
+// The only operations that are not memory_order_relaxed are reference count
+// decrements. All reference count decrements are release operations.  In
+// addition, the final decrement leading to the deallocation is followed by an
+// acquire fence, which we can view informally as also turning it into an
+// acquire operation.  (See 29.8p4 [atomics.fences] for details. We could
+// alternatively use acq_rel operations for all decrements. This is probably
+// slower on most current (2016) hardware, especially on ARMv7, but that may
+// not be true indefinitely.)
+//
+// This convention ensures that the second-to-last decrement synchronizes with
+// (in the language of 1.10 in the C++ standard) the final decrement of a
+// reference count. Since reference counts are only updated using atomic
+// read-modify-write operations, this also extends to any earlier decrements.
+// (See "release sequence" in 1.10.)
+//
+// Since all operations on an object happen before the corresponding reference
+// count decrement, and all reference count decrements happen before the final
+// one, we are guaranteed that all other object accesses happen before the
+// object is destroyed.


#define INITIAL_STRONG_VALUE (1<<28)

// ---------------------------------------------------------------------------
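
The invariants above can be exercised through the usual sp<>/wp<> wrappers. A hedged usage sketch (Widget is a hypothetical class, not part of this change):

#include <utils/RefBase.h>
#include <utils/StrongPointer.h>

using android::RefBase;
using android::sp;
using android::wp;

class Widget : public RefBase {
public:
    void draw() { }
};

void example() {
    sp<Widget> strong = new Widget();      // first incStrong(); onFirstRef() runs
    wp<Widget> weak = strong;              // mWeak >= mStrong holds

    sp<Widget> promoted = weak.promote();  // attemptIncStrong(): succeeds while
    if (promoted.get() != nullptr) {       // a strong reference still exists
        promoted->draw();
    }

    strong.clear();
    promoted.clear();                      // last decStrong() deletes the Widget

    // Default OBJECT_LIFETIME_STRONG: once the object has lost its last
    // strong reference, promotion fails from then on.
    sp<Widget> gone = weak.promote();      // returns nullptr
}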
@@ -64,10 +125,10 @@ namespace android {
class RefBase::weakref_impl : public RefBase::weakref_type
{
public:
-    volatile int32_t    mStrong;
-    volatile int32_t    mWeak;
+    std::atomic<int32_t>    mStrong;
+    std::atomic<int32_t>    mWeak;
    RefBase* const          mBase;
-    volatile int32_t    mFlags;
+    std::atomic<int32_t>    mFlags;

#if !DEBUG_REFS

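
For context (not part of the commit): volatile gives neither atomic read-modify-write nor any inter-thread ordering, which is why the fields above move to std::atomic. A compilable sketch:

#include <atomic>
#include <cstdint>
#include <thread>

int main() {
    std::atomic<int32_t> count{0};
    auto work = [&] {
        for (int i = 0; i < 100000; ++i) {
            // Atomic read-modify-write: no increments are ever lost. A
            // volatile int32_t incremented with ++ here could lose updates,
            // since volatile implies no atomicity.
            count.fetch_add(1, std::memory_order_relaxed);
        }
    };
    std::thread t1(work), t2(work);
    t1.join();
    t2.join();
    return count.load(std::memory_order_relaxed) == 200000 ? 0 : 1;
}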
@@ -141,7 +202,7 @@ public:
    void addStrongRef(const void* id) {
        //ALOGD_IF(mTrackEnabled,
        //        "addStrongRef: RefBase=%p, id=%p", mBase, id);
-        addRef(&mStrongRefs, id, mStrong);
+        addRef(&mStrongRefs, id, mStrong.load(std::memory_order_relaxed));
    }

    void removeStrongRef(const void* id) {
@@ -150,7 +211,7 @@ public:
        if (!mRetain) {
            removeRef(&mStrongRefs, id);
        } else {
-            addRef(&mStrongRefs, id, -mStrong);
+            addRef(&mStrongRefs, id, -mStrong.load(std::memory_order_relaxed));
        }
    }

@@ -162,14 +223,14 @@ public:
    }

    void addWeakRef(const void* id) {
-        addRef(&mWeakRefs, id, mWeak);
+        addRef(&mWeakRefs, id, mWeak.load(std::memory_order_relaxed));
    }

    void removeWeakRef(const void* id) {
        if (!mRetain) {
            removeRef(&mWeakRefs, id);
        } else {
-            addRef(&mWeakRefs, id, -mWeak);
+            addRef(&mWeakRefs, id, -mWeak.load(std::memory_order_relaxed));
        }
    }

@@ -330,7 +391,7 @@ void RefBase::incStrong(const void* id) const
    refs->incWeak(id);
    
    refs->addStrongRef(id);
-    const int32_t c = android_atomic_inc(&refs->mStrong);
+    const int32_t c = refs->mStrong.fetch_add(1, std::memory_order_relaxed);
    ALOG_ASSERT(c > 0, "incStrong() called on %p after last strong ref", refs);
#if PRINT_REFS
    ALOGD("incStrong of %p from %p: cnt=%d\n", this, id, c);
@@ -339,7 +400,10 @@ void RefBase::incStrong(const void* id) const
        return;
    }

-    android_atomic_add(-INITIAL_STRONG_VALUE, &refs->mStrong);
+    int32_t old = refs->mStrong.fetch_sub(INITIAL_STRONG_VALUE,
+            std::memory_order_relaxed);
+    // A decStrong() must still happen after us.
+    ALOG_ASSERT(old > INITIAL_STRONG_VALUE, "0x%x too small", old);
    refs->mBase->onFirstRef();
}

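A toy illustration (not the real class) of the INITIAL_STRONG_VALUE bias used above: the count starts at 1<<28, so the first genuine incStrong() can be detected and the bias removed exactly once.

#include <atomic>
#include <cstdint>
#include <cstdio>

constexpr int32_t kInitialStrong = 1 << 28;  // mirrors INITIAL_STRONG_VALUE
std::atomic<int32_t> gStrong{kInitialStrong};

void toyIncStrong() {
    int32_t c = gStrong.fetch_add(1, std::memory_order_relaxed);
    if (c != kInitialStrong) {
        return;  // not the first strong reference
    }
    // First-ever strong reference: strip the bias. Relaxed suffices since
    // the caller now holds a reference, so no one can race the count to 0.
    gStrong.fetch_sub(kInitialStrong, std::memory_order_relaxed);
    std::printf("first ref; count is now %d\n",
            static_cast<int>(gStrong.load(std::memory_order_relaxed)));
}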
@@ -347,27 +411,39 @@ void RefBase::decStrong(const void* id) const
{
    weakref_impl* const refs = mRefs;
    refs->removeStrongRef(id);
-    const int32_t c = android_atomic_dec(&refs->mStrong);
+    const int32_t c = refs->mStrong.fetch_sub(1, std::memory_order_release);
#if PRINT_REFS
    ALOGD("decStrong of %p from %p: cnt=%d\n", this, id, c);
#endif
    ALOG_ASSERT(c >= 1, "decStrong() called on %p too many times", refs);
    if (c == 1) {
+        std::atomic_thread_fence(std::memory_order_acquire);
        refs->mBase->onLastStrongRef(id);
-        if ((refs->mFlags&OBJECT_LIFETIME_MASK) == OBJECT_LIFETIME_STRONG) {
+        int32_t flags = refs->mFlags.load(std::memory_order_relaxed);
+        if ((flags&OBJECT_LIFETIME_MASK) == OBJECT_LIFETIME_STRONG) {
            delete this;
+            // Since mStrong had been incremented, the destructor did not
+            // delete refs.
        }
    }
+    // Note that even with only strong reference operations, the thread
+    // deallocating this may not be the same as the thread deallocating refs.
+    // That's OK: all accesses to this happen before its deletion here,
+    // and all accesses to refs happen before its deletion in the final decWeak.
+    // The destructor can safely access mRefs because either it's deleting
+    // mRefs itself, or it's running entirely before the final mWeak decrement.
    refs->decWeak(id);
}

void RefBase::forceIncStrong(const void* id) const
{
+    // Allows initial mStrong of 0 in addition to INITIAL_STRONG_VALUE.
+    // TODO: Better document assumptions.
    weakref_impl* const refs = mRefs;
    refs->incWeak(id);
    
    refs->addStrongRef(id);
-    const int32_t c = android_atomic_inc(&refs->mStrong);
+    const int32_t c = refs->mStrong.fetch_add(1, std::memory_order_relaxed);
    ALOG_ASSERT(c >= 0, "forceIncStrong called on %p after ref count underflow",
               refs);
#if PRINT_REFS
@@ -376,7 +452,8 @@ void RefBase::forceIncStrong(const void* id) const

    switch (c) {
    case INITIAL_STRONG_VALUE:
-        android_atomic_add(-INITIAL_STRONG_VALUE, &refs->mStrong);
+        refs->mStrong.fetch_sub(INITIAL_STRONG_VALUE,
+                std::memory_order_relaxed);
        // fall through...
    case 0:
        refs->mBase->onFirstRef();
@@ -385,7 +462,8 @@ void RefBase::forceIncStrong(const void* id) const

int32_t RefBase::getStrongCount() const
{
-    return mRefs->mStrong;
+    // Debugging only; No memory ordering guarantees.
+    return mRefs->mStrong.load(std::memory_order_relaxed);
}

RefBase* RefBase::weakref_type::refBase() const
@@ -397,7 +475,8 @@ void RefBase::weakref_type::incWeak(const void* id)
{
    weakref_impl* const impl = static_cast<weakref_impl*>(this);
    impl->addWeakRef(id);
-    const int32_t c __unused = android_atomic_inc(&impl->mWeak);
+    const int32_t c __unused = impl->mWeak.fetch_add(1,
+            std::memory_order_relaxed);
    ALOG_ASSERT(c >= 0, "incWeak called on %p after last weak ref", this);
}

@@ -406,16 +485,19 @@ void RefBase::weakref_type::decWeak(const void* id)
{
    weakref_impl* const impl = static_cast<weakref_impl*>(this);
    impl->removeWeakRef(id);
-    const int32_t c = android_atomic_dec(&impl->mWeak);
+    const int32_t c = impl->mWeak.fetch_sub(1, std::memory_order_release);
    ALOG_ASSERT(c >= 1, "decWeak called on %p too many times", this);
    if (c != 1) return;
+    atomic_thread_fence(std::memory_order_acquire);

-    if ((impl->mFlags&OBJECT_LIFETIME_WEAK) == OBJECT_LIFETIME_STRONG) {
+    int32_t flags = impl->mFlags.load(std::memory_order_relaxed);
+    if ((flags&OBJECT_LIFETIME_MASK) == OBJECT_LIFETIME_STRONG) {
        // This is the regular lifetime case. The object is destroyed
        // when the last strong reference goes away. Since weakref_impl
        // outlives the object, it is not destroyed in the dtor, and
        // we'll have to do it here.
-        if (impl->mStrong == INITIAL_STRONG_VALUE) {
+        if (impl->mStrong.load(std::memory_order_relaxed)
+                == INITIAL_STRONG_VALUE) {
            // Special case: we never had a strong reference, so we need to
            // destroy the object now.
            delete impl->mBase;
@@ -424,22 +506,19 @@ void RefBase::weakref_type::decWeak(const void* id)
            delete impl;
        }
    } else {
-        // less common case: lifetime is OBJECT_LIFETIME_{WEAK|FOREVER}
-        impl->mBase->onLastWeakRef(id);
-        if ((impl->mFlags&OBJECT_LIFETIME_MASK) == OBJECT_LIFETIME_WEAK) {
-            // this is the OBJECT_LIFETIME_WEAK case. The last weak-reference
-            // is gone, we can destroy the object.
-            delete impl->mBase;
-        }
+        // This is the OBJECT_LIFETIME_WEAK case. The last weak-reference
+        // is gone, we can destroy the object.
+        impl->mBase->onLastWeakRef(id);
+        delete impl->mBase;
    }
}

bool RefBase::weakref_type::attemptIncStrong(const void* id)
{
    incWeak(id);
    
    weakref_impl* const impl = static_cast<weakref_impl*>(this);
-    int32_t curCount = impl->mStrong;
+    int32_t curCount = impl->mStrong.load(std::memory_order_relaxed);

    ALOG_ASSERT(curCount >= 0,
            "attemptIncStrong called on %p after underflow", this);
@@ -447,19 +526,20 @@ bool RefBase::weakref_type::attemptIncStrong(const void* id)
    while (curCount > 0 && curCount != INITIAL_STRONG_VALUE) {
        // we're in the easy/common case of promoting a weak-reference
        // from an existing strong reference.
-        if (android_atomic_cmpxchg(curCount, curCount+1, &impl->mStrong) == 0) {
+        if (impl->mStrong.compare_exchange_weak(curCount, curCount+1,
+                std::memory_order_relaxed)) {
            break;
        }
        // the strong count has changed on us, we need to re-assert our
-        // situation.
-        curCount = impl->mStrong;
+        // situation. curCount was updated by compare_exchange_weak.
    }
    
    if (curCount <= 0 || curCount == INITIAL_STRONG_VALUE) {
        // we're now in the harder case of either:
        // - there never was a strong reference on us
        // - or, all strong references have been released
-        if ((impl->mFlags&OBJECT_LIFETIME_WEAK) == OBJECT_LIFETIME_STRONG) {
+        int32_t flags = impl->mFlags.load(std::memory_order_relaxed);
+        if ((flags&OBJECT_LIFETIME_MASK) == OBJECT_LIFETIME_STRONG) {
            // this object has a "normal" life-time, i.e.: it gets destroyed
            // when the last strong reference goes away
            if (curCount <= 0) {
@@ -473,13 +553,13 @@ bool RefBase::weakref_type::attemptIncStrong(const void* id)
            // there never was a strong-reference, so we can try to
            // promote this object; we need to do that atomically.
            while (curCount > 0) {
-                if (android_atomic_cmpxchg(curCount, curCount + 1,
-                        &impl->mStrong) == 0) {
+                if (impl->mStrong.compare_exchange_weak(curCount, curCount+1,
+                        std::memory_order_relaxed)) {
                    break;
                }
                // the strong count has changed on us, we need to re-assert our
                // situation (e.g.: another thread has inc/decStrong'ed us)
-                curCount = impl->mStrong;
+                // curCount has been updated.
            }

            if (curCount <= 0) {
@@ -499,7 +579,7 @@ bool RefBase::weakref_type::attemptIncStrong(const void* id)
            }
            // grab a strong-reference, which is always safe due to the
            // extended life-time.
-            curCount = android_atomic_inc(&impl->mStrong);
+            curCount = impl->mStrong.fetch_add(1, std::memory_order_relaxed);
        }

        // If the strong reference count has already been incremented by
@@ -518,21 +598,16 @@ bool RefBase::weakref_type::attemptIncStrong(const void* id)
    ALOGD("attemptIncStrong of %p from %p: cnt=%d\n", this, id, curCount);
#endif

-    // now we need to fix-up the count if it was INITIAL_STRONG_VALUE
-    // this must be done safely, i.e.: handle the case where several threads
+    // curCount is the value of mStrong before we incremented it.
+    // Now we need to fix-up the count if it was INITIAL_STRONG_VALUE.
+    // This must be done safely, i.e.: handle the case where several threads
    // were here in attemptIncStrong().
-    curCount = impl->mStrong;
-    while (curCount >= INITIAL_STRONG_VALUE) {
-        ALOG_ASSERT(curCount > INITIAL_STRONG_VALUE,
-                "attemptIncStrong in %p underflowed to INITIAL_STRONG_VALUE",
-                this);
-        if (android_atomic_cmpxchg(curCount, curCount-INITIAL_STRONG_VALUE,
-                &impl->mStrong) == 0) {
-            break;
-        }
-        // the strong-count changed on us, we need to re-assert the situation,
-        // for e.g.: it's possible the fix-up happened in another thread.
-        curCount = impl->mStrong;
+    // curCount > INITIAL_STRONG_VALUE is OK, and can happen if we're doing
+    // this in the middle of another incStrong.  The subtraction is handled
+    // by the thread that started with INITIAL_STRONG_VALUE.
+    if (curCount == INITIAL_STRONG_VALUE) {
+        impl->mStrong.fetch_sub(INITIAL_STRONG_VALUE,
+                std::memory_order_relaxed);
+    }
    }

    return true;
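
An aside, as a compilable sketch: compare_exchange_weak writes the freshly observed value back into its expected argument on failure, which is why the rewritten loops above drop the manual curCount reloads.

#include <atomic>
#include <cstdint>

bool tryIncIfPositive(std::atomic<int32_t>& count) {
    int32_t cur = count.load(std::memory_order_relaxed);
    while (cur > 0) {
        // On failure, cur is updated to the value currently in count, so
        // the loop re-checks the new value with no explicit reload.
        if (count.compare_exchange_weak(cur, cur + 1,
                std::memory_order_relaxed)) {
            return true;   // reference taken
        }
    }
    return false;          // count was (or became) zero
}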
@@ -542,14 +617,15 @@ bool RefBase::weakref_type::attemptIncWeak(const void* id)
{
    weakref_impl* const impl = static_cast<weakref_impl*>(this);

-    int32_t curCount = impl->mWeak;
+    int32_t curCount = impl->mWeak.load(std::memory_order_relaxed);
    ALOG_ASSERT(curCount >= 0, "attemptIncWeak called on %p after underflow",
               this);
    while (curCount > 0) {
-        if (android_atomic_cmpxchg(curCount, curCount+1, &impl->mWeak) == 0) {
+        if (impl->mWeak.compare_exchange_weak(curCount, curCount+1,
+                std::memory_order_relaxed)) {
            break;
        }
-        curCount = impl->mWeak;
+        // curCount has been updated.
    }

    if (curCount > 0) {
@@ -561,7 +637,9 @@ bool RefBase::weakref_type::attemptIncWeak(const void* id)

int32_t RefBase::weakref_type::getWeakCount() const
{
-    return static_cast<const weakref_impl*>(this)->mWeak;
+    // Debug only!
+    return static_cast<const weakref_impl*>(this)->mWeak
+            .load(std::memory_order_relaxed);
}

void RefBase::weakref_type::printRefs() const
@@ -592,17 +670,19 @@ RefBase::RefBase()

RefBase::~RefBase()
{
-    if (mRefs->mStrong == INITIAL_STRONG_VALUE) {
+    if (mRefs->mStrong.load(std::memory_order_relaxed)
+            == INITIAL_STRONG_VALUE) {
        // we never acquired a strong (and/or weak) reference on this object.
        delete mRefs;
    } else {
-        // life-time of this object is extended to WEAK or FOREVER, in
+        // life-time of this object is extended to WEAK, in
        // which case weakref_impl doesn't out-live the object and we
        // can free it now.
-        if ((mRefs->mFlags & OBJECT_LIFETIME_MASK) != OBJECT_LIFETIME_STRONG) {
+        int32_t flags = mRefs->mFlags.load(std::memory_order_relaxed);
+        if ((flags & OBJECT_LIFETIME_MASK) != OBJECT_LIFETIME_STRONG) {
            // It's possible that the weak count is not 0 if the object
            // re-acquired a weak reference in its destructor
-            if (mRefs->mWeak == 0) {
+            if (mRefs->mWeak.load(std::memory_order_relaxed) == 0) {
                delete mRefs;
            }
        }
@@ -613,7 +693,9 @@ RefBase::~RefBase()

void RefBase::extendObjectLifetime(int32_t mode)
{
-    android_atomic_or(mode, &mRefs->mFlags);
+    // Must be happens-before ordered with respect to construction or any
+    // operation that could destroy the object.
+    mRefs->mFlags.fetch_or(mode, std::memory_order_relaxed);
}
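
A hedged sketch of the OBJECT_LIFETIME_WEAK mode this function enables (OwnerTracked is a hypothetical class); calling extendObjectLifetime() in the constructor trivially satisfies the happens-before requirement noted above.

#include <utils/RefBase.h>

class OwnerTracked : public android::RefBase {
public:
    OwnerTracked() {
        // The object now survives until the last *weak* reference drops,
        // and wp<>::promote() keeps succeeding until then.
        extendObjectLifetime(OBJECT_LIFETIME_WEAK);
    }
};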

void RefBase::onFirstRef()