diff --git a/include/hyprutils/memory/Atomic.hpp b/include/hyprutils/memory/Atomic.hpp index f69acd5..d1145d2 100644 --- a/include/hyprutils/memory/Atomic.hpp +++ b/include/hyprutils/memory/Atomic.hpp @@ -35,6 +35,11 @@ namespace Hyprutils::Memory { std::lock_guard lockGuard() { return std::lock_guard(m_mutex); } + + // Needed when unlock order or mutex lifetime matters. + std::recursive_mutex& getMutex() { + return m_mutex; + } }; } @@ -87,19 +92,12 @@ namespace Hyprutils::Memory { } ~CAtomicSharedPointer() { - if (!m_ptr.impl_) - return; - - auto lg = implLockGuard(); - m_ptr.reset(); + reset(); } template validHierarchy&> operator=(const CAtomicSharedPointer& rhs) { - if (m_ptr.impl_) { - auto lg = implLockGuard(); - m_ptr.reset(); - } + reset(); if (!rhs.m_ptr.impl_) return *this; @@ -113,10 +111,7 @@ namespace Hyprutils::Memory { if (this == &rhs) return *this; - if (m_ptr.impl_) { - auto lg = implLockGuard(); - m_ptr.reset(); - } + reset(); if (!rhs.m_ptr.impl_) return *this; @@ -144,8 +139,43 @@ namespace Hyprutils::Memory { if (!m_ptr.impl_) return; - auto lg = implLockGuard(); - m_ptr.reset(); + // last ref and last wref? + // -> must unlock BEFORE reset + // not last ref? + // -> must unlock AFTER reset + auto& mutex = ((Atomic_::impl*)m_ptr.impl_)->getMutex(); + mutex.lock(); + + if (m_ptr.impl_->ref() > 1) { + m_ptr.reset(); + mutex.unlock(); + return; + } + + if (m_ptr.impl_->wref() == 0) { + mutex.unlock(); // Don't hold the mutex when destroying it + m_ptr.reset(); + // mutex invalid + return; + } else { + // When the control block gets destroyed, the mutex is destroyed with it. + // Thus we must avoid attempting an unlock after impl_ has been destroyed. + // Without the workaround there is no safe way of checking whether it has been destroyed or not. + // + // To avoid this altogether, keep a weak pointer here. + // This guarantees that impl_ is still valid after the reset. 
+ CWeakPointer guard = m_ptr; + m_ptr.reset(); + + // Now we can safely check if guard is the last wref. + if (guard.impl_->wref() == 1) { + mutex.unlock(); + return; // ~guard destroys impl_ and mutex + } + + guard.reset(); + mutex.unlock(); + } } T& operator*() const { @@ -241,19 +271,12 @@ } ~CAtomicWeakPointer() { - if (!m_ptr.impl_) - return; - - auto lg = implLockGuard(); - m_ptr.reset(); + reset(); } template validHierarchy&> operator=(const CAtomicWeakPointer& rhs) { - if (m_ptr.impl_) { - auto lg = implLockGuard(); - m_ptr.reset(); - } + reset(); auto lg = rhs.implLockGuard(); m_ptr = rhs.m_ptr; @@ -264,10 +287,7 @@ if (this == &rhs) return *this; - if (m_ptr.impl_) { - auto lg = implLockGuard(); - m_ptr.reset(); - } + reset(); auto lg = rhs.implLockGuard(); m_ptr = rhs.m_ptr; @@ -292,8 +312,20 @@ if (!m_ptr.impl_) return; - auto lg = implLockGuard(); + // no refs and last wref? + // -> must unlock BEFORE reset + // not last wref? + // -> must unlock AFTER reset + auto& mutex = ((Atomic_::impl*)m_ptr.impl_)->getMutex(); + mutex.lock(); + if (m_ptr.impl_->ref() == 0 && m_ptr.impl_->wref() == 1) { + mutex.unlock(); + m_ptr.reset(); + // mutex invalid + return; + } + m_ptr.reset(); + mutex.unlock(); } T& operator*() const {