diff --git a/libraries/libmesosphere/include/mesosphere/kern_k_thread.hpp b/libraries/libmesosphere/include/mesosphere/kern_k_thread.hpp
index 487f36439..8904a6a92 100644
--- a/libraries/libmesosphere/include/mesosphere/kern_k_thread.hpp
+++ b/libraries/libmesosphere/include/mesosphere/kern_k_thread.hpp
@@ -105,7 +105,7 @@ namespace ams::kern {
                 util::Atomic<u8> dpc_flags;
                 u8 current_svc_id;
                 u8 reserved_2c;
-                u8 exception_flags;
+                util::Atomic<u8> exception_flags;
                 bool is_pinned;
                 u8 reserved_2f;
                 u8 reserved_30[0x10];
@@ -417,17 +417,17 @@ namespace ams::kern {
         private:
             ALWAYS_INLINE void SetExceptionFlag(ExceptionFlag flag) {
                 MESOSPHERE_ASSERT_THIS();
-                this->GetStackParameters().exception_flags |= flag;
+                this->GetStackParameters().exception_flags.FetchOr(flag);
             }
 
             ALWAYS_INLINE void ClearExceptionFlag(ExceptionFlag flag) {
                 MESOSPHERE_ASSERT_THIS();
-                this->GetStackParameters().exception_flags &= ~flag;
+                this->GetStackParameters().exception_flags.FetchAnd(~flag);
             }
 
             ALWAYS_INLINE bool IsExceptionFlagSet(ExceptionFlag flag) const {
                 MESOSPHERE_ASSERT_THIS();
-                return this->GetStackParameters().exception_flags & flag;
+                return this->GetStackParameters().exception_flags.Load() & flag;
             }
         public:
             /* ALWAYS_INLINE void SetCallingSvc() { return this->SetExceptionFlag(ExceptionFlag_IsCallingSvc); } */
diff --git a/libraries/libvapours/include/vapours/util/arch/arm64/util_atomic.hpp b/libraries/libvapours/include/vapours/util/arch/arm64/util_atomic.hpp
index a06b54b5c..c2ac6c52f 100644
--- a/libraries/libvapours/include/vapours/util/arch/arm64/util_atomic.hpp
+++ b/libraries/libvapours/include/vapours/util/arch/arm64/util_atomic.hpp
@@ -285,23 +285,23 @@ namespace ams::util {
                 return impl::AtomicCompareExchangeStrongImpl<Order>(this->GetStoragePointer(), expected, desired);
             }
 
-            #define AMS_UTIL_IMPL_DEFINE_ATOMIC_FETCH_OPERATE_FUNCTION(_OPERATION_, _OPERATOR_, _POINTER_ALLOWED_) \
-            template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
-            ALWAYS_INLINE T Fetch ## _OPERATION_(DifferenceType arg) { \
-                static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
-                volatile StorageType * const p = this->GetStoragePointer(); \
-                \
-                StorageType current; \
-                do { \
-                    current = impl::LoadAcquireExclusiveForAtomic(p); \
-                } while (AMS_UNLIKELY(!impl::StoreReleaseExclusiveForAtomic(p, ConvertToStorage(ConvertToType(current) _OPERATOR_ arg)))); \
-                return ConvertToType(current); \
-            } \
-            \
-            template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
-            ALWAYS_INLINE T operator _OPERATOR_##=(DifferenceType arg) { \
-                static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
-                return this->Fetch ## _OPERATION_(arg) _OPERATOR_ arg; \
+            #define AMS_UTIL_IMPL_DEFINE_ATOMIC_FETCH_OPERATE_FUNCTION(_OPERATION_, _OPERATOR_, _POINTER_ALLOWED_) \
+            template<MemoryOrder Order = MemoryOrder_SeqCst, bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
+            ALWAYS_INLINE T Fetch ## _OPERATION_(DifferenceType arg) { \
+                static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
+                volatile StorageType * const p = this->GetStoragePointer(); \
+                \
+                StorageType current; \
+                do { \
+                    current = impl::LoadExclusiveForAtomicByMemoryOrder<Order>(p); \
+                } while (AMS_UNLIKELY((!impl::StoreExclusiveForAtomicByMemoryOrder<Order>(p, ConvertToStorage(ConvertToType(current) _OPERATOR_ arg))))); \
+                return ConvertToType(current); \
+            } \
+            \
+            template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
+            ALWAYS_INLINE T operator _OPERATOR_##=(DifferenceType arg) { \
+                static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
+                return this->Fetch ## _OPERATION_ (arg) _OPERATOR_ arg; \
             }
 
             AMS_UTIL_IMPL_DEFINE_ATOMIC_FETCH_OPERATE_FUNCTION(Add, +, true)
@@ -389,23 +389,23 @@ namespace ams::util {
                 return impl::AtomicCompareExchangeStrongImpl<Order>(this->GetStoragePointer(), expected, desired);
             }
 
-            #define AMS_UTIL_IMPL_DEFINE_ATOMIC_FETCH_OPERATE_FUNCTION(_OPERATION_, _OPERATOR_, _POINTER_ALLOWED_) \
-            template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
-            ALWAYS_INLINE T Fetch ## _OPERATION_(DifferenceType arg) const { \
-                static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
-                volatile StorageType * const p = this->GetStoragePointer(); \
-                \
-                StorageType current; \
-                do { \
-                    current = impl::LoadAcquireExclusiveForAtomic(p); \
-                } while (AMS_UNLIKELY(!impl::StoreReleaseExclusiveForAtomic(p, ConvertToStorage(ConvertToType(current) _OPERATOR_ arg)))); \
-                return ConvertToType(current); \
-            } \
-            \
-            template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
-            ALWAYS_INLINE T operator _OPERATOR_##=(DifferenceType arg) const { \
-                static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
-                return this->Fetch ## _OPERATION_(arg) _OPERATOR_ arg; \
+            #define AMS_UTIL_IMPL_DEFINE_ATOMIC_FETCH_OPERATE_FUNCTION(_OPERATION_, _OPERATOR_, _POINTER_ALLOWED_) \
+            template<MemoryOrder Order = MemoryOrder_SeqCst, bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
+            ALWAYS_INLINE T Fetch ## _OPERATION_(DifferenceType arg) const { \
+                static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
+                volatile StorageType * const p = this->GetStoragePointer(); \
+                \
+                StorageType current; \
+                do { \
+                    current = impl::LoadExclusiveForAtomicByMemoryOrder<Order>(p); \
+                } while (AMS_UNLIKELY((!impl::StoreExclusiveForAtomicByMemoryOrder<Order>(p, ConvertToStorage(ConvertToType(current) _OPERATOR_ arg))))); \
+                return ConvertToType(current); \
+            } \
+            \
+            template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
+            ALWAYS_INLINE T operator _OPERATOR_##=(DifferenceType arg) const { \
+                static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
+                return this->Fetch ## _OPERATION_(arg) _OPERATOR_ arg; \
             }
 
             AMS_UTIL_IMPL_DEFINE_ATOMIC_FETCH_OPERATE_FUNCTION(Add, +, true)
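Note on the kern_k_thread.hpp half of this diff: exception_flags lives in a per-thread StackParameters block that can be observed from more than one core, so the old plain-u8 `flags |= flag` / `flags &= ~flag` sequences were non-atomic read-modify-writes that could lose a concurrent update to another bit in the same byte; switching the field to util::Atomic<u8> and routing the helpers through FetchOr/FetchAnd/Load closes that window. A minimal stand-alone sketch of the same pattern, using std::atomic<uint8_t> as a stand-in for util::Atomic<u8> (the flag names/values below are hypothetical illustrations, not Atmosphere's definitions):

#include <atomic>
#include <cstdint>

// Hypothetical flag values for illustration only.
enum ExceptionFlag : uint8_t {
    ExceptionFlag_IsCallingSvc         = (1u << 0),
    ExceptionFlag_IsInExceptionHandler = (1u << 1),
};

struct StackParameters {
    std::atomic<uint8_t> exception_flags{0}; // stand-in for util::Atomic<u8>
};

// Atomic OR, analogous to FetchOr: two cores setting different flags in the
// same byte concurrently cannot lose either update.
inline void SetExceptionFlag(StackParameters &sp, ExceptionFlag flag) {
    sp.exception_flags.fetch_or(flag);
}

// Atomic AND with the complement, analogous to FetchAnd(~flag).
inline void ClearExceptionFlag(StackParameters &sp, ExceptionFlag flag) {
    sp.exception_flags.fetch_and(static_cast<uint8_t>(~flag));
}

// Plain atomic load, analogous to Load().
inline bool IsExceptionFlagSet(const StackParameters &sp, ExceptionFlag flag) {
    return (sp.exception_flags.load() & flag) != 0;
}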
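Note on the util_atomic.hpp half: both hunks rewrite the same fetch-operate macro (once for the mutable atomic, once for the const-reference flavor) so that, going by the new ...ByMemoryOrder names, the exclusive load/store pair is chosen from the caller's memory order rather than hard-coded to the acquire/release pair. A rough sketch of the retry loop the old path expands to on AArch64 for byte-sized storage, with hypothetical helper names of my own (LDAXRB/STLXRB are the instructions behind LoadAcquireExclusiveForAtomic/StoreReleaseExclusiveForAtomic; AArch64-only, GCC/Clang inline asm):

#include <cstdint>

// Load-exclusive with acquire semantics (LDAXRB): reads the byte and arms
// this core's exclusive monitor for the address.
static inline uint8_t LoadAcquireExclusiveByte(volatile uint8_t *p) {
    uint8_t v;
    __asm__ __volatile__("ldaxrb %w0, [%1]" : "=r"(v) : "r"(p) : "memory");
    return v;
}

// Store-exclusive with release semantics (STLXRB): succeeds only if the
// monitor is still armed, i.e. no other observer wrote the location meanwhile.
static inline bool StoreReleaseExclusiveByte(volatile uint8_t *p, uint8_t v) {
    int failed;
    __asm__ __volatile__("stlxrb %w0, %w1, [%2]" : "=&r"(failed) : "r"(v), "r"(p) : "memory");
    return failed == 0;
}

// The shape of FetchOr after macro expansion: retry until the exclusive store
// lands, then return the pre-operation value.
static inline uint8_t FetchOrByte(volatile uint8_t *p, uint8_t arg) {
    uint8_t current;
    do {
        current = LoadAcquireExclusiveByte(p);
    } while (!StoreReleaseExclusiveByte(p, static_cast<uint8_t>(current | arg)));
    return current;
}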