From b3c1e7be61413030705343641a7fabc626258cf0 Mon Sep 17 00:00:00 2001
From: Michael Scire
Date: Tue, 11 Nov 2025 19:34:34 -0700
Subject: [PATCH] kern: exception flags are now atomic.

This is a really weird one, because they didn't actually update the
code which updates these flags in asm; that code still uses plain
ldrb/orr/strb sequences. But every access to these flags via C++ is
now an atomic ldxrb/stxrb loop. Maybe they just forgot to update the
asm?
---
 .../include/mesosphere/kern_k_thread.hpp    |  8 +--
 .../vapours/util/arch/arm64/util_atomic.hpp | 68 +++++++++----------
 2 files changed, 38 insertions(+), 38 deletions(-)

diff --git a/libmesosphere/include/mesosphere/kern_k_thread.hpp b/libmesosphere/include/mesosphere/kern_k_thread.hpp
index 487f3643..8904a6a9 100644
--- a/libmesosphere/include/mesosphere/kern_k_thread.hpp
+++ b/libmesosphere/include/mesosphere/kern_k_thread.hpp
@@ -105,7 +105,7 @@ namespace ams::kern {
                 util::Atomic<u8> dpc_flags;
                 u8 current_svc_id;
                 u8 reserved_2c;
-                u8 exception_flags;
+                util::Atomic<u8> exception_flags;
                 bool is_pinned;
                 u8 reserved_2f;
                 u8 reserved_30[0x10];
@@ -417,17 +417,17 @@ namespace ams::kern {
         private:
             ALWAYS_INLINE void SetExceptionFlag(ExceptionFlag flag) {
                 MESOSPHERE_ASSERT_THIS();
-                this->GetStackParameters().exception_flags |= flag;
+                this->GetStackParameters().exception_flags.FetchOr(flag);
             }
 
             ALWAYS_INLINE void ClearExceptionFlag(ExceptionFlag flag) {
                 MESOSPHERE_ASSERT_THIS();
-                this->GetStackParameters().exception_flags &= ~flag;
+                this->GetStackParameters().exception_flags.FetchAnd(~flag);
             }
 
             ALWAYS_INLINE bool IsExceptionFlagSet(ExceptionFlag flag) const {
                 MESOSPHERE_ASSERT_THIS();
-                return this->GetStackParameters().exception_flags & flag;
+                return this->GetStackParameters().exception_flags.Load() & flag;
             }
         public:
             /* ALWAYS_INLINE void SetCallingSvc() { return this->SetExceptionFlag(ExceptionFlag_IsCallingSvc); } */
diff --git a/libvapours/include/vapours/util/arch/arm64/util_atomic.hpp b/libvapours/include/vapours/util/arch/arm64/util_atomic.hpp
index a06b54b5..c2ac6c52 100644
--- a/libvapours/include/vapours/util/arch/arm64/util_atomic.hpp
+++ b/libvapours/include/vapours/util/arch/arm64/util_atomic.hpp
@@ -285,23 +285,23 @@ namespace ams::util {
                 return impl::AtomicCompareExchangeStrongImpl(this->GetStoragePointer(), expected, desired);
             }
 
-            #define AMS_UTIL_IMPL_DEFINE_ATOMIC_FETCH_OPERATE_FUNCTION(_OPERATION_, _OPERATOR_, _POINTER_ALLOWED_) \
-                template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
-                ALWAYS_INLINE T Fetch ## _OPERATION_(DifferenceType arg) { \
-                    static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
-                    volatile StorageType * const p = this->GetStoragePointer(); \
-                    \
-                    StorageType current; \
-                    do { \
-                        current = impl::LoadAcquireExclusiveForAtomic(p); \
-                    } while (AMS_UNLIKELY(!impl::StoreReleaseExclusiveForAtomic(p, ConvertToStorage(ConvertToType(current) _OPERATOR_ arg)))); \
-                    return ConvertToType(current); \
-                } \
-                \
-                template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
-                ALWAYS_INLINE T operator _OPERATOR_##=(DifferenceType arg) { \
-                    static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
-                    return this->Fetch ## _OPERATION_(arg) _OPERATOR_ arg; \
+            #define AMS_UTIL_IMPL_DEFINE_ATOMIC_FETCH_OPERATE_FUNCTION(_OPERATION_, _OPERATOR_, _POINTER_ALLOWED_) \
+                template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
+                ALWAYS_INLINE T Fetch ## _OPERATION_(DifferenceType arg) { \
+                    static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
+                    volatile StorageType * const p = this->GetStoragePointer(); \
+                    \
+                    StorageType current; \
+                    do { \
+                        current = impl::LoadExclusiveForAtomicByMemoryOrder(p); \
+                    } while (AMS_UNLIKELY((!impl::StoreExclusiveForAtomicByMemoryOrder(p, ConvertToStorage(ConvertToType(current) _OPERATOR_ arg))))); \
+                    return ConvertToType(current); \
+                } \
+                \
+                template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
+                ALWAYS_INLINE T operator _OPERATOR_##=(DifferenceType arg) { \
+                    static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
+                    return this->Fetch ## _OPERATION_(arg) _OPERATOR_ arg; \
                 }
 
             AMS_UTIL_IMPL_DEFINE_ATOMIC_FETCH_OPERATE_FUNCTION(Add, +, true)
@@ -389,23 +389,23 @@ namespace ams::util {
                 return impl::AtomicCompareExchangeStrongImpl(this->GetStoragePointer(), expected, desired);
             }
 
-            #define AMS_UTIL_IMPL_DEFINE_ATOMIC_FETCH_OPERATE_FUNCTION(_OPERATION_, _OPERATOR_, _POINTER_ALLOWED_) \
-                template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
-                ALWAYS_INLINE T Fetch ## _OPERATION_(DifferenceType arg) const { \
-                    static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
-                    volatile StorageType * const p = this->GetStoragePointer(); \
-                    \
-                    StorageType current; \
-                    do { \
-                        current = impl::LoadAcquireExclusiveForAtomic(p); \
-                    } while (AMS_UNLIKELY(!impl::StoreReleaseExclusiveForAtomic(p, ConvertToStorage(ConvertToType(current) _OPERATOR_ arg)))); \
-                    return ConvertToType(current); \
-                } \
-                \
-                template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
-                ALWAYS_INLINE T operator _OPERATOR_##=(DifferenceType arg) const { \
-                    static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
-                    return this->Fetch ## _OPERATION_(arg) _OPERATOR_ arg; \
+            #define AMS_UTIL_IMPL_DEFINE_ATOMIC_FETCH_OPERATE_FUNCTION(_OPERATION_, _OPERATOR_, _POINTER_ALLOWED_) \
+                template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
+                ALWAYS_INLINE T Fetch ## _OPERATION_(DifferenceType arg) const { \
+                    static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
+                    volatile StorageType * const p = this->GetStoragePointer(); \
+                    \
+                    StorageType current; \
+                    do { \
+                        current = impl::LoadExclusiveForAtomicByMemoryOrder(p); \
+                    } while (AMS_UNLIKELY((!impl::StoreExclusiveForAtomicByMemoryOrder(p, ConvertToStorage(ConvertToType(current) _OPERATOR_ arg))))); \
+                    return ConvertToType(current); \
+                } \
+                \
+                template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
+                ALWAYS_INLINE T operator _OPERATOR_##=(DifferenceType arg) const { \
+                    static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
+                    return this->Fetch ## _OPERATION_(arg) _OPERATOR_ arg; \
                 }
 
             AMS_UTIL_IMPL_DEFINE_ATOMIC_FETCH_OPERATE_FUNCTION(Add, +, true)
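
The "atomic ldxrb/stxrb loop" referred to above is the standard AArch64
load-exclusive/store-exclusive read-modify-write pattern. As a rough
illustration only (not part of this patch or of Atmosphere; fetch_or_u8
is a hypothetical standalone name, written with GCC/Clang inline asm),
the FetchOr path now behaves like:

    #include <cstdint>

    // Atomically ORs 'bits' into *p and returns the previous value,
    // using an exclusive-monitor retry loop. The unpatched asm path
    // (plain ldrb/orr/strb) has no monitor, so an update racing between
    // its load and store can be silently lost.
    inline std::uint8_t fetch_or_u8(volatile std::uint8_t *p, std::uint8_t bits) {
        std::uint8_t old;
        std::uint32_t tmp, status;
        __asm__ __volatile__(
            "1: ldxrb   %w[old], %[mem]             \n" /* load-exclusive byte       */
            "   orr     %w[tmp], %w[old], %w[bits]  \n" /* set the requested flags   */
            "   stxrb   %w[status], %w[tmp], %[mem] \n" /* store-exclusive attempt   */
            "   cbnz    %w[status], 1b              \n" /* retry if the store failed */
            : [old] "=&r"(old), [tmp] "=&r"(tmp), [status] "=&r"(status), [mem] "+Q"(*p)
            : [bits] "r"(bits));
        return old;
    }

Because the asm side was left as ldrb/orr/strb, only the C++ accessors
get this loop; a concurrent flag update from the untouched asm can still
race with it, which is the inconsistency the commit message points out.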