kern: exception flags are now atomic.

This is a really weird one: they didn't actually update the assembly
that modifies these flags, which still uses plain ldrb/orr/strb
sequences. But every access to them from C++ is now an atomic
ldxrb/stxrb loop. Maybe they just forgot to update the asm?
Michael Scire, 2025-11-11 19:34:34 -07:00 (committed by SciresM)
parent 2a44550dbe
commit ac382f69e7
2 changed files with 38 additions and 38 deletions
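
For context on the codegen the message is contrasting: a minimal sketch (plain std::atomic standing in for util::Atomic; not Atmosphère code) of how GCC/Clang typically lower the two access patterns for a u8 on AArch64 without LSE atomics:

    #include <atomic>
    #include <cstdint>

    // Plain read-modify-write: typically lowers to ldrb/orr/strb.
    // A concurrent writer that slips between the ldrb and the strb
    // has its update silently overwritten.
    void set_flag_plain(volatile uint8_t *flags, uint8_t flag) {
        *flags |= flag;
    }

    // Atomic read-modify-write: without LSE, this lowers to an
    // ldxrb/stxrb exclusive loop that retries until the
    // store-exclusive succeeds.
    void set_flag_atomic(std::atomic<uint8_t> *flags, uint8_t flag) {
        flags->fetch_or(flag, std::memory_order_relaxed);
    }

That asymmetry is what makes the mix odd: an ldrb/orr/strb writer in asm can still lose a concurrent update no matter how careful the C++ side is, so the new atomicity only holds between C++ accesses.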


@@ -105,7 +105,7 @@ namespace ams::kern {
     util::Atomic<u8> dpc_flags;
     u8               current_svc_id;
     u8               reserved_2c;
-    u8               exception_flags;
+    util::Atomic<u8> exception_flags;
     bool             is_pinned;
     u8               reserved_2f;
     u8               reserved_30[0x10];
@@ -417,17 +417,17 @@ namespace ams::kern {
 private:
     ALWAYS_INLINE void SetExceptionFlag(ExceptionFlag flag) {
         MESOSPHERE_ASSERT_THIS();
-        this->GetStackParameters().exception_flags |= flag;
+        this->GetStackParameters().exception_flags.FetchOr<std::memory_order_relaxed>(flag);
     }
 
     ALWAYS_INLINE void ClearExceptionFlag(ExceptionFlag flag) {
         MESOSPHERE_ASSERT_THIS();
-        this->GetStackParameters().exception_flags &= ~flag;
+        this->GetStackParameters().exception_flags.FetchAnd<std::memory_order_relaxed>(~flag);
     }
 
     ALWAYS_INLINE bool IsExceptionFlagSet(ExceptionFlag flag) const {
         MESOSPHERE_ASSERT_THIS();
-        return this->GetStackParameters().exception_flags & flag;
+        return this->GetStackParameters().exception_flags.Load<std::memory_order_relaxed>() & flag;
     }
 public:
     /* ALWAYS_INLINE void SetCallingSvc() { return this->SetExceptionFlag(ExceptionFlag_IsCallingSvc); } */

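Re-expressed over std::atomic for illustration (the ExceptionFlag bit value below is hypothetical, and util::Atomic takes the memory order as a template argument rather than a function parameter), the new helpers behave like:

    #include <atomic>
    #include <cstdint>

    enum ExceptionFlag : uint8_t {
        ExceptionFlag_IsCallingSvc = (1 << 0),  // hypothetical bit assignment
    };

    struct StackParametersSketch {
        std::atomic<uint8_t> exception_flags{0};
    };

    // Mirrors SetExceptionFlag/ClearExceptionFlag/IsExceptionFlagSet after
    // this commit: relaxed atomic RMWs in place of plain |=, &=, and &.
    void SetExceptionFlag(StackParametersSketch &sp, ExceptionFlag flag) {
        sp.exception_flags.fetch_or(flag, std::memory_order_relaxed);
    }

    void ClearExceptionFlag(StackParametersSketch &sp, ExceptionFlag flag) {
        sp.exception_flags.fetch_and(static_cast<uint8_t>(~flag), std::memory_order_relaxed);
    }

    bool IsExceptionFlagSet(const StackParametersSketch &sp, ExceptionFlag flag) {
        return (sp.exception_flags.load(std::memory_order_relaxed) & flag) != 0;
    }

Relaxed ordering makes each bit update atomic without ordering it against surrounding accesses, which fits flags that are set and tested independently rather than used to publish other data.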

@@ -285,23 +285,23 @@ namespace ams::util {
         return impl::AtomicCompareExchangeStrongImpl<Order, T>(this->GetStoragePointer(), expected, desired);
     }
 
-#define AMS_UTIL_IMPL_DEFINE_ATOMIC_FETCH_OPERATE_FUNCTION(_OPERATION_, _OPERATOR_, _POINTER_ALLOWED_) \
-    template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
-    ALWAYS_INLINE T Fetch ## _OPERATION_(DifferenceType arg) { \
-        static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
-        volatile StorageType * const p = this->GetStoragePointer(); \
-        \
-        StorageType current; \
-        do { \
-            current = impl::LoadAcquireExclusiveForAtomic<StorageType>(p); \
-        } while (AMS_UNLIKELY(!impl::StoreReleaseExclusiveForAtomic<StorageType>(p, ConvertToStorage(ConvertToType(current) _OPERATOR_ arg)))); \
-        return ConvertToType(current); \
-    } \
-    \
-    template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
-    ALWAYS_INLINE T operator _OPERATOR_##=(DifferenceType arg) { \
-        static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
-        return this->Fetch ## _OPERATION_(arg) _OPERATOR_ arg; \
+#define AMS_UTIL_IMPL_DEFINE_ATOMIC_FETCH_OPERATE_FUNCTION(_OPERATION_, _OPERATOR_, _POINTER_ALLOWED_) \
+    template<std::memory_order Order = std::memory_order_seq_cst, bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
+    ALWAYS_INLINE T Fetch ## _OPERATION_(DifferenceType arg) { \
+        static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
+        volatile StorageType * const p = this->GetStoragePointer(); \
+        \
+        StorageType current; \
+        do { \
+            current = impl::LoadExclusiveForAtomicByMemoryOrder<Order, StorageType>(p); \
+        } while (AMS_UNLIKELY((!impl::StoreExclusiveForAtomicByMemoryOrder<Order, StorageType>(p, ConvertToStorage(ConvertToType(current) _OPERATOR_ arg))))); \
+        return ConvertToType(current); \
+    } \
+    \
+    template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
+    ALWAYS_INLINE T operator _OPERATOR_##=(DifferenceType arg) { \
+        static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
+        return this->Fetch ## _OPERATION_(arg) _OPERATOR_ arg; \
     }
 
 AMS_UTIL_IMPL_DEFINE_ATOMIC_FETCH_OPERATE_FUNCTION(Add, +, true)
@@ -389,23 +389,23 @@ namespace ams::util {
         return impl::AtomicCompareExchangeStrongImpl<Order, T>(this->GetStoragePointer(), expected, desired);
     }
 
-#define AMS_UTIL_IMPL_DEFINE_ATOMIC_FETCH_OPERATE_FUNCTION(_OPERATION_, _OPERATOR_, _POINTER_ALLOWED_) \
-    template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
-    ALWAYS_INLINE T Fetch ## _OPERATION_(DifferenceType arg) const { \
-        static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
-        volatile StorageType * const p = this->GetStoragePointer(); \
-        \
-        StorageType current; \
-        do { \
-            current = impl::LoadAcquireExclusiveForAtomic<StorageType>(p); \
-        } while (AMS_UNLIKELY(!impl::StoreReleaseExclusiveForAtomic<StorageType>(p, ConvertToStorage(ConvertToType(current) _OPERATOR_ arg)))); \
-        return ConvertToType(current); \
-    } \
-    \
-    template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
-    ALWAYS_INLINE T operator _OPERATOR_##=(DifferenceType arg) const { \
-        static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
-        return this->Fetch ## _OPERATION_(arg) _OPERATOR_ arg; \
+#define AMS_UTIL_IMPL_DEFINE_ATOMIC_FETCH_OPERATE_FUNCTION(_OPERATION_, _OPERATOR_, _POINTER_ALLOWED_) \
+    template<std::memory_order Order = std::memory_order_seq_cst, bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
+    ALWAYS_INLINE T Fetch ## _OPERATION_(DifferenceType arg) const { \
+        static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
+        volatile StorageType * const p = this->GetStoragePointer(); \
+        \
+        StorageType current; \
+        do { \
+            current = impl::LoadExclusiveForAtomicByMemoryOrder<Order, StorageType>(p); \
+        } while (AMS_UNLIKELY((!impl::StoreExclusiveForAtomicByMemoryOrder<Order, StorageType>(p, ConvertToStorage(ConvertToType(current) _OPERATOR_ arg))))); \
+        return ConvertToType(current); \
+    } \
+    \
+    template<bool Enable = (IsIntegral || (_POINTER_ALLOWED_ && IsPointer)), typename = typename std::enable_if<Enable, void>::type> \
+    ALWAYS_INLINE T operator _OPERATOR_##=(DifferenceType arg) const { \
+        static_assert(Enable == (IsIntegral || (_POINTER_ALLOWED_ && IsPointer))); \
+        return this->Fetch ## _OPERATION_(arg) _OPERATOR_ arg; \
     }
 
 AMS_UTIL_IMPL_DEFINE_ATOMIC_FETCH_OPERATE_FUNCTION(Add, +, true)
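
The helpers the new macro calls aren't in this diff, but judging by the names they replace (LoadAcquireExclusiveForAtomic/StoreReleaseExclusiveForAtomic), LoadExclusiveForAtomicByMemoryOrder and StoreExclusiveForAtomicByMemoryOrder presumably pick between plain and acquire/release exclusives based on Order. A hypothetical sketch of that dispatch for the u8 case on AArch64 (names and structure invented; the real helpers are Atmosphère's own inline-asm wrappers):

    #include <atomic>
    #include <cstdint>

    // Hypothetical: ldxrb when no acquire semantics are requested,
    // ldaxrb otherwise.
    template<std::memory_order Order>
    inline uint8_t LoadExclusiveByOrder(volatile uint8_t *p) {
        uint8_t v;
        if constexpr (Order == std::memory_order_relaxed || Order == std::memory_order_release) {
            __asm__ __volatile__("ldxrb %w[v], %[p]"  : [v] "=r"(v) : [p] "Q"(*p) : "memory");
        } else {
            __asm__ __volatile__("ldaxrb %w[v], %[p]" : [v] "=r"(v) : [p] "Q"(*p) : "memory");
        }
        return v;
    }

    // Hypothetical: stxrb when no release semantics are requested,
    // stlxrb otherwise. Returns true on success; the store-exclusive
    // writes 0 to the status register when the reservation held.
    template<std::memory_order Order>
    inline bool StoreExclusiveByOrder(volatile uint8_t *p, uint8_t v) {
        uint32_t status;
        if constexpr (Order == std::memory_order_relaxed || Order == std::memory_order_acquire) {
            __asm__ __volatile__("stxrb %w[s], %w[v], %[p]"
                                 : [s] "=&r"(status), [p] "+Q"(*p) : [v] "r"(v) : "memory");
        } else {
            __asm__ __volatile__("stlxrb %w[s], %w[v], %[p]"
                                 : [s] "=&r"(status), [p] "+Q"(*p) : [v] "r"(v) : "memory");
        }
        return status == 0;
    }

With Order = std::memory_order_relaxed at every kernel call site above, FetchOr/FetchAnd compile down to exactly the ldxrb/stxrb loop the commit message describes, which is weaker (and cheaper) than the ldaxrb/stlxrb pair the old unconditional acquire/release helpers emitted.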