From aecbd66742f43ccfcac04ca4143fcc68ad834320 Mon Sep 17 00:00:00 2001 From: KJ Tsanaktsidis Date: Sun, 19 Nov 2023 22:54:01 +1100 Subject: [PATCH] Add RUBY_ATOMIC_{PTR_,}LOAD macros for atomic loads This can already be emulated by doing an atomic fetch_add of zero, but this is more explicit. [Bug #19994] --- include/ruby/atomic.h | 51 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/include/ruby/atomic.h b/include/ruby/atomic.h index 3eb80fbf7d..eb106631f6 100644 --- a/include/ruby/atomic.h +++ b/include/ruby/atomic.h @@ -138,6 +138,15 @@ typedef unsigned int rb_atomic_t; #define RUBY_ATOMIC_CAS(var, oldval, newval) \ rbimpl_atomic_cas(&(var), (oldval), (newval)) +/** + * Atomic load. This loads `var` with an atomic intrinsic and returns + * its value. + * + * @param var A variable of ::rb_atomic_t + * @return What was stored in `var` + */ +#define RUBY_ATOMIC_LOAD(var) rbimpl_atomic_load(&(var)) + /** + * Identical to #RUBY_ATOMIC_EXCHANGE, except for the return type. + * @@ -279,6 +288,17 @@ typedef unsigned int rb_atomic_t; #define RUBY_ATOMIC_PTR_EXCHANGE(var, val) \ RBIMPL_CAST(rbimpl_atomic_ptr_exchange((void **)&(var), (void *)val)) +/** + * Identical to #RUBY_ATOMIC_LOAD, except it expects its arguments are `void*`. + * There are cases where ::rb_atomic_t is 32bit while `void*` is 64bit. This + * should be used for size related operations to support such platforms. + * + * @param var A variable of `void*` + * @return The value of `var` (without tearing) + */ +#define RUBY_ATOMIC_PTR_LOAD(var) \ + RBIMPL_CAST(rbimpl_atomic_ptr_load((void **)&var)) + /** + * Identical to #RUBY_ATOMIC_CAS, except it expects its arguments are `void*`. + * There are cases where ::rb_atomic_t is 32bit while `void*` is 64bit. 
This @@ -746,6 +766,21 @@ rbimpl_atomic_value_exchange(volatile VALUE *ptr, VALUE val) return RBIMPL_CAST((VALUE)sret); } +RBIMPL_ATTR_ARTIFICIAL() +RBIMPL_ATTR_NOALIAS() +RBIMPL_ATTR_NONNULL((1)) +static inline rb_atomic_t +rbimpl_atomic_load(volatile rb_atomic_t *ptr) +{ +#if 0 + +#elif defined(HAVE_GCC_ATOMIC_BUILTINS) + return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); +#else + return rbimpl_atomic_fetch_add(ptr, 0); +#endif +} + RBIMPL_ATTR_ARTIFICIAL() RBIMPL_ATTR_NOALIAS() RBIMPL_ATTR_NONNULL((1)) @@ -872,6 +907,22 @@ rbimpl_atomic_ptr_cas(void **ptr, const void *oldval, const void *newval) #endif } +RBIMPL_ATTR_ARTIFICIAL() +RBIMPL_ATTR_NOALIAS() +RBIMPL_ATTR_NONNULL((1)) +static inline void * +rbimpl_atomic_ptr_load(void **ptr) +{ +#if 0 + +#elif defined(HAVE_GCC_ATOMIC_BUILTINS) + return __atomic_load_n(ptr, __ATOMIC_SEQ_CST); +#else + void *val = *ptr; + return rbimpl_atomic_ptr_cas(ptr, val, val); +#endif +} + RBIMPL_ATTR_ARTIFICIAL() RBIMPL_ATTR_NOALIAS() RBIMPL_ATTR_NONNULL((1))