Merge branch 'atomics-2' into 'master'

Atomics fixes

Closes #932

See merge request cairo/cairo!654
This commit is contained in:
Luca Bacci 2026-05-02 14:49:34 +00:00
commit 30d7b8dec4
7 changed files with 134 additions and 95 deletions

View file

@@ -766,22 +766,28 @@ endforeach
extra_link_args += pthread_link_args
# Atomics are an optional feature in C11. Also need to check that C11 atomics are lock free.
# On Windows we use the Interlocked family of functions
if host_machine.system() != 'windows'
if cc.links(files('meson-cc-tests/atomic-ops-c11.c'), name: 'Atomic ops: c11')
conf.set('HAVE_C11_ATOMIC_PRIMITIVES', 1)
elif cc.links(files('meson-cc-tests/atomic-ops-cxx11.c'), name: 'Atomic ops: cxx11')
conf.set('HAVE_CXX11_ATOMIC_PRIMITIVES', 1)
elif cc.links(files('meson-cc-tests/atomic-ops-gcc-legacy.c'), name: 'Atomic ops: gcc legacy')
conf.set('HAVE_GCC_LEGACY_ATOMICS', 1)
elif cc.has_header('atomic_ops.h')
conf.set('HAVE_LIB_ATOMIC_OPS', 1)
elif cc.has_header('libkern/OSAtomic.h')
conf.set('HAVE_OS_ATOMIC_OPS', 1)
else
warning('Atomic ops not supported.')
endif
cpp_enabled = host_machine.system() == 'windows'
if not cpp_enabled and cc.links(files('meson-cc-tests/atomic-ops-c11.c'), name: 'Atomic ops: c11')
# Currently we avoid C11 atomics when using both C and C++. The standards
# do not guarantee compatibility between C11 atomics and C++11 std::atomic
# (though effort is underway, see C++/N2741). We can enable this for selected
# compilers over time.
#
# When not using C++, check if C11 atomics are available and whether atomic
# ints and pointers are lock-free.
conf.set('HAVE_C11_ATOMIC_PRIMITIVES', 1)
elif cc.links(files('meson-cc-tests/atomic-ops-cxx11.c'), name: 'Atomic ops: cxx11')
conf.set('HAVE_CXX11_ATOMIC_PRIMITIVES', 1)
elif cc.links(files('meson-cc-tests/atomic-ops-gcc-legacy.c'), name: 'Atomic ops: gcc legacy')
conf.set('HAVE_GCC_LEGACY_ATOMICS', 1)
elif host_machine.system() != 'windows' and dependency('atomic_ops', required: false).found()
internal_deps += [dependency('atomic_ops')]
conf.set('HAVE_LIB_ATOMIC_OPS', 1)
elif host_machine.system() == 'darwin' and cc.has_header('libkern/OSAtomic.h')
conf.set('HAVE_OS_ATOMIC_OPS', 1)
elif not cc.has_define('_MSC_VER')
warning('Atomic ops not supported.')
endif
test_mkdir_c_args = []
@@ -801,10 +807,6 @@ else
conf.set('HAVE_MKDIR', 0)
endif
if not ['x86', 'x86_64'].contains(host_machine.cpu_family())
conf.set('ATOMIC_OP_NEEDS_MEMORY_BARRIER', 1)
endif
have_ld_preload = ['linux', 'freebsd', 'darwin', 'dragonfly', 'sunos'].contains(host_machine.system())
if have_ld_preload and zlib_dep.found() and conf.get('CAIRO_HAS_REAL_PTHREAD', 0) == 1 and conf.get('CAIRO_HAS_DLSYM', 0) == 1

View file

@@ -75,9 +75,9 @@ _cairo_atomic_int_set_relaxed (cairo_atomic_int_t *x, int val)
}
static cairo_always_inline void *
_cairo_atomic_ptr_get (cairo_atomic_intptr_t *x)
_cairo_atomic_ptr_get_relaxed (cairo_atomic_intptr_t *x)
{
return atomic_load_explicit (x, memory_order_seq_cst);
return atomic_load_explicit (x, memory_order_relaxed);
}
# define _cairo_atomic_int_inc(x) ((void) atomic_fetch_add_explicit(x, 1, memory_order_seq_cst))
@@ -169,9 +169,9 @@ _cairo_atomic_int_set_relaxed (cairo_atomic_int_t *x, int val)
}
static cairo_always_inline void *
_cairo_atomic_ptr_get (cairo_atomic_intptr_t *x)
_cairo_atomic_ptr_get_relaxed (cairo_atomic_intptr_t *x)
{
return (void*)__atomic_load_n(x, __ATOMIC_SEQ_CST);
return (void*)__atomic_load_n(x, __ATOMIC_RELAXED);
}
# define _cairo_atomic_int_inc(x) ((void) __atomic_fetch_add(x, 1, __ATOMIC_SEQ_CST))
@@ -233,30 +233,31 @@ _cairo_atomic_ptr_cmpxchg_return_old_impl(cairo_atomic_intptr_t *x, void *oldv,
typedef int cairo_atomic_int_t;
typedef intptr_t cairo_atomic_intptr_t;
static cairo_always_inline int
_cairo_atomic_int_get (cairo_atomic_int_t *x)
{
__sync_synchronize ();
return *x;
}
static cairo_always_inline int
_cairo_atomic_int_get_relaxed (cairo_atomic_int_t *x)
{
return *x;
return *(volatile int *)x;
}
static cairo_always_inline void
_cairo_atomic_int_set_relaxed (cairo_atomic_int_t *x, int val)
{
*x = val;
*(volatile int *)x = val;
}
static cairo_always_inline int
_cairo_atomic_int_get (cairo_atomic_int_t *x)
{
int result = _cairo_atomic_int_get_relaxed (x);
__sync_synchronize ();
return result;
}
static cairo_always_inline void *
_cairo_atomic_ptr_get (cairo_atomic_intptr_t *x)
_cairo_atomic_ptr_get_relaxed (cairo_atomic_intptr_t *x)
{
__sync_synchronize ();
return (void*)*x;
return *(void * volatile *)x;
}
# define _cairo_atomic_int_inc(x) ((void) __sync_fetch_and_add(x, 1))
@@ -274,26 +275,35 @@ _cairo_atomic_ptr_get (cairo_atomic_intptr_t *x)
#endif /* HAVE_GCC_LEGACY_ATOMICS */
#if HAVE_LIB_ATOMIC_OPS
#include <atomic_ops.h>
#include <limits.h>
#define HAS_ATOMIC_OPS 1
typedef AO_t cairo_atomic_int_t;
typedef int cairo_atomic_int_t;
# define _cairo_atomic_int_get(x) (AO_load_full (x))
# define _cairo_atomic_int_get_relaxed(x) (AO_load_full (x))
# define _cairo_atomic_int_set_relaxed(x, val) (AO_store_full ((x), (val)))
/* Casts from signed to unsigned must not change representation */
static_assert((unsigned)-1 == UINT_MAX,
"We require two's complement representation of signed integrals");
# define _cairo_atomic_int_inc(x) ((void) AO_fetch_and_add1_full(x))
# define _cairo_atomic_int_dec(x) ((void) AO_fetch_and_sub1_full(x))
# define _cairo_atomic_int_dec_and_test(x) (AO_fetch_and_sub1_full(x) == 1)
# define _cairo_atomic_int_cmpxchg(x, oldv, newv) AO_compare_and_swap_full(x, oldv, newv)
# define _cairo_atomic_int_get(x) ((int)AO_int_load_full ((unsigned *)(x)))
# define _cairo_atomic_int_get_relaxed(x) ((int)AO_int_load ((unsigned *)(x)))
# define _cairo_atomic_int_set_relaxed(x, val) (AO_int_store ((unsigned *)(x), (unsigned)(val)))
typedef intptr_t cairo_atomic_intptr_t;
# define _cairo_atomic_int_inc(x) ((void) AO_int_fetch_and_add1_full((unsigned *)(x)))
# define _cairo_atomic_int_dec(x) ((void) AO_int_fetch_and_sub1_full((unsigned *)(x)))
# define _cairo_atomic_int_dec_and_test(x) (AO_int_fetch_and_sub1_full((unsigned *)(x)) == 1U)
# define _cairo_atomic_int_cmpxchg(x, oldv, newv) AO_int_compare_and_swap_full((unsigned *)x, (unsigned)(oldv), (unsigned)(newv))
# define _cairo_atomic_int_cmpxchg_return_old(x, oldv, newv) AO_int_fetch_compare_and_swap_full((unsigned *)x, (unsigned)(oldv), (unsigned)(newv))
# define _cairo_atomic_ptr_get(x) _cairo_atomic_intptr_to_voidptr (AO_load_full (x))
# define _cairo_atomic_ptr_cmpxchg(x, oldv, newv) \
_cairo_atomic_int_cmpxchg ((cairo_atomic_intptr_t*)(x), (cairo_atomic_intptr_t)oldv, (cairo_atomic_intptr_t)newv)
typedef AO_t cairo_atomic_intptr_t;
static_assert (sizeof (AO_t) >= sizeof (void *), "AO_t cannot be used for pointers");
# define _cairo_atomic_ptr_get_relaxed(x) _cairo_atomic_intptr_to_voidptr (AO_load (x))
# define _cairo_atomic_ptr_cmpxchg(x, oldv, newv) AO_compare_and_swap_full(x, (AO_t)oldv, (AO_t)newv)
# define _cairo_atomic_ptr_cmpxchg_return_old(x, oldv, newv) AO_fetch_compare_and_swap_full(x, (AO_t)oldv, (AO_t)newv)
#endif
@@ -305,9 +315,17 @@ typedef intptr_t cairo_atomic_intptr_t;
typedef int32_t cairo_atomic_int_t;
typedef intptr_t cairo_atomic_intptr_t;
# define _cairo_atomic_int_get(x) (OSMemoryBarrier(), *(x))
# define _cairo_atomic_int_get_relaxed(x) *(x)
# define _cairo_atomic_int_set_relaxed(x, val) *(x) = (val)
# define _cairo_atomic_int_get_relaxed(x) *(volatile int32_t *)(x)
# define _cairo_atomic_int_set_relaxed(x, val) *(volatile int32_t *)(x) = (val)
static cairo_always_inline int
_cairo_atomic_int_get (cairo_atomic_int_t *x)
{
int result = _cairo_atomic_int_get_relaxed (x);
OSMemoryBarrier ();
return result;
}
# define _cairo_atomic_int_inc(x) ((void) OSAtomicIncrement32Barrier (x))
# define _cairo_atomic_int_dec(x) ((void) OSAtomicDecrement32Barrier (x))
@@ -326,59 +344,92 @@ typedef intptr_t cairo_atomic_intptr_t;
#error No matching integer pointer type
#endif
# define _cairo_atomic_ptr_get(x) (OSMemoryBarrier(), *(x))
# define _cairo_atomic_ptr_get_relaxed(x) (*(void * volatile *)(x))
#endif /* HAVE_OS_ATOMIC_OPS */
#if !defined(HAS_ATOMIC_OPS) && defined(_WIN32)
#if !defined(HAS_ATOMIC_OPS) && defined(_MSC_VER)
#include <windows.h>
#include <intrin.h>
#define HAS_ATOMIC_OPS 1
typedef LONG cairo_atomic_int_t;
typedef PVOID cairo_atomic_intptr_t;
# define _cairo_atomic_int_get_relaxed(x) __iso_volatile_load32 ((__int32 *) x)
# define _cairo_atomic_int_set_relaxed(x, val) __iso_volatile_store32 ((__int32 *) x, (__int32) val)
static cairo_always_inline int
_cairo_atomic_int_get (cairo_atomic_int_t *x)
{
int result = _cairo_atomic_int_get_relaxed (x);
#if defined (_M_IX86) || defined (_M_AMD64)
_ReadWriteBarrier (); /* compiler-only */
#else
MemoryBarrier ();
return *x;
}
#endif
# define _cairo_atomic_int_get_relaxed(x) *(x)
# define _cairo_atomic_int_set_relaxed(x, val) *(x) = (val)
return result;
}
# define _cairo_atomic_int_inc(x) ((void) InterlockedIncrement (x))
# define _cairo_atomic_int_dec(x) ((void) InterlockedDecrement (x))
# define _cairo_atomic_int_dec_and_test(x) (InterlockedDecrement (x) == 0)
static cairo_always_inline cairo_bool_t
_cairo_atomic_int_cmpxchg (cairo_atomic_int_t *x,
int oldv,
int newv)
_cairo_atomic_int_cmpxchg_impl (cairo_atomic_int_t *x,
int oldv,
int newv)
{
return InterlockedCompareExchange (x, (LONG)newv, (LONG)oldv) == oldv;
}
static cairo_always_inline void *
_cairo_atomic_ptr_get (cairo_atomic_intptr_t *x)
#define _cairo_atomic_int_cmpxchg(x, oldv, newv) \
_cairo_atomic_int_cmpxchg_impl(x, oldv, newv)
static cairo_always_inline int
_cairo_atomic_int_cmpxchg_return_old_impl (cairo_atomic_int_t *x,
int oldv,
int newv)
{
MemoryBarrier ();
return (void *) *x;
return (int) InterlockedCompareExchange (x, (LONG)newv, (LONG)oldv);
}
#define _cairo_atomic_int_cmpxchg_return_old(x, oldv, newv) \
_cairo_atomic_int_cmpxchg_return_old_impl(x, oldv, newv)
static cairo_always_inline void *
_cairo_atomic_ptr_get_relaxed (cairo_atomic_intptr_t *x)
{
#if SIZEOF_VOID_P == 4
return (void *) __iso_volatile_load32 ((__int32 *) (void *) x);
#elif SIZEOF_VOID_P == 8
return (void *) __iso_volatile_load64 ((__int64 *) (void *) x);
#else
#error "unknown pointer size"
#endif
}
static cairo_always_inline cairo_bool_t
_cairo_atomic_ptr_cmpxchg (cairo_atomic_intptr_t *x, void *oldv, void *newv)
_cairo_atomic_ptr_cmpxchg_impl (cairo_atomic_intptr_t *x, void *oldv, void *newv)
{
return InterlockedCompareExchangePointer (x, newv, oldv) == oldv;
}
#define _cairo_atomic_ptr_cmpxchg(x, oldv, newv) \
_cairo_atomic_ptr_cmpxchg_impl(x, oldv, newv)
static cairo_always_inline void *
_cairo_atomic_ptr_cmpxchg_return_old (cairo_atomic_intptr_t *x, void *oldv, void *newv)
_cairo_atomic_ptr_cmpxchg_return_old_impl (cairo_atomic_intptr_t *x, void *oldv, void *newv)
{
return InterlockedCompareExchangePointer (x, newv, oldv);
}
#define _cairo_atomic_ptr_cmpxchg_return_old(x, oldv, newv) \
_cairo_atomic_ptr_cmpxchg_return_old_impl(x, oldv, newv)
#endif /* !defined(HAS_ATOMIC_OPS) && defined(_WIN32) */
@@ -404,21 +455,17 @@ _cairo_atomic_ptr_cmpxchg_return_old_impl (cairo_atomic_intptr_t *x, void *oldv,
#define _cairo_atomic_int_cmpxchg_return_old(x, oldv, newv) _cairo_atomic_int_cmpxchg_return_old_impl (x, oldv, newv)
#define _cairo_atomic_ptr_cmpxchg_return_old(x, oldv, newv) _cairo_atomic_ptr_cmpxchg_return_old_impl (x, oldv, newv)
#ifdef ATOMIC_OP_NEEDS_MEMORY_BARRIER
cairo_private int
_cairo_atomic_int_get (cairo_atomic_int_t *x);
cairo_private int
_cairo_atomic_int_get_relaxed (cairo_atomic_int_t *x);
void
_cairo_atomic_int_set_relaxed (cairo_atomic_int_t *x, int val);
cairo_private void*
_cairo_atomic_ptr_get(cairo_atomic_intptr_t *x);
#else
# define _cairo_atomic_int_get(x) (*x)
# define _cairo_atomic_int_get_relaxed(x) (*x)
# define _cairo_atomic_int_set_relaxed(x, val) (*x) = (val)
# define _cairo_atomic_ptr_get(x) (*x)
#endif
_cairo_atomic_ptr_get_relaxed(cairo_atomic_intptr_t *x);
#else
@@ -435,7 +482,7 @@ _cairo_atomic_int_cmpxchg_return_old_fallback(cairo_atomic_int_t *x, int oldv, i
int curr;
do {
curr = _cairo_atomic_int_get (x);
curr = _cairo_atomic_int_get_relaxed (x);
} while (curr == oldv && !_cairo_atomic_int_cmpxchg (x, oldv, newv));
return curr;
@@ -447,7 +494,7 @@ _cairo_atomic_ptr_cmpxchg_return_old_fallback(cairo_atomic_intptr_t *x, void *ol
void *curr;
do {
curr = _cairo_atomic_ptr_get (x);
curr = _cairo_atomic_ptr_get_relaxed (x);
} while (curr == oldv && !_cairo_atomic_ptr_cmpxchg (x, oldv, newv));
return curr;

View file

@@ -89,7 +89,6 @@ _cairo_atomic_ptr_cmpxchg_return_old_impl (cairo_atomic_intptr_t *x, void *oldv,
return ret;
}
#ifdef ATOMIC_OP_NEEDS_MEMORY_BARRIER
int
_cairo_atomic_int_get (cairo_atomic_int_t *x)
{
@@ -117,7 +116,7 @@ _cairo_atomic_int_set_relaxed (cairo_atomic_int_t *x, int val)
}
void*
_cairo_atomic_ptr_get (void **x)
_cairo_atomic_ptr_get_relaxed (void **x)
{
void *ret;
@@ -127,6 +126,5 @@ _cairo_atomic_ptr_get (void **x)
return ret;
}
#endif
#endif

View file

@@ -60,7 +60,7 @@ _atomic_fetch (cairo_atomic_intptr_t *slot)
void *ptr;
do {
ptr = _cairo_atomic_ptr_get (slot);
ptr = _cairo_atomic_ptr_get_relaxed (slot);
} while (! _cairo_atomic_ptr_cmpxchg (slot, ptr, NULL));
return ptr;

View file

@@ -798,26 +798,18 @@ _cairo_get_locale_decimal_point (void)
#if defined (HAVE_NEWLOCALE) && defined (HAVE_STRTOD_L)
static cairo_atomic_once_t C_locale_once;
static locale_t C_locale;
static locale_t
get_C_locale (void)
{
locale_t C;
retry:
C = (locale_t) _cairo_atomic_ptr_get ((cairo_atomic_intptr_t *) &C_locale);
if (unlikely (!C)) {
C = newlocale (LC_ALL_MASK, "C", NULL);
if (!_cairo_atomic_ptr_cmpxchg ((cairo_atomic_intptr_t *) &C_locale, NULL, C)) {
freelocale (C_locale);
goto retry;
}
if (_cairo_atomic_init_once_enter (&C_locale_once)) {
C_locale = newlocale (LC_ALL_MASK, "C", NULL);
_cairo_atomic_init_once_leave (&C_locale_once);
}
return C;
return C_locale;
}
double

View file

@@ -63,7 +63,7 @@ CAIRO_MUTEX_DECLARE (_cairo_xlib_display_mutex)
CAIRO_MUTEX_DECLARE (_cairo_xcb_connections_mutex)
#endif
#if !defined (HAS_ATOMIC_OPS) || defined (ATOMIC_OP_NEEDS_MEMORY_BARRIER)
#if !defined (HAS_ATOMIC_OPS)
CAIRO_MUTEX_DECLARE (_cairo_atomic_mutex)
#endif

View file

@@ -50,7 +50,7 @@ typedef struct {
#define CAIRO_REFERENCE_COUNT_INIT(RC, VALUE) ((RC)->ref_count = (VALUE))
#define CAIRO_REFERENCE_COUNT_GET_VALUE(RC) _cairo_atomic_int_get (&(RC)->ref_count)
#define CAIRO_REFERENCE_COUNT_GET_VALUE(RC) _cairo_atomic_int_get_relaxed (&(RC)->ref_count)
#define CAIRO_REFERENCE_COUNT_INVALID_VALUE ((int) -1)
#define CAIRO_REFERENCE_COUNT_INVALID {CAIRO_REFERENCE_COUNT_INVALID_VALUE}