From 87f7c60bf7c20079c34cfc54e4db05920d14db56 Mon Sep 17 00:00:00 2001
From: Adrian Johnson
Date: Sat, 27 May 2023 21:21:47 +0930
Subject: [PATCH] Add support for C11 atomics

---
 meson-cc-tests/atomic-ops-c11.c     |  12 ++
 meson.build                         |   8 +-
 src/cairo-atomic-private.h          | 187 ++++++++++++++++++++--------
 src/cairo-atomic.c                  |  20 +--
 src/cairo-freed-pool-private.h      |   6 +-
 src/cairo-freed-pool.c              |   3 +-
 src/cairo-misc.c                    |   4 +-
 src/cairo-mutex-private.h           |   6 +
 src/cairo-mutex.c                   |   4 +
 src/cairo-recording-surface.c       |   2 +-
 src/cairo-reference-count-private.h |   2 +-
 src/cairo-surface.c                 |   2 +-
 src/win32/cairo-win32-device.c      |   2 +-
 13 files changed, 184 insertions(+), 74 deletions(-)
 create mode 100644 meson-cc-tests/atomic-ops-c11.c

diff --git a/meson-cc-tests/atomic-ops-c11.c b/meson-cc-tests/atomic-ops-c11.c
new file mode 100644
index 000000000..4d60e998f
--- /dev/null
+++ b/meson-cc-tests/atomic-ops-c11.c
@@ -0,0 +1,12 @@
+#include <stdatomic.h>
+#include <stdint.h>
+
+/* We require lock free atomics for int and pointers as cairo assumes
+ * an int* can be cast to cairo_atomic_int_t*
+ */
+_Static_assert (ATOMIC_INT_LOCK_FREE == 2, "Lock free atomics not supported");
+_Static_assert (ATOMIC_POINTER_LOCK_FREE == 2, "Lock free atomics not supported");
+
+int atomic_add(atomic_int *i) { return atomic_fetch_add_explicit(i, 1, memory_order_seq_cst); }
+int atomic_cmpxchg(atomic_int *i, int j, int k) { return atomic_compare_exchange_strong_explicit (i, &j, k, memory_order_seq_cst, memory_order_seq_cst); }
+int main(void) { return 0; }
diff --git a/meson.build b/meson.build
index 3a5820640..151c46411 100644
--- a/meson.build
+++ b/meson.build
@@ -726,7 +726,11 @@ endforeach
 
 extra_link_args += pthread_link_args
 
-if cc.links(files('meson-cc-tests/atomic-ops-cxx11.c'), name: 'Atomic ops: cxx11')
+# Atomics are an optional feature in C11. Also need to check that C11 atomics are lock free.
+# Windows can't use C11 atomics as some files are compiled with C++.
+if host_machine.system() != 'windows' and cc.links(files('meson-cc-tests/atomic-ops-c11.c'), name: 'Atomic ops: c11')
+  conf.set('HAVE_C11_ATOMIC_PRIMITIVES', 1)
+elif cc.links(files('meson-cc-tests/atomic-ops-cxx11.c'), name: 'Atomic ops: cxx11')
   conf.set('HAVE_CXX11_ATOMIC_PRIMITIVES', 1)
 elif cc.links(files('meson-cc-tests/atomic-ops-gcc-legacy.c'), name: 'Atomic ops: gcc legacy')
   conf.set('HAVE_GCC_LEGACY_ATOMICS', 1)
@@ -734,6 +738,8 @@ elif cc.has_header('atomic_ops.h')
   conf.set('HAVE_LIB_ATOMIC_OPS', 1)
 elif cc.has_header('libkern/OSAtomic.h')
   conf.set('HAVE_OS_ATOMIC_OPS', 1)
+elif host_machine.system() != 'windows'
+  warning('Atomic ops not supported.')
 endif
 
 test_mkdir_c_args = []
diff --git a/src/cairo-atomic-private.h b/src/cairo-atomic-private.h
index dd49e618b..feba95691 100644
--- a/src/cairo-atomic-private.h
+++ b/src/cairo-atomic-private.h
@@ -47,6 +47,92 @@ CAIRO_BEGIN_DECLS
 
+#if HAVE_C11_ATOMIC_PRIMITIVES
+
+#include <stdatomic.h>
+
+#define HAS_ATOMIC_OPS 1
+
+typedef atomic_int cairo_atomic_int_t;
+typedef _Atomic(void *) cairo_atomic_intptr_t;
+
+static cairo_always_inline int
+_cairo_atomic_int_get (cairo_atomic_int_t *x)
+{
+    return atomic_load_explicit (x, memory_order_seq_cst);
+}
+
+static cairo_always_inline int
+_cairo_atomic_int_get_relaxed (cairo_atomic_int_t *x)
+{
+    return atomic_load_explicit (x, memory_order_relaxed);
+}
+
+static cairo_always_inline void
+_cairo_atomic_int_set_relaxed (cairo_atomic_int_t *x, int val)
+{
+    atomic_store_explicit (x, val, memory_order_relaxed);
+}
+
+static cairo_always_inline void *
+_cairo_atomic_ptr_get (cairo_atomic_intptr_t *x)
+{
+    return atomic_load_explicit (x, memory_order_seq_cst);
+}
+
+# define _cairo_atomic_int_inc(x) ((void) atomic_fetch_add_explicit(x, 1, memory_order_seq_cst))
+# define _cairo_atomic_int_dec(x) ((void) atomic_fetch_sub_explicit(x, 1, memory_order_seq_cst))
+# define _cairo_atomic_int_dec_and_test(x) (atomic_fetch_sub_explicit(x, 1, memory_order_seq_cst) == 1)
+
+
+static cairo_always_inline cairo_bool_t
+_cairo_atomic_int_cmpxchg_impl(cairo_atomic_int_t *x,
+			       int oldv,
+			       int newv)
+{
+    int expected = oldv;
+    return atomic_compare_exchange_strong_explicit (x, &expected, newv, memory_order_seq_cst, memory_order_seq_cst);
+}
+
+#define _cairo_atomic_int_cmpxchg(x, oldv, newv) \
+    _cairo_atomic_int_cmpxchg_impl(x, oldv, newv)
+
+static cairo_always_inline int
+_cairo_atomic_int_cmpxchg_return_old_impl(cairo_atomic_int_t *x,
+					  int oldv,
+					  int newv)
+{
+    int expected = oldv;
+    (void) atomic_compare_exchange_strong_explicit (x, &expected, newv, memory_order_seq_cst, memory_order_seq_cst);
+    return expected;
+}
+
+#define _cairo_atomic_int_cmpxchg_return_old(x, oldv, newv) \
+    _cairo_atomic_int_cmpxchg_return_old_impl(x, oldv, newv)
+
+static cairo_always_inline cairo_bool_t
+_cairo_atomic_ptr_cmpxchg_impl(cairo_atomic_intptr_t *x, void *oldv, void *newv)
+{
+    void *expected = oldv;
+    return atomic_compare_exchange_strong_explicit (x, &expected, newv, memory_order_seq_cst, memory_order_seq_cst);
+}
+
+#define _cairo_atomic_ptr_cmpxchg(x, oldv, newv) \
+    _cairo_atomic_ptr_cmpxchg_impl(x, oldv, newv)
+
+static cairo_always_inline void *
+_cairo_atomic_ptr_cmpxchg_return_old_impl(cairo_atomic_intptr_t *x, void *oldv, void *newv)
+{
+    void *expected = oldv;
+    (void) atomic_compare_exchange_strong_explicit (x, &expected, newv, memory_order_seq_cst, memory_order_seq_cst);
+    return expected;
+}
+
+#define _cairo_atomic_ptr_cmpxchg_return_old(x, oldv, newv) \
+    _cairo_atomic_ptr_cmpxchg_return_old_impl(x, oldv, newv)
+
+#endif /* HAVE_C11_ATOMIC_PRIMITIVES */
+
 /* C++11 atomic primitives were designed to be more flexible than the
  * __sync_* family of primitives. Despite the name, they are available
  * in C as well as C++. The motivating reason for using them is that
@@ -62,55 +148,54 @@ CAIRO_BEGIN_DECLS
 
 #define HAS_ATOMIC_OPS 1
 
 typedef int cairo_atomic_int_t;
+typedef intptr_t cairo_atomic_intptr_t;
 
-static cairo_always_inline cairo_atomic_int_t
+static cairo_always_inline int
 _cairo_atomic_int_get (cairo_atomic_int_t *x)
 {
     return __atomic_load_n(x, __ATOMIC_SEQ_CST);
 }
 
-static cairo_always_inline cairo_atomic_int_t
+static cairo_always_inline int
 _cairo_atomic_int_get_relaxed (cairo_atomic_int_t *x)
 {
     return __atomic_load_n(x, __ATOMIC_RELAXED);
 }
 
 static cairo_always_inline void
-_cairo_atomic_int_set_relaxed (cairo_atomic_int_t *x, cairo_atomic_int_t val)
+_cairo_atomic_int_set_relaxed (cairo_atomic_int_t *x, int val)
 {
     __atomic_store_n(x, val, __ATOMIC_RELAXED);
 }
 
 static cairo_always_inline void *
-_cairo_atomic_ptr_get (void **x)
+_cairo_atomic_ptr_get (cairo_atomic_intptr_t *x)
 {
-    return __atomic_load_n(x, __ATOMIC_SEQ_CST);
+    return (void*)__atomic_load_n(x, __ATOMIC_SEQ_CST);
 }
 
 # define _cairo_atomic_int_inc(x) ((void) __atomic_fetch_add(x, 1, __ATOMIC_SEQ_CST))
 # define _cairo_atomic_int_dec(x) ((void) __atomic_fetch_sub(x, 1, __ATOMIC_SEQ_CST))
 # define _cairo_atomic_int_dec_and_test(x) (__atomic_fetch_sub(x, 1, __ATOMIC_SEQ_CST) == 1)
 
-typedef intptr_t cairo_atomic_intptr_t;
-
 static cairo_always_inline cairo_bool_t
 _cairo_atomic_int_cmpxchg_impl(cairo_atomic_int_t *x,
-			       cairo_atomic_int_t oldv,
-			       cairo_atomic_int_t newv)
+			       int oldv,
+			       int newv)
 {
-    cairo_atomic_int_t expected = oldv;
+    int expected = oldv;
     return __atomic_compare_exchange_n(x, &expected, newv, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
 }
 
 #define _cairo_atomic_int_cmpxchg(x, oldv, newv) \
     _cairo_atomic_int_cmpxchg_impl(x, oldv, newv)
 
-static cairo_always_inline cairo_atomic_int_t
+static cairo_always_inline int
 _cairo_atomic_int_cmpxchg_return_old_impl(cairo_atomic_int_t *x,
-					  cairo_atomic_int_t oldv,
-					  cairo_atomic_int_t newv)
+					  int oldv,
+					  int newv)
 {
-    cairo_atomic_int_t expected = oldv;
+    int expected = oldv;
     (void) __atomic_compare_exchange_n(x, &expected, newv, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
     return expected;
 }
@@ -119,7 +204,7 @@ _cairo_atomic_int_cmpxchg_return_old_impl(cairo_atomic_int_t *x,
     _cairo_atomic_int_cmpxchg_return_old_impl(x, oldv, newv)
 
 static cairo_always_inline cairo_bool_t
-_cairo_atomic_ptr_cmpxchg_impl(void **x, void *oldv, void *newv)
+_cairo_atomic_ptr_cmpxchg_impl(cairo_atomic_intptr_t *x, void *oldv, void *newv)
 {
     void *expected = oldv;
     return __atomic_compare_exchange_n(x, &expected, newv, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
@@ -129,7 +214,7 @@ _cairo_atomic_ptr_cmpxchg_impl(void **x, void *oldv, void *newv)
     _cairo_atomic_ptr_cmpxchg_impl(x, oldv, newv)
 
 static cairo_always_inline void *
-_cairo_atomic_ptr_cmpxchg_return_old_impl(void **x, void *oldv, void *newv)
+_cairo_atomic_ptr_cmpxchg_return_old_impl(cairo_atomic_intptr_t *x, void *oldv, void *newv)
 {
     void *expected = oldv;
     (void) __atomic_compare_exchange_n(x, &expected, newv, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
@@ -139,38 +224,39 @@ _cairo_atomic_ptr_cmpxchg_return_old_impl(void **x, void *oldv, void *newv)
 #define _cairo_atomic_ptr_cmpxchg_return_old(x, oldv, newv) \
     _cairo_atomic_ptr_cmpxchg_return_old_impl(x, oldv, newv)
 
-#endif
+#endif /* HAVE_CXX11_ATOMIC_PRIMITIVES */
 
 #if HAVE_GCC_LEGACY_ATOMICS
 
 #define HAS_ATOMIC_OPS 1
 
 typedef int cairo_atomic_int_t;
+typedef intptr_t cairo_atomic_intptr_t;
 
-static cairo_always_inline cairo_atomic_int_t
+static cairo_always_inline int
 _cairo_atomic_int_get (cairo_atomic_int_t *x)
 {
     __sync_synchronize ();
     return *x;
 }
 
-static cairo_always_inline cairo_atomic_int_t
+static cairo_always_inline int
 _cairo_atomic_int_get_relaxed (cairo_atomic_int_t *x)
 {
     return *x;
 }
 
 static cairo_always_inline void
-_cairo_atomic_int_set_relaxed (cairo_atomic_int_t *x, cairo_atomic_int_t val)
+_cairo_atomic_int_set_relaxed (cairo_atomic_int_t *x, int val)
 {
     *x = val;
 }
 
 static cairo_always_inline void *
-_cairo_atomic_ptr_get (void **x)
+_cairo_atomic_ptr_get (cairo_atomic_intptr_t *x)
 {
     __sync_synchronize ();
-    return *x;
+    return (void*)*x;
 }
 
 # define _cairo_atomic_int_inc(x) ((void) __sync_fetch_and_add(x, 1))
@@ -179,15 +265,13 @@ _cairo_atomic_ptr_get (void **x)
 # define _cairo_atomic_int_cmpxchg(x, oldv, newv) __sync_bool_compare_and_swap (x, oldv, newv)
 # define _cairo_atomic_int_cmpxchg_return_old(x, oldv, newv) __sync_val_compare_and_swap (x, oldv, newv)
 
-typedef intptr_t cairo_atomic_intptr_t;
-
 # define _cairo_atomic_ptr_cmpxchg(x, oldv, newv) \
     __sync_bool_compare_and_swap ((cairo_atomic_intptr_t*)x, (cairo_atomic_intptr_t)oldv, (cairo_atomic_intptr_t)newv)
 
 # define _cairo_atomic_ptr_cmpxchg_return_old(x, oldv, newv) \
     _cairo_atomic_intptr_to_voidptr (__sync_val_compare_and_swap ((cairo_atomic_intptr_t*)x, (cairo_atomic_intptr_t)oldv, (cairo_atomic_intptr_t)newv))
 
-#endif
+#endif /* HAVE_GCC_LEGACY_ATOMICS */
 
 #if HAVE_LIB_ATOMIC_OPS
 #include <atomic_ops.h>
@@ -219,6 +303,7 @@ typedef intptr_t cairo_atomic_intptr_t;
 #define HAS_ATOMIC_OPS 1
 
 typedef int32_t cairo_atomic_int_t;
+typedef intptr_t cairo_atomic_intptr_t;
 
 # define _cairo_atomic_int_get(x) (OSMemoryBarrier(), *(x))
 # define _cairo_atomic_int_get_relaxed(x) *(x)
@@ -229,8 +314,6 @@ typedef int32_t cairo_atomic_int_t;
 # define _cairo_atomic_int_dec_and_test(x) (OSAtomicDecrement32Barrier (x) == 0)
 # define _cairo_atomic_int_cmpxchg(x, oldv, newv) OSAtomicCompareAndSwap32Barrier(oldv, newv, x)
 
-typedef intptr_t cairo_atomic_intptr_t;
-
 #if SIZEOF_VOID_P==4
 # define _cairo_atomic_ptr_cmpxchg(x, oldv, newv) \
     OSAtomicCompareAndSwap32Barrier((cairo_atomic_intptr_t)oldv, (cairo_atomic_intptr_t)newv, (cairo_atomic_intptr_t *)x)
@@ -245,17 +328,17 @@ typedef intptr_t cairo_atomic_intptr_t;
 
 # define _cairo_atomic_ptr_get(x) (OSMemoryBarrier(), *(x))
 
-#endif
+#endif /* HAVE_OS_ATOMIC_OPS */
 
 #if !defined(HAS_ATOMIC_OPS) && defined(_WIN32)
 #include <windows.h>
 
 #define HAS_ATOMIC_OPS 1
 
-typedef int32_t cairo_atomic_int_t;
-typedef intptr_t cairo_atomic_intptr_t;
+typedef LONG cairo_atomic_int_t;
+typedef PVOID cairo_atomic_intptr_t;
 
-static cairo_always_inline cairo_atomic_int_t
+static cairo_always_inline int
 _cairo_atomic_int_get (cairo_atomic_int_t *x)
 {
     MemoryBarrier ();
@@ -265,33 +348,33 @@ _cairo_atomic_int_get (cairo_atomic_int_t *x)
 # define _cairo_atomic_int_get_relaxed(x) *(x)
 # define _cairo_atomic_int_set_relaxed(x, val) *(x) = (val)
 
-# define _cairo_atomic_int_inc(x) ((void) InterlockedIncrement ((LONG*)x))
-# define _cairo_atomic_int_dec(x) ((void) InterlockedDecrement ((LONG*)x))
-# define _cairo_atomic_int_dec_and_test(x) (InterlockedDecrement ((LONG*)x) == 0)
+# define _cairo_atomic_int_inc(x) ((void) InterlockedIncrement (x))
+# define _cairo_atomic_int_dec(x) ((void) InterlockedDecrement (x))
+# define _cairo_atomic_int_dec_and_test(x) (InterlockedDecrement (x) == 0)
 
 static cairo_always_inline cairo_bool_t
 _cairo_atomic_int_cmpxchg (cairo_atomic_int_t *x,
-			   cairo_atomic_int_t oldv,
-			   cairo_atomic_int_t newv)
+			   int oldv,
+			   int newv)
 {
-    return InterlockedCompareExchange ((LONG*)x, (LONG)newv, (LONG)oldv) == oldv;
+    return InterlockedCompareExchange (x, (LONG)newv, (LONG)oldv) == oldv;
 }
 
 static cairo_always_inline void *
-_cairo_atomic_ptr_get (void **x)
+_cairo_atomic_ptr_get (cairo_atomic_intptr_t *x)
 {
     MemoryBarrier ();
-    return *x;
+    return (void *) *x;
 }
 
 static cairo_always_inline cairo_bool_t
-_cairo_atomic_ptr_cmpxchg (void **x, void *oldv, void *newv)
+_cairo_atomic_ptr_cmpxchg (cairo_atomic_intptr_t *x, void *oldv, void *newv)
 {
     return InterlockedCompareExchangePointer (x, newv, oldv) == oldv;
 }
 
 static cairo_always_inline void *
-_cairo_atomic_ptr_cmpxchg_return_old (void **x, void *oldv, void *newv)
+_cairo_atomic_ptr_cmpxchg_return_old (cairo_atomic_intptr_t *x, void *oldv, void *newv)
 {
     return InterlockedCompareExchangePointer (x, newv, oldv);
 }
@@ -312,24 +395,24 @@ _cairo_atomic_int_inc (cairo_atomic_int_t *x);
 cairo_private cairo_bool_t
 _cairo_atomic_int_dec_and_test (cairo_atomic_int_t *x);
 
-cairo_private cairo_atomic_int_t
-_cairo_atomic_int_cmpxchg_return_old_impl (cairo_atomic_int_t *x, cairo_atomic_int_t oldv, cairo_atomic_int_t newv);
+cairo_private int
+_cairo_atomic_int_cmpxchg_return_old_impl (cairo_atomic_int_t *x, int oldv, int newv);
 
 cairo_private void *
-_cairo_atomic_ptr_cmpxchg_return_old_impl (void **x, void *oldv, void *newv);
+_cairo_atomic_ptr_cmpxchg_return_old_impl (cairo_atomic_intptr_t *x, void *oldv, void *newv);
 
 #define _cairo_atomic_int_cmpxchg_return_old(x, oldv, newv) _cairo_atomic_int_cmpxchg_return_old_impl (x, oldv, newv)
 #define _cairo_atomic_ptr_cmpxchg_return_old(x, oldv, newv) _cairo_atomic_ptr_cmpxchg_return_old_impl (x, oldv, newv)
 
 #ifdef ATOMIC_OP_NEEDS_MEMORY_BARRIER
-cairo_private cairo_atomic_int_t
+cairo_private int
 _cairo_atomic_int_get (cairo_atomic_int_t *x);
-cairo_private cairo_atomic_int_t
+cairo_private int
 _cairo_atomic_int_get_relaxed (cairo_atomic_int_t *x);
 void
-_cairo_atomic_int_set_relaxed (cairo_atomic_int_t *x, cairo_atomic_int_t val);
+_cairo_atomic_int_set_relaxed (cairo_atomic_int_t *x, int val);
 cairo_private void*
-_cairo_atomic_ptr_get(void **x);
+_cairo_atomic_ptr_get(cairo_atomic_intptr_t *x);
 #else
 # define _cairo_atomic_int_get(x) (*x)
 # define _cairo_atomic_int_get_relaxed(x) (*x)
@@ -346,10 +429,10 @@ _cairo_atomic_intptr_to_voidptr (cairo_atomic_intptr_t x)
     return (void *) x;
 }
 
-static cairo_always_inline cairo_atomic_int_t
-_cairo_atomic_int_cmpxchg_return_old_fallback(cairo_atomic_int_t *x, cairo_atomic_int_t oldv, cairo_atomic_int_t newv)
+static cairo_always_inline int
+_cairo_atomic_int_cmpxchg_return_old_fallback(cairo_atomic_int_t *x, int oldv, int newv)
 {
-    cairo_atomic_int_t curr;
+    int curr;
 
     do {
 	curr = _cairo_atomic_int_get (x);
@@ -359,7 +442,7 @@ _cairo_atomic_int_cmpxchg_return_old_fallback(cairo_atomic_int_t *x, cairo_atomi
 }
 
 static cairo_always_inline void *
-_cairo_atomic_ptr_cmpxchg_return_old_fallback(void **x, void *oldv, void *newv)
+_cairo_atomic_ptr_cmpxchg_return_old_fallback(cairo_atomic_intptr_t *x, void *oldv, void *newv)
 {
     void *curr;
 
diff --git a/src/cairo-atomic.c b/src/cairo-atomic.c
index 3c4d51972..26966d007 100644
--- a/src/cairo-atomic.c
+++ b/src/cairo-atomic.c
@@ -61,10 +61,10 @@ _cairo_atomic_int_dec_and_test (cairo_atomic_int_t *x)
     return ret;
 }
 
-cairo_atomic_int_t
-_cairo_atomic_int_cmpxchg_return_old_impl (cairo_atomic_int_t *x, cairo_atomic_int_t oldv, cairo_atomic_int_t newv)
+int
+_cairo_atomic_int_cmpxchg_return_old_impl (cairo_atomic_int_t *x, int oldv, int newv)
 {
-    cairo_atomic_int_t ret;
+    int ret;
 
     CAIRO_MUTEX_LOCK (_cairo_atomic_mutex);
     ret = *x;
@@ -76,24 +76,24 @@ _cairo_atomic_int_cmpxchg_return_old_impl (cairo_atomic_int_t *x, cairo_atomic_i
 }
 
 void *
-_cairo_atomic_ptr_cmpxchg_return_old_impl (void **x, void *oldv, void *newv)
+_cairo_atomic_ptr_cmpxchg_return_old_impl (cairo_atomic_intptr_t *x, void *oldv, void *newv)
 {
     void *ret;
 
     CAIRO_MUTEX_LOCK (_cairo_atomic_mutex);
-    ret = *x;
+    ret = (void *) *x;
     if (ret == oldv)
-	*x = newv;
+	*x = (cairo_atomic_intptr_t) newv;
     CAIRO_MUTEX_UNLOCK (_cairo_atomic_mutex);
 
     return ret;
 }
 
 #ifdef ATOMIC_OP_NEEDS_MEMORY_BARRIER
-cairo_atomic_int_t
+int
 _cairo_atomic_int_get (cairo_atomic_int_t *x)
 {
-    cairo_atomic_int_t ret;
+    int ret;
 
     CAIRO_MUTEX_LOCK (_cairo_atomic_mutex);
     ret = *x;
@@ -102,14 +102,14 @@ _cairo_atomic_int_get (cairo_atomic_int_t *x)
     return ret;
 }
 
-cairo_atomic_int_t
+int
 _cairo_atomic_int_get_relaxed (cairo_atomic_int_t *x)
 {
     return _cairo_atomic_int_get (x);
 }
 
 void
-_cairo_atomic_int_set_relaxed (cairo_atomic_int_t *x, cairo_atomic_int_t val)
+_cairo_atomic_int_set_relaxed (cairo_atomic_int_t *x, int val)
 {
     CAIRO_MUTEX_LOCK (_cairo_atomic_mutex);
     *x = val;
diff --git a/src/cairo-freed-pool-private.h b/src/cairo-freed-pool-private.h
index 8a7af523d..ced1adb3d 100644
--- a/src/cairo-freed-pool-private.h
+++ b/src/cairo-freed-pool-private.h
@@ -50,12 +50,12 @@ CAIRO_BEGIN_DECLS
  */
 #define MAX_FREED_POOL_SIZE 16
 typedef struct {
-    void *pool[MAX_FREED_POOL_SIZE];
+    cairo_atomic_intptr_t pool[MAX_FREED_POOL_SIZE];
     cairo_atomic_int_t top;
 } freed_pool_t;
 
 static cairo_always_inline void *
-_atomic_fetch (void **slot)
+_atomic_fetch (cairo_atomic_intptr_t *slot)
 {
     void *ptr;
 
@@ -67,7 +67,7 @@ _atomic_fetch (void **slot)
 }
 
 static cairo_always_inline cairo_bool_t
-_atomic_store (void **slot, void *ptr)
+_atomic_store (cairo_atomic_intptr_t *slot, void *ptr)
 {
     return _cairo_atomic_ptr_cmpxchg (slot, NULL, ptr);
 }
diff --git a/src/cairo-freed-pool.c b/src/cairo-freed-pool.c
index 5b1c4c0bb..6d46ab1c1 100644
--- a/src/cairo-freed-pool.c
+++ b/src/cairo-freed-pool.c
@@ -83,8 +83,7 @@ _freed_pool_reset (freed_pool_t *pool)
     int i;
 
     for (i = 0; i < ARRAY_LENGTH (pool->pool); i++) {
-	free (pool->pool[i]);
-	pool->pool[i] = NULL;
+	free (_atomic_fetch (&pool->pool[i]));
     }
 
     _cairo_atomic_int_set_relaxed (&pool->top, 0);
diff --git a/src/cairo-misc.c b/src/cairo-misc.c
index 6e3189740..23e3eaf0b 100644
--- a/src/cairo-misc.c
+++ b/src/cairo-misc.c
@@ -803,12 +803,12 @@ get_C_locale (void)
     locale_t C;
 
 retry:
-    C = (locale_t) _cairo_atomic_ptr_get ((void **) &C_locale);
+    C = (locale_t) _cairo_atomic_ptr_get ((cairo_atomic_intptr_t *) &C_locale);
 
     if (unlikely (!C)) {
         C = newlocale (LC_ALL_MASK, "C", NULL);
 
-        if (!_cairo_atomic_ptr_cmpxchg ((void **) &C_locale, NULL, C)) {
+        if (!_cairo_atomic_ptr_cmpxchg ((cairo_atomic_intptr_t *) &C_locale, NULL, C)) {
             freelocale (C_locale);
             goto retry;
         }
diff --git a/src/cairo-mutex-private.h b/src/cairo-mutex-private.h
index 65732a180..6b3eb0365 100644
--- a/src/cairo-mutex-private.h
+++ b/src/cairo-mutex-private.h
@@ -53,9 +53,15 @@ cairo_private void _cairo_mutex_finalize (void);
 #endif
 
 /* only if using static initializer and/or finalizer define the boolean */
 #if _CAIRO_MUTEX_IMPL_USE_STATIC_INITIALIZER || _CAIRO_MUTEX_IMPL_USE_STATIC_FINALIZER
+
+#if HAS_ATOMIC_OPS
+cairo_private extern cairo_atomic_int_t _cairo_mutex_initialized;
+#else
 cairo_private extern int _cairo_mutex_initialized;
+#endif
 #endif
+
 
 /* Finally, extern the static mutexes and undef */
 #define CAIRO_MUTEX_DECLARE(mutex) cairo_private extern cairo_mutex_t mutex;
diff --git a/src/cairo-mutex.c b/src/cairo-mutex.c
index 5b6debeca..89acad525 100644
--- a/src/cairo-mutex.c
+++ b/src/cairo-mutex.c
@@ -51,7 +51,11 @@
 #  define _CAIRO_MUTEX_IMPL_INITIALIZED_DEFAULT_VALUE _CAIRO_MUTEX_INITIALIZED
 # endif
 
+#if HAS_ATOMIC_OPS
+cairo_atomic_int_t _cairo_mutex_initialized = _CAIRO_MUTEX_IMPL_INITIALIZED_DEFAULT_VALUE;
+#else
 int _cairo_mutex_initialized = _CAIRO_MUTEX_IMPL_INITIALIZED_DEFAULT_VALUE;
+#endif
 
 # undef _CAIRO_MUTEX_IMPL_INITIALIZED_DEFAULT_VALUE
 
diff --git a/src/cairo-recording-surface.c b/src/cairo-recording-surface.c
index e2f74e905..59428c8a4 100644
--- a/src/cairo-recording-surface.c
+++ b/src/cairo-recording-surface.c
@@ -1735,7 +1735,7 @@ _cairo_recording_surface_regions_allocate_unique_id (void)
 	unique_id = 1;
 	return unique_id;
 #else
-    cairo_atomic_int_t old, id;
+    int old, id;
 
     do {
 	old = _cairo_atomic_uint_get (&unique_id);
diff --git a/src/cairo-reference-count-private.h b/src/cairo-reference-count-private.h
index 75fdf3538..f19125c61 100644
--- a/src/cairo-reference-count-private.h
+++ b/src/cairo-reference-count-private.h
@@ -52,7 +52,7 @@ typedef struct {
 
 #define CAIRO_REFERENCE_COUNT_GET_VALUE(RC) _cairo_atomic_int_get (&(RC)->ref_count)
 
-#define CAIRO_REFERENCE_COUNT_INVALID_VALUE ((cairo_atomic_int_t) -1)
+#define CAIRO_REFERENCE_COUNT_INVALID_VALUE ((int) -1)
 #define CAIRO_REFERENCE_COUNT_INVALID {CAIRO_REFERENCE_COUNT_INVALID_VALUE}
 
 #define CAIRO_REFERENCE_COUNT_IS_INVALID(RC) (CAIRO_REFERENCE_COUNT_GET_VALUE (RC) == CAIRO_REFERENCE_COUNT_INVALID_VALUE)
diff --git a/src/cairo-surface.c b/src/cairo-surface.c
index 657146790..a8117d699 100644
--- a/src/cairo-surface.c
+++ b/src/cairo-surface.c
@@ -277,7 +277,7 @@ _cairo_surface_allocate_unique_id (void)
 	unique_id = 1;
 	return unique_id;
 #else
-    cairo_atomic_int_t old, id;
+    int old, id;
 
     do {
 	old = _cairo_atomic_uint_get (&unique_id);
diff --git a/src/win32/cairo-win32-device.c b/src/win32/cairo-win32-device.c
index 781ee0cde..6032ce7d9 100644
--- a/src/win32/cairo-win32-device.c
+++ b/src/win32/cairo-win32-device.c
@@ -136,7 +136,7 @@ _cairo_win32_device_get (void)
     device->msimg32_dll = NULL;
     device->alpha_blend = _cairo_win32_device_get_alpha_blend (device);
 
-    if (_cairo_atomic_ptr_cmpxchg ((void **)&__cairo_win32_device, NULL, device))
+    if (_cairo_atomic_ptr_cmpxchg ((cairo_atomic_intptr_t *)&__cairo_win32_device, NULL, device))
 	return cairo_device_reference(&device->base);
 
     _cairo_win32_device_destroy (device);
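
Illustration (not part of the patch): the *_cmpxchg_return_old wrappers added above rely on C11's atomic_compare_exchange_strong_explicit() writing the value it actually observed back into its "expected" argument, so returning that local yields the old value whether or not the swap succeeded. A minimal standalone sketch of that behaviour, using only standard <stdatomic.h> and a hypothetical helper name:

    #include <stdatomic.h>
    #include <stdio.h>

    /* Same shape as _cairo_atomic_int_cmpxchg_return_old_impl() in the
     * patch: on success *x becomes newv; on failure *x is left alone;
     * either way the value found in *x before the attempt is returned. */
    static int
    cmpxchg_return_old (atomic_int *x, int oldv, int newv)
    {
        int expected = oldv;
        (void) atomic_compare_exchange_strong_explicit (x, &expected, newv,
                                                        memory_order_seq_cst,
                                                        memory_order_seq_cst);
        return expected;
    }

    int
    main (void)
    {
        atomic_int v = 1;

        printf ("%d\n", cmpxchg_return_old (&v, 1, 2)); /* prints 1; v is now 2 */
        printf ("%d\n", cmpxchg_return_old (&v, 5, 9)); /* prints 2; v unchanged */
        printf ("%d\n", atomic_load (&v));              /* prints 2 */
        return 0;
    }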