mirror of
https://gitlab.freedesktop.org/cairo/cairo.git
synced 2026-03-30 01:20:41 +02:00
Use more relaxed atomics
It turns out all uses of _cairo_atomic_ptr_get can be turned into relaxed loads. So we rename _cairo_atomic_ptr_get to _cairo_atomic_ptr_get_relaxed and drop its memory-ordering constraints. In addition, most uses of _cairo_atomic_int_get can be turned into relaxed loads. We replace these calls to _cairo_atomic_int_get with _cairo_atomic_int_get_relaxed (which is already implemented).
This commit is contained in:
parent
2e65e01d25
commit
bedec29b00
4 changed files with 22 additions and 18 deletions
|
|
@@ -75,9 +75,9 @@ _cairo_atomic_int_set_relaxed (cairo_atomic_int_t *x, int val)
|
|||
}
|
||||
|
||||
static cairo_always_inline void *
|
||||
_cairo_atomic_ptr_get (cairo_atomic_intptr_t *x)
|
||||
_cairo_atomic_ptr_get_relaxed (cairo_atomic_intptr_t *x)
|
||||
{
|
||||
return atomic_load_explicit (x, memory_order_seq_cst);
|
||||
return atomic_load_explicit (x, memory_order_relaxed);
|
||||
}
|
||||
|
||||
# define _cairo_atomic_int_inc(x) ((void) atomic_fetch_add_explicit(x, 1, memory_order_seq_cst))
|
||||
|
|
@@ -169,9 +169,9 @@ _cairo_atomic_int_set_relaxed (cairo_atomic_int_t *x, int val)
|
|||
}
|
||||
|
||||
static cairo_always_inline void *
|
||||
_cairo_atomic_ptr_get (cairo_atomic_intptr_t *x)
|
||||
_cairo_atomic_ptr_get_relaxed (cairo_atomic_intptr_t *x)
|
||||
{
|
||||
return (void*)__atomic_load_n(x, __ATOMIC_SEQ_CST);
|
||||
return (void*)__atomic_load_n(x, __ATOMIC_RELAXED);
|
||||
}
|
||||
|
||||
# define _cairo_atomic_int_inc(x) ((void) __atomic_fetch_add(x, 1, __ATOMIC_SEQ_CST))
|
||||
|
|
@@ -253,10 +253,9 @@ _cairo_atomic_int_set_relaxed (cairo_atomic_int_t *x, int val)
|
|||
}
|
||||
|
||||
static cairo_always_inline void *
|
||||
_cairo_atomic_ptr_get (cairo_atomic_intptr_t *x)
|
||||
_cairo_atomic_ptr_get_relaxed (cairo_atomic_intptr_t *x)
|
||||
{
|
||||
__sync_synchronize ();
|
||||
return (void*)*x;
|
||||
return *(void * volatile *)x;
|
||||
}
|
||||
|
||||
# define _cairo_atomic_int_inc(x) ((void) __sync_fetch_and_add(x, 1))
|
||||
|
|
@@ -300,7 +299,7 @@ typedef AO_t cairo_atomic_intptr_t;
|
|||
|
||||
static_assert (sizeof (AO_t) >= sizeof (void *), "AO_t cannot be used for pointers");
|
||||
|
||||
# define _cairo_atomic_ptr_get(x) _cairo_atomic_intptr_to_voidptr (AO_load_full (x))
|
||||
# define _cairo_atomic_ptr_get_relaxed(x) _cairo_atomic_intptr_to_voidptr (AO_load (x))
|
||||
# define _cairo_atomic_ptr_cmpxchg(x, oldv, newv) AO_compare_and_swap_full(x, (AO_t)oldv, (AO_t)newv)
|
||||
# define _cairo_atomic_ptr_cmpxchg_return_old(x, oldv, newv) AO_fetch_compare_and_swap_full(x, (AO_t)oldv, (AO_t)newv)
|
||||
|
||||
|
|
@@ -335,7 +334,7 @@ typedef intptr_t cairo_atomic_intptr_t;
|
|||
#error No matching integer pointer type
|
||||
#endif
|
||||
|
||||
# define _cairo_atomic_ptr_get(x) (OSMemoryBarrier(), *(x))
|
||||
# define _cairo_atomic_ptr_get_relaxed(x) (*(void * volatile *)(x))
|
||||
|
||||
#endif /* HAVE_OS_ATOMIC_OPS */
|
||||
|
||||
|
|
@@ -386,10 +385,15 @@ _cairo_atomic_int_cmpxchg_return_old_impl (cairo_atomic_int_t *x,
|
|||
_cairo_atomic_int_cmpxchg_return_old_impl(x, oldv, newv)
|
||||
|
||||
static cairo_always_inline void *
|
||||
_cairo_atomic_ptr_get (cairo_atomic_intptr_t *x)
|
||||
_cairo_atomic_ptr_get_relaxed (cairo_atomic_intptr_t *x)
|
||||
{
|
||||
MemoryBarrier ();
|
||||
return (void *) *x;
|
||||
#if SIZEOF_VOID_P == 4
|
||||
return (void *) __iso_volatile_load32 ((__int32 *) (void *) x);
|
||||
#elif SIZEOF_VOID_P == 8
|
||||
return (void *) __iso_volatile_load64 ((__int64 *) (void *) x);
|
||||
#else
|
||||
#error "unknown pointer size"
|
||||
#endif
|
||||
}
|
||||
|
||||
static cairo_always_inline cairo_bool_t
|
||||
|
|
@@ -445,7 +449,7 @@ void
|
|||
_cairo_atomic_int_set_relaxed (cairo_atomic_int_t *x, int val);
|
||||
|
||||
cairo_private void*
|
||||
_cairo_atomic_ptr_get(cairo_atomic_intptr_t *x);
|
||||
_cairo_atomic_ptr_get_relaxed(cairo_atomic_intptr_t *x);
|
||||
|
||||
#else
|
||||
|
||||
|
|
@@ -462,7 +466,7 @@ _cairo_atomic_int_cmpxchg_return_old_fallback(cairo_atomic_int_t *x, int oldv, i
|
|||
int curr;
|
||||
|
||||
do {
|
||||
curr = _cairo_atomic_int_get (x);
|
||||
curr = _cairo_atomic_int_get_relaxed (x);
|
||||
} while (curr == oldv && !_cairo_atomic_int_cmpxchg (x, oldv, newv));
|
||||
|
||||
return curr;
|
||||
|
|
@@ -474,7 +478,7 @@ _cairo_atomic_ptr_cmpxchg_return_old_fallback(cairo_atomic_intptr_t *x, void *ol
|
|||
void *curr;
|
||||
|
||||
do {
|
||||
curr = _cairo_atomic_ptr_get (x);
|
||||
curr = _cairo_atomic_ptr_get_relaxed (x);
|
||||
} while (curr == oldv && !_cairo_atomic_ptr_cmpxchg (x, oldv, newv));
|
||||
|
||||
return curr;
|
||||
|
|
|
|||
|
|
@@ -116,7 +116,7 @@ _cairo_atomic_int_set_relaxed (cairo_atomic_int_t *x, int val)
|
|||
}
|
||||
|
||||
void*
|
||||
_cairo_atomic_ptr_get (void **x)
|
||||
_cairo_atomic_ptr_get_relaxed (void **x)
|
||||
{
|
||||
void *ret;
|
||||
|
||||
|
|
|
|||
|
|
@@ -60,7 +60,7 @@ _atomic_fetch (cairo_atomic_intptr_t *slot)
|
|||
void *ptr;
|
||||
|
||||
do {
|
||||
ptr = _cairo_atomic_ptr_get (slot);
|
||||
ptr = _cairo_atomic_ptr_get_relaxed (slot);
|
||||
} while (! _cairo_atomic_ptr_cmpxchg (slot, ptr, NULL));
|
||||
|
||||
return ptr;
|
||||
|
|
|
|||
|
|
@@ -50,7 +50,7 @@ typedef struct {
|
|||
|
||||
#define CAIRO_REFERENCE_COUNT_INIT(RC, VALUE) ((RC)->ref_count = (VALUE))
|
||||
|
||||
#define CAIRO_REFERENCE_COUNT_GET_VALUE(RC) _cairo_atomic_int_get (&(RC)->ref_count)
|
||||
#define CAIRO_REFERENCE_COUNT_GET_VALUE(RC) _cairo_atomic_int_get_relaxed (&(RC)->ref_count)
|
||||
|
||||
#define CAIRO_REFERENCE_COUNT_INVALID_VALUE ((int) -1)
|
||||
#define CAIRO_REFERENCE_COUNT_INVALID {CAIRO_REFERENCE_COUNT_INVALID_VALUE}
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue