Further atomic ops portability improvements and bug fixes.
* Don't play tricks for a more efficient pg_atomic_clear_flag() in the
  generic gcc implementation. The old version was broken on gcc < 4.7
  on !x86 platforms. Per buildfarm member chipmunk.
* Make usage of __atomic() fences depend on HAVE_GCC__ATOMIC_INT32_CAS
  instead of HAVE_GCC__ATOMIC_INT64_CAS - there are platforms with 32bit
  __atomic support that don't support 64bit atomics.
* Blindly fix two superfluous #endif in generic-xlc.h.
* Check for --disable-atomics on platforms other than x86.
commit f9f07411a5
parent a30199b01b
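
The pg_atomic_clear_flag() change is easiest to see side by side. Below is a minimal sketch, assuming nothing beyond gcc builtins (demo_flag and the function names are made up for illustration; this is not PostgreSQL source), contrasting the x86-only trick that moves into the x86-specific header with the plain __sync_lock_release() the generic gcc fallback now uses:

#include <stdbool.h>

/* stand-in for pg_atomic_flag; the real struct lives in the atomics headers */
typedef struct
{
	volatile int value;
} demo_flag;

/*
 * x86-only variant: on a TSO architecture a compiler barrier followed by a
 * plain store already has release semantics, so no fence instruction or
 * locked operation is needed.
 */
static inline void
demo_clear_flag_x86(demo_flag *ptr)
{
	__asm__ __volatile__("" ::: "memory");
	ptr->value = 0;
}

/*
 * Portable gcc variant: __sync_lock_release() stores 0 with release
 * semantics and is available on old gcc versions that lack __atomic().
 */
static inline void
demo_clear_flag_generic(demo_flag *ptr)
{
	__sync_lock_release(&ptr->value);
}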
@@ -78,6 +78,7 @@ typedef struct pg_atomic_uint64

#endif

#endif /* defined(HAVE_ATOMICS) */

#endif /* defined(__GNUC__) && !defined(__INTEL_COMPILER) */


#if defined(PG_USE_INLINE) || defined(ATOMICS_INCLUDE_DEFINITIONS)

@@ -160,6 +161,18 @@ pg_atomic_test_set_flag_impl(volatile pg_atomic_flag *ptr)
	return _res == 0;
}

#define PG_HAVE_ATOMIC_CLEAR_FLAG
static inline void
pg_atomic_clear_flag_impl(volatile pg_atomic_flag *ptr)
{
	/*
	 * On a TSO architecture like x86 it's sufficient to use a compiler
	 * barrier to achieve release semantics.
	 */
	__asm__ __volatile__("" ::: "memory");
	ptr->value = 0;
}

#define PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32
static inline bool
pg_atomic_compare_exchange_u32_impl(volatile pg_atomic_uint32 *ptr,
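
The TSO comment in the hunk above is the whole trick: on x86, stores are already ordered with respect to earlier stores, so only compiler reordering has to be prevented. As a hedged illustration (plain gcc/clang __atomic builtins, not part of the patch), a C11-style release store on x86-64 compiles to an ordinary mov, which is exactly what the compiler-barrier-plus-store idiom relies on:

#include <stdint.h>

/*
 * On x86-64 this release store is emitted as a plain "mov"; no fence or
 * locked instruction is required, only the guarantee that the compiler
 * does not move earlier writes past it.
 */
static inline void
release_store_u32(volatile uint32_t *p, uint32_t v)
{
	__atomic_store_n(p, v, __ATOMIC_RELEASE);
}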
@@ -25,6 +25,10 @@

#include <machine/sys/inline.h>

#define pg_compiler_barrier_impl()	_Asm_sched_fence()

#if defined(HAVE_ATOMICS)

/* IA64 always has 32/64 bit atomics */

#define PG_HAVE_ATOMIC_U32_SUPPORT
@@ -39,10 +43,13 @@ typedef struct pg_atomic_uint64
	volatile uint64 value;
} pg_atomic_uint64;

#define pg_compiler_barrier_impl()	_Asm_sched_fence()
#endif /* defined(HAVE_ATOMICS) */


#if defined(PG_USE_INLINE) || defined(ATOMICS_INCLUDE_DEFINITIONS)

#if defined(HAVE_ATOMICS)

#define MINOR_FENCE (_Asm_fence) (_UP_CALL_FENCE | _UP_SYS_FENCE | \
							 _DOWN_CALL_FENCE | _DOWN_SYS_FENCE )

@@ -96,4 +103,6 @@ pg_atomic_compare_exchange_u64_impl(volatile pg_atomic_uint64 *ptr,

#undef MINOR_FENCE

#endif /* defined(HAVE_ATOMICS) */

#endif /* defined(PG_USE_INLINE) || defined(ATOMICS_INCLUDE_DEFINITIONS) */
@@ -40,19 +40,19 @@
 * definitions where possible, and use this only as a fallback.
 */
#if !defined(pg_memory_barrier_impl)
#	if defined(HAVE_GCC__ATOMIC_INT64_CAS)
#	if defined(HAVE_GCC__ATOMIC_INT32_CAS)
#		define pg_memory_barrier_impl()		__atomic_thread_fence(__ATOMIC_SEQ_CST)
#	elif (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 1))
#		define pg_memory_barrier_impl()		__sync_synchronize()
#	endif
#endif /* !defined(pg_memory_barrier_impl) */

#if !defined(pg_read_barrier_impl) && defined(HAVE_GCC__ATOMIC_INT64_CAS)
#if !defined(pg_read_barrier_impl) && defined(HAVE_GCC__ATOMIC_INT32_CAS)
/* acquire semantics include read barrier semantics */
#	define pg_read_barrier_impl()		__atomic_thread_fence(__ATOMIC_ACQUIRE)
#endif

#if !defined(pg_write_barrier_impl) && defined(HAVE_GCC__ATOMIC_INT64_CAS)
#if !defined(pg_write_barrier_impl) && defined(HAVE_GCC__ATOMIC_INT32_CAS)
/* release semantics include write barrier semantics */
#	define pg_write_barrier_impl()		__atomic_thread_fence(__ATOMIC_RELEASE)
#endif
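
This hunk implements the second commit-message item: the fence fallbacks now key off HAVE_GCC__ATOMIC_INT32_CAS because a platform can have 32-bit __atomic support without 64-bit atomics. As a rough sketch of the kind of configure-time probe such a symbol would come from (illustrative only; this test program is an assumption, not the actual autoconf check), it only needs a 32-bit compare-and-swap to compile and run:

/* probe: does the compiler support __atomic_compare_exchange_n on a 32-bit int? */
int
main(void)
{
	int			val = 0;
	int			expected = 0;

	__atomic_compare_exchange_n(&val, &expected, 37, 0,
								__ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
	return (val == 37) ? 0 : 1;
}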
@@ -139,13 +139,7 @@ pg_atomic_unlocked_test_flag_impl(volatile pg_atomic_flag *ptr)
static inline void
pg_atomic_clear_flag_impl(volatile pg_atomic_flag *ptr)
{
	/*
	 * XXX: It would be nicer to use __sync_lock_release here, but gcc insists
	 * on making that an atomic op which is far to expensive and a stronger
	 * guarantee than what we actually need.
	 */
	pg_write_barrier_impl();
	ptr->value = 0;
	__sync_lock_release(&ptr->value);
}
#endif

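
For context on the replacement in this hunk: __sync_lock_release() is the natural counterpart of __sync_lock_test_and_set(), storing 0 with release semantics, which matches how a pg_atomic_flag is used. A standalone usage sketch with made-up names (not PostgreSQL code):

#include <stdbool.h>

typedef struct
{
	volatile int value;
} my_flag;						/* stand-in for pg_atomic_flag */

/* returns true if the flag was free and is now ours (acquire semantics) */
static inline bool
my_test_set_flag(my_flag *f)
{
	return __sync_lock_test_and_set(&f->value, 1) == 0;
}

/* stores 0 with release semantics */
static inline void
my_clear_flag(my_flag *f)
{
	__sync_lock_release(&f->value);
}

int
main(void)
{
	my_flag		lock = {0};

	while (!my_test_set_flag(&lock))
		;						/* spin until acquired */
	/* ... critical section ... */
	my_clear_flag(&lock);
	return 0;
}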
@@ -32,6 +32,8 @@
#define pg_memory_barrier_impl()	MemoryBarrier()
#endif

#if defined(HAVE_ATOMICS)

#define PG_HAVE_ATOMIC_U32_SUPPORT
typedef struct pg_atomic_uint32
{
@@ -44,9 +46,13 @@ typedef struct pg_atomic_uint64
	volatile uint64 value;
} pg_atomic_uint64;

#endif /* defined(HAVE_ATOMICS) */


#if defined(PG_USE_INLINE) || defined(ATOMICS_INCLUDE_DEFINITIONS)

#if defined(HAVE_ATOMICS)

#define PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32
static inline bool
pg_atomic_compare_exchange_u32_impl(volatile pg_atomic_uint32 *ptr,
@@ -100,4 +106,6 @@ pg_atomic_fetch_add_u64_impl(volatile pg_atomic_uint64 *ptr, int64 add_)
}
#endif /* _WIN64 */

#endif /* HAVE_ATOMICS */

#endif /* defined(PG_USE_INLINE) || defined(ATOMICS_INCLUDE_DEFINITIONS) */
@@ -17,6 +17,8 @@
 * -------------------------------------------------------------------------
 */

#if defined(HAVE_ATOMICS)

/* Older versions of the compiler don't have atomic.h... */
#ifdef HAVE_ATOMIC_H

@@ -36,9 +38,13 @@ typedef struct pg_atomic_uint64

#endif /* HAVE_ATOMIC_H */

#endif /* defined(HAVE_ATOMICS) */


#if defined(PG_USE_INLINE) || defined(ATOMICS_INCLUDE_DEFINITIONS)

#if defined(HAVE_ATOMICS)

#ifdef HAVE_ATOMIC_H

#define PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32
@@ -71,4 +77,6 @@ pg_atomic_compare_exchange_u64_impl(volatile pg_atomic_uint64 *ptr,

#endif /* HAVE_ATOMIC_H */

#endif /* defined(HAVE_ATOMICS) */

#endif /* defined(PG_USE_INLINE) || defined(ATOMICS_INCLUDE_DEFINITIONS) */
@@ -16,6 +16,8 @@
 * -------------------------------------------------------------------------
 */

#if defined(HAVE_ATOMICS)

#include <atomic.h>

#define PG_HAVE_ATOMIC_U32_SUPPORT
@@ -35,8 +37,12 @@ typedef struct pg_atomic_uint64

#endif /* __64BIT__ */

#endif /* defined(HAVE_ATOMICS) */

#if defined(PG_USE_INLINE) || defined(ATOMICS_INCLUDE_DEFINITIONS)

#if defined(HAVE_ATOMICS)

#define PG_HAVE_ATOMIC_COMPARE_EXCHANGE_U32
static inline bool
pg_atomic_compare_exchange_u32_impl(volatile pg_atomic_uint32 *ptr,
@@ -69,7 +75,6 @@ pg_atomic_fetch_add_u32_impl(volatile pg_atomic_uint32 *ptr, int32 add_)
{
	return __fetch_and_add(&ptr->value, add_);
}
#endif

#ifdef PG_HAVE_ATOMIC_U64_SUPPORT

@@ -96,8 +101,9 @@ pg_atomic_fetch_add_u64_impl(volatile pg_atomic_uint64 *ptr, int64 add_)
{
	return __fetch_and_addlp(&ptr->value, add_);
}
#endif

#endif /* PG_HAVE_ATOMIC_U64_SUPPORT */

#endif /* defined(HAVE_ATOMICS) */

#endif /* defined(PG_USE_INLINE) || defined(ATOMICS_INCLUDE_DEFINITIONS) */