@@ -118,46 +118,6 @@
  */
 
 #include "port/atomics/generic.h"
 
-
-/*
- * Provide declarations for all functions here - on most platforms static
- * inlines are used and these aren't necessary, but when static inline is
- * unsupported these will be external functions.
- */
-STATIC_IF_INLINE_DECLARE void pg_atomic_init_flag(volatile pg_atomic_flag *ptr);
-STATIC_IF_INLINE_DECLARE bool pg_atomic_test_set_flag(volatile pg_atomic_flag *ptr);
-STATIC_IF_INLINE_DECLARE bool pg_atomic_unlocked_test_flag(volatile pg_atomic_flag *ptr);
-STATIC_IF_INLINE_DECLARE void pg_atomic_clear_flag(volatile pg_atomic_flag *ptr);
-
-STATIC_IF_INLINE_DECLARE void pg_atomic_init_u32(volatile pg_atomic_uint32 *ptr, uint32 val);
-STATIC_IF_INLINE_DECLARE uint32 pg_atomic_read_u32(volatile pg_atomic_uint32 *ptr);
-STATIC_IF_INLINE_DECLARE void pg_atomic_write_u32(volatile pg_atomic_uint32 *ptr, uint32 val);
-STATIC_IF_INLINE_DECLARE uint32 pg_atomic_exchange_u32(volatile pg_atomic_uint32 *ptr, uint32 newval);
-STATIC_IF_INLINE_DECLARE bool pg_atomic_compare_exchange_u32(volatile pg_atomic_uint32 *ptr,
-															 uint32 *expected, uint32 newval);
-STATIC_IF_INLINE_DECLARE uint32 pg_atomic_fetch_add_u32(volatile pg_atomic_uint32 *ptr, int32 add_);
-STATIC_IF_INLINE_DECLARE uint32 pg_atomic_fetch_sub_u32(volatile pg_atomic_uint32 *ptr, int32 sub_);
-STATIC_IF_INLINE_DECLARE uint32 pg_atomic_fetch_and_u32(volatile pg_atomic_uint32 *ptr, uint32 and_);
-STATIC_IF_INLINE_DECLARE uint32 pg_atomic_fetch_or_u32(volatile pg_atomic_uint32 *ptr, uint32 or_);
-STATIC_IF_INLINE_DECLARE uint32 pg_atomic_add_fetch_u32(volatile pg_atomic_uint32 *ptr, int32 add_);
-STATIC_IF_INLINE_DECLARE uint32 pg_atomic_sub_fetch_u32(volatile pg_atomic_uint32 *ptr, int32 sub_);
-
-#ifdef PG_HAVE_ATOMIC_U64_SUPPORT
-STATIC_IF_INLINE_DECLARE void pg_atomic_init_u64(volatile pg_atomic_uint64 *ptr, uint64 val_);
-STATIC_IF_INLINE_DECLARE uint64 pg_atomic_read_u64(volatile pg_atomic_uint64 *ptr);
-STATIC_IF_INLINE_DECLARE void pg_atomic_write_u64(volatile pg_atomic_uint64 *ptr, uint64 val);
-STATIC_IF_INLINE_DECLARE uint64 pg_atomic_exchange_u64(volatile pg_atomic_uint64 *ptr, uint64 newval);
-STATIC_IF_INLINE_DECLARE bool pg_atomic_compare_exchange_u64(volatile pg_atomic_uint64 *ptr,
-															 uint64 *expected, uint64 newval);
-STATIC_IF_INLINE_DECLARE uint64 pg_atomic_fetch_add_u64(volatile pg_atomic_uint64 *ptr, int64 add_);
-STATIC_IF_INLINE_DECLARE uint64 pg_atomic_fetch_sub_u64(volatile pg_atomic_uint64 *ptr, int64 sub_);
-STATIC_IF_INLINE_DECLARE uint64 pg_atomic_fetch_and_u64(volatile pg_atomic_uint64 *ptr, uint64 and_);
-STATIC_IF_INLINE_DECLARE uint64 pg_atomic_fetch_or_u64(volatile pg_atomic_uint64 *ptr, uint64 or_);
-STATIC_IF_INLINE_DECLARE uint64 pg_atomic_add_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 add_);
-STATIC_IF_INLINE_DECLARE uint64 pg_atomic_sub_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 sub_);
-#endif   /* PG_HAVE_64_BIT_ATOMICS */
-
 /*
  * pg_compiler_barrier - prevent the compiler from moving code across
@@ -201,18 +161,12 @@ STATIC_IF_INLINE_DECLARE uint64 pg_atomic_sub_fetch_u64(volatile pg_atomic_uint6
  */
 #define pg_spin_delay() pg_spin_delay_impl()
 
-/*
- * The following functions are wrapper functions around the platform specific
- * implementation of the atomic operations performing common checks.
- */
-#if defined(PG_USE_INLINE) || defined(ATOMICS_INCLUDE_DEFINITIONS)
-
 /*
  * pg_atomic_init_flag - initialize atomic flag.
  *
  * No barrier semantics.
  */
-STATIC_IF_INLINE_DECLARE void
+static inline void
 pg_atomic_init_flag(volatile pg_atomic_flag *ptr)
 {
 	AssertPointerAlignment(ptr, sizeof(*ptr));
@@ -227,7 +181,7 @@ pg_atomic_init_flag(volatile pg_atomic_flag *ptr)
  *
  * Acquire (including read barrier) semantics.
  */
-STATIC_IF_INLINE_DECLARE bool
+static inline bool
 pg_atomic_test_set_flag(volatile pg_atomic_flag *ptr)
 {
 	AssertPointerAlignment(ptr, sizeof(*ptr));
@@ -242,7 +196,7 @@ pg_atomic_test_set_flag(volatile pg_atomic_flag *ptr)
  *
  * No barrier semantics.
  */
-STATIC_IF_INLINE_DECLARE bool
+static inline bool
 pg_atomic_unlocked_test_flag(volatile pg_atomic_flag *ptr)
 {
 	AssertPointerAlignment(ptr, sizeof(*ptr));
@@ -255,7 +209,7 @@ pg_atomic_unlocked_test_flag(volatile pg_atomic_flag *ptr)
  *
  * Release (including write barrier) semantics.
  */
-STATIC_IF_INLINE_DECLARE void
+static inline void
 pg_atomic_clear_flag(volatile pg_atomic_flag *ptr)
 {
 	AssertPointerAlignment(ptr, sizeof(*ptr));
@@ -271,7 +225,7 @@ pg_atomic_clear_flag(volatile pg_atomic_flag *ptr)
  *
  * No barrier semantics.
  */
-STATIC_IF_INLINE_DECLARE void
+static inline void
 pg_atomic_init_u32(volatile pg_atomic_uint32 *ptr, uint32 val)
 {
 	AssertPointerAlignment(ptr, 4);
@@ -289,7 +243,7 @@ pg_atomic_init_u32(volatile pg_atomic_uint32 *ptr, uint32 val)
  *
  * No barrier semantics.
  */
-STATIC_IF_INLINE uint32
+static inline uint32
 pg_atomic_read_u32(volatile pg_atomic_uint32 *ptr)
 {
 	AssertPointerAlignment(ptr, 4);
@@ -304,7 +258,7 @@ pg_atomic_read_u32(volatile pg_atomic_uint32 *ptr)
  *
  * No barrier semantics.
  */
-STATIC_IF_INLINE_DECLARE void
+static inline void
 pg_atomic_write_u32(volatile pg_atomic_uint32 *ptr, uint32 val)
 {
 	AssertPointerAlignment(ptr, 4);
@@ -319,7 +273,7 @@ pg_atomic_write_u32(volatile pg_atomic_uint32 *ptr, uint32 val)
  *
  * Full barrier semantics.
  */
-STATIC_IF_INLINE uint32
+static inline uint32
 pg_atomic_exchange_u32(volatile pg_atomic_uint32 *ptr, uint32 newval)
 {
 	AssertPointerAlignment(ptr, 4);
@@ -338,7 +292,7 @@ pg_atomic_exchange_u32(volatile pg_atomic_uint32 *ptr, uint32 newval)
  *
  * Full barrier semantics.
  */
-STATIC_IF_INLINE bool
+static inline bool
 pg_atomic_compare_exchange_u32(volatile pg_atomic_uint32 *ptr,
 							   uint32 *expected, uint32 newval)
 {
@@ -355,7 +309,7 @@ pg_atomic_compare_exchange_u32(volatile pg_atomic_uint32 *ptr,
  *
  * Full barrier semantics.
  */
-STATIC_IF_INLINE uint32
+static inline uint32
 pg_atomic_fetch_add_u32(volatile pg_atomic_uint32 *ptr, int32 add_)
 {
 	AssertPointerAlignment(ptr, 4);
@@ -370,7 +324,7 @@ pg_atomic_fetch_add_u32(volatile pg_atomic_uint32 *ptr, int32 add_)
  *
  * Full barrier semantics.
  */
-STATIC_IF_INLINE uint32
+static inline uint32
 pg_atomic_fetch_sub_u32(volatile pg_atomic_uint32 *ptr, int32 sub_)
 {
 	AssertPointerAlignment(ptr, 4);
@@ -385,7 +339,7 @@ pg_atomic_fetch_sub_u32(volatile pg_atomic_uint32 *ptr, int32 sub_)
  *
  * Full barrier semantics.
  */
-STATIC_IF_INLINE uint32
+static inline uint32
 pg_atomic_fetch_and_u32(volatile pg_atomic_uint32 *ptr, uint32 and_)
 {
 	AssertPointerAlignment(ptr, 4);
@@ -399,7 +353,7 @@ pg_atomic_fetch_and_u32(volatile pg_atomic_uint32 *ptr, uint32 and_)
  *
  * Full barrier semantics.
  */
-STATIC_IF_INLINE uint32
+static inline uint32
 pg_atomic_fetch_or_u32(volatile pg_atomic_uint32 *ptr, uint32 or_)
 {
 	AssertPointerAlignment(ptr, 4);
@@ -413,7 +367,7 @@ pg_atomic_fetch_or_u32(volatile pg_atomic_uint32 *ptr, uint32 or_)
  *
  * Full barrier semantics.
  */
-STATIC_IF_INLINE uint32
+static inline uint32
 pg_atomic_add_fetch_u32(volatile pg_atomic_uint32 *ptr, int32 add_)
 {
 	AssertPointerAlignment(ptr, 4);
@@ -428,7 +382,7 @@ pg_atomic_add_fetch_u32(volatile pg_atomic_uint32 *ptr, int32 add_)
  *
  * Full barrier semantics.
  */
-STATIC_IF_INLINE uint32
+static inline uint32
 pg_atomic_sub_fetch_u32(volatile pg_atomic_uint32 *ptr, int32 sub_)
 {
 	AssertPointerAlignment(ptr, 4);
@@ -444,7 +398,7 @@ pg_atomic_sub_fetch_u32(volatile pg_atomic_uint32 *ptr, int32 sub_)
  */
 
 #ifdef PG_HAVE_ATOMIC_U64_SUPPORT
-STATIC_IF_INLINE_DECLARE void
+static inline void
 pg_atomic_init_u64(volatile pg_atomic_uint64 *ptr, uint64 val)
 {
 	AssertPointerAlignment(ptr, 8);
@@ -452,21 +406,21 @@ pg_atomic_init_u64(volatile pg_atomic_uint64 *ptr, uint64 val)
 	pg_atomic_init_u64_impl(ptr, val);
 }
 
-STATIC_IF_INLINE uint64
+static inline uint64
 pg_atomic_read_u64(volatile pg_atomic_uint64 *ptr)
 {
 	AssertPointerAlignment(ptr, 8);
 	return pg_atomic_read_u64_impl(ptr);
 }
 
-STATIC_IF_INLINE void
+static inline void
 pg_atomic_write_u64(volatile pg_atomic_uint64 *ptr, uint64 val)
 {
 	AssertPointerAlignment(ptr, 8);
 	pg_atomic_write_u64_impl(ptr, val);
 }
 
-STATIC_IF_INLINE uint64
+static inline uint64
 pg_atomic_exchange_u64(volatile pg_atomic_uint64 *ptr, uint64 newval)
 {
 	AssertPointerAlignment(ptr, 8);
@@ -474,7 +428,7 @@ pg_atomic_exchange_u64(volatile pg_atomic_uint64 *ptr, uint64 newval)
 	return pg_atomic_exchange_u64_impl(ptr, newval);
 }
 
-STATIC_IF_INLINE bool
+static inline bool
 pg_atomic_compare_exchange_u64(volatile pg_atomic_uint64 *ptr,
 							   uint64 *expected, uint64 newval)
 {
@@ -483,14 +437,14 @@ pg_atomic_compare_exchange_u64(volatile pg_atomic_uint64 *ptr,
 	return pg_atomic_compare_exchange_u64_impl(ptr, expected, newval);
 }
 
-STATIC_IF_INLINE uint64
+static inline uint64
 pg_atomic_fetch_add_u64(volatile pg_atomic_uint64 *ptr, int64 add_)
 {
 	AssertPointerAlignment(ptr, 8);
 	return pg_atomic_fetch_add_u64_impl(ptr, add_);
 }
 
-STATIC_IF_INLINE uint64
+static inline uint64
 pg_atomic_fetch_sub_u64(volatile pg_atomic_uint64 *ptr, int64 sub_)
 {
 	AssertPointerAlignment(ptr, 8);
@@ -498,28 +452,28 @@ pg_atomic_fetch_sub_u64(volatile pg_atomic_uint64 *ptr, int64 sub_)
 	return pg_atomic_fetch_sub_u64_impl(ptr, sub_);
 }
 
-STATIC_IF_INLINE uint64
+static inline uint64
 pg_atomic_fetch_and_u64(volatile pg_atomic_uint64 *ptr, uint64 and_)
 {
 	AssertPointerAlignment(ptr, 8);
 	return pg_atomic_fetch_and_u64_impl(ptr, and_);
 }
 
-STATIC_IF_INLINE uint64
+static inline uint64
 pg_atomic_fetch_or_u64(volatile pg_atomic_uint64 *ptr, uint64 or_)
 {
 	AssertPointerAlignment(ptr, 8);
 	return pg_atomic_fetch_or_u64_impl(ptr, or_);
 }
 
-STATIC_IF_INLINE uint64
+static inline uint64
 pg_atomic_add_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 add_)
 {
 	AssertPointerAlignment(ptr, 8);
 	return pg_atomic_add_fetch_u64_impl(ptr, add_);
 }
 
-STATIC_IF_INLINE uint64
+static inline uint64
 pg_atomic_sub_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 sub_)
 {
 	AssertPointerAlignment(ptr, 8);
@@ -529,9 +483,6 @@ pg_atomic_sub_fetch_u64(volatile pg_atomic_uint64 *ptr, int64 sub_)
 
 #endif   /* PG_HAVE_64_BIT_ATOMICS */
 
-#endif /* defined(PG_USE_INLINE) ||
-		* defined(ATOMICS_INCLUDE_DEFINITIONS) */
-
 #undef INSIDE_ATOMICS_H
 
 #endif   /* ATOMICS_H */
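For review convenience, here is a caller's-eye sketch of the wrappers this patch turns into plain static inlines. It is illustration only and not part of the patch: the SketchState struct and the sketch_* helpers are made up, and it assumes the header is included as "port/atomics.h" and that the struct lives in memory shared between backends.

/*
 * Hypothetical example, not part of the patch: exercises the flag and
 * uint32 wrapper APIs shown in the hunks above.
 */
#include "postgres.h"

#include "port/atomics.h"

typedef struct SketchState
{
	pg_atomic_flag		busy;	/* TAS-style flag */
	pg_atomic_uint32	count;	/* shared counter */
} SketchState;

static void
sketch_init(SketchState *state)
{
	/* The init functions have no barrier semantics; call before sharing. */
	pg_atomic_init_flag(&state->busy);
	pg_atomic_init_u32(&state->count, 0);
}

static bool
sketch_try_enter(SketchState *state)
{
	/* Acquire semantics; true iff the flag was previously clear. */
	return pg_atomic_test_set_flag(&state->busy);
}

static void
sketch_leave(SketchState *state)
{
	/* Release semantics; pairs with the acquire in sketch_try_enter(). */
	pg_atomic_clear_flag(&state->busy);
}

static void
sketch_bump_capped(SketchState *state, uint32 limit)
{
	uint32		old = pg_atomic_read_u32(&state->count);

	/*
	 * CAS loop: on failure pg_atomic_compare_exchange_u32() stores the
	 * current value back into "old", so the loop simply retries until the
	 * increment succeeds or the cap is reached.
	 */
	while (old < limit)
	{
		if (pg_atomic_compare_exchange_u32(&state->count, &old, old + 1))
			break;
	}
}

An unconditional increment would instead use pg_atomic_fetch_add_u32(&state->count, 1), which has full barrier semantics and returns the value before the addition.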