Minor refactoring to the atomics implementation

s/atomic_int/atomic_int_impl/ and s/atomic_ptr/atomic_ptr_impl/
to bring them on par with hb_mutex_impl_t, then re-introduce
hb_atomic_int_t as a wrapper around hb_atomic_int_impl_t.

In hb_reference_count_t, make it clear that the non-atomic get and set
are intentional, given the nature of the cases they are used in
(comparison against -1 and the debug output/tracing).
Konstantin Ritt 2015-03-28 00:49:33 +04:00 committed by Behdad Esfahbod
parent 24930d544e
commit 3f174cd020
3 changed files with 69 additions and 42 deletions
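
Before the per-file diffs, a condensed sketch of the layering this refactoring produces, assuming the GCC/Intel __sync backend (the real header selects its backend through an #if ladder, and the exact member names follow the diff below): each platform section now only provides the raw hb_atomic_int_impl_t primitives, and a single portable hb_atomic_int_t wrapper is built on top of them, mirroring the hb_mutex_impl_t/hb_mutex_t split.

    /* Backend layer: one of these per platform, chosen by the #if ladder. */
    typedef int hb_atomic_int_impl_t;
    #define HB_ATOMIC_INT_IMPL_INIT(V)    (V)
    #define hb_atomic_int_impl_add(AI, V) __sync_fetch_and_add (&(AI), (V))

    /* Portable wrapper layer: what the rest of the code base uses. */
    #define HB_ATOMIC_INT_INIT(V) {HB_ATOMIC_INT_IMPL_INIT(V)}
    struct hb_atomic_int_t
    {
      hb_atomic_int_impl_t ref;

      inline void set_unsafe (int v) { ref = v; }          /* plain store, no barrier */
      inline int  get_unsafe (void) const { return ref; }  /* plain load, no barrier  */
      inline int  inc (void) { return hb_atomic_int_impl_add (ref,  1); } /* atomic */
      inline int  dec (void) { return hb_atomic_int_impl_add (ref, -1); } /* atomic */
    };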

hb-atomic-private.hh

@@ -39,11 +39,11 @@
 /* We need external help for these */
-#if defined(hb_atomic_int_add) \
- && defined(hb_atomic_ptr_get) \
- && defined(hb_atomic_ptr_cmpexch)
+#if defined(hb_atomic_int_impl_add) \
+ && defined(hb_atomic_ptr_impl_get) \
+ && defined(hb_atomic_ptr_impl_cmpexch)
-/* Defined externally, i.e. in config.h */
+/* Defined externally, i.e. in config.h; must have typedef'ed hb_atomic_int_impl_t as well. */
 #elif !defined(HB_NO_MT) && (defined(_WIN32) || defined(__CYGWIN__))
@@ -62,11 +62,11 @@ static inline void _HBMemoryBarrier (void) {
 #endif
 }
-typedef LONG hb_atomic_int_t;
-#define hb_atomic_int_add(AI, V) InterlockedExchangeAdd (&(AI), (V))
+typedef LONG hb_atomic_int_impl_t;
+#define hb_atomic_int_impl_add(AI, V) InterlockedExchangeAdd (&(AI), (V))
-#define hb_atomic_ptr_get(P) (_HBMemoryBarrier (), (void *) *(P))
-#define hb_atomic_ptr_cmpexch(P,O,N) (InterlockedCompareExchangePointer ((void **) (P), (void *) (N), (void *) (O)) == (void *) (O))
+#define hb_atomic_ptr_impl_get(P) (_HBMemoryBarrier (), (void *) *(P))
+#define hb_atomic_ptr_impl_cmpexch(P,O,N) (InterlockedCompareExchangePointer ((void **) (P), (void *) (N), (void *) (O)) == (void *) (O))
 #elif !defined(HB_NO_MT) && defined(__APPLE__)
@@ -78,28 +78,31 @@ typedef LONG hb_atomic_int_t;
 #include <Availability.h>
 #endif
-typedef int32_t hb_atomic_int_t;
-#define hb_atomic_int_add(AI, V) (OSAtomicAdd32Barrier ((V), &(AI)) - (V))
-#define hb_atomic_ptr_get(P) (OSMemoryBarrier (), (void *) *(P))
+typedef int32_t hb_atomic_int_impl_t;
+#define HB_ATOMIC_INT_IMPL_INIT(V) (V)
+#define hb_atomic_int_impl_add(AI, V) (OSAtomicAdd32Barrier ((V), &(AI)) - (V))
+#define hb_atomic_ptr_impl_get(P) (OSMemoryBarrier (), (void *) *(P))
 #if (MAC_OS_X_VERSION_MIN_REQUIRED > MAC_OS_X_VERSION_10_4 || __IPHONE_VERSION_MIN_REQUIRED >= 20100)
-#define hb_atomic_ptr_cmpexch(P,O,N) OSAtomicCompareAndSwapPtrBarrier ((void *) (O), (void *) (N), (void **) (P))
+#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwapPtrBarrier ((void *) (O), (void *) (N), (void **) (P))
 #else
 #if __ppc64__ || __x86_64__ || __aarch64__
-#define hb_atomic_ptr_cmpexch(P,O,N) OSAtomicCompareAndSwap64Barrier ((int64_t) (O), (int64_t) (N), (int64_t*) (P))
+#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap64Barrier ((int64_t) (O), (int64_t) (N), (int64_t*) (P))
 #else
-#define hb_atomic_ptr_cmpexch(P,O,N) OSAtomicCompareAndSwap32Barrier ((int32_t) (O), (int32_t) (N), (int32_t*) (P))
+#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap32Barrier ((int32_t) (O), (int32_t) (N), (int32_t*) (P))
 #endif
 #endif
 #elif !defined(HB_NO_MT) && defined(HAVE_INTEL_ATOMIC_PRIMITIVES)
-typedef int hb_atomic_int_t;
-#define hb_atomic_int_add(AI, V) __sync_fetch_and_add (&(AI), (V))
+typedef int hb_atomic_int_impl_t;
+#define HB_ATOMIC_INT_IMPL_INIT(V) (V)
+#define hb_atomic_int_impl_add(AI, V) __sync_fetch_and_add (&(AI), (V))
-#define hb_atomic_ptr_get(P) (void *) (__sync_synchronize (), *(P))
-#define hb_atomic_ptr_cmpexch(P,O,N) __sync_bool_compare_and_swap ((P), (O), (N))
+#define hb_atomic_ptr_impl_get(P) (void *) (__sync_synchronize (), *(P))
+#define hb_atomic_ptr_impl_cmpexch(P,O,N) __sync_bool_compare_and_swap ((P), (O), (N))
 #elif !defined(HB_NO_MT) && defined(HAVE_SOLARIS_ATOMIC_OPS)
@@ -107,33 +110,54 @@ typedef int hb_atomic_int_t;
 #include <atomic.h>
 #include <mbarrier.h>
-typedef unsigned int hb_atomic_int_t;
-#define hb_atomic_int_add(AI, V) ( ({__machine_rw_barrier ();}), atomic_add_int_nv (&(AI), (V)) - (V))
+typedef unsigned int hb_atomic_int_impl_t;
+#define HB_ATOMIC_INT_IMPL_INIT(V) (V)
+#define hb_atomic_int_impl_add(AI, V) ( ({__machine_rw_barrier ();}), atomic_add_int_nv (&(AI), (V)) - (V))
-#define hb_atomic_ptr_get(P) ( ({__machine_rw_barrier ();}), (void *) *(P))
-#define hb_atomic_ptr_cmpexch(P,O,N) ( ({__machine_rw_barrier ();}), atomic_cas_ptr ((void **) (P), (void *) (O), (void *) (N)) == (void *) (O) ? true : false)
+#define hb_atomic_ptr_impl_get(P) ( ({__machine_rw_barrier ();}), (void *) *(P))
+#define hb_atomic_ptr_impl_cmpexch(P,O,N) ( ({__machine_rw_barrier ();}), atomic_cas_ptr ((void **) (P), (void *) (O), (void *) (N)) == (void *) (O) ? true : false)
 #elif !defined(HB_NO_MT)
 #define HB_ATOMIC_INT_NIL 1 /* Warn that fallback implementation is in use. */
-typedef volatile int hb_atomic_int_t;
-#define hb_atomic_int_add(AI, V) (((AI) += (V)) - (V))
-#define hb_atomic_ptr_get(P) ((void *) *(P))
-#define hb_atomic_ptr_cmpexch(P,O,N) (* (void * volatile *) (P) == (void *) (O) ? (* (void * volatile *) (P) = (void *) (N), true) : false)
+typedef volatile int hb_atomic_int_impl_t;
+#define HB_ATOMIC_INT_IMPL_INIT(V) (V)
+#define hb_atomic_int_impl_add(AI, V) (((AI) += (V)) - (V))
+#define hb_atomic_ptr_impl_get(P) ((void *) *(P))
+#define hb_atomic_ptr_impl_cmpexch(P,O,N) (* (void * volatile *) (P) == (void *) (O) ? (* (void * volatile *) (P) = (void *) (N), true) : false)
 #else /* HB_NO_MT */
-typedef int hb_atomic_int_t;
-#define hb_atomic_int_add(AI, V) (((AI) += (V)) - (V))
+typedef int hb_atomic_int_impl_t;
+#define HB_ATOMIC_INT_IMPL_INIT(V) (V)
+#define hb_atomic_int_impl_add(AI, V) (((AI) += (V)) - (V))
+#define hb_atomic_ptr_impl_get(P) ((void *) *(P))
+#define hb_atomic_ptr_impl_cmpexch(P,O,N) (* (void **) (P) == (void *) (O) ? (* (void **) (P) = (void *) (N), true) : false)
-#define hb_atomic_ptr_get(P) ((void *) *(P))
-#define hb_atomic_ptr_cmpexch(P,O,N) (* (void **) (P) == (void *) (O) ? (* (void **) (P) = (void *) (N), true) : false)
 #endif
+/* TODO Add tracing. */
+#define HB_ATOMIC_INT_INIT(V) {HB_ATOMIC_INT_IMPL_INIT(V)}
+struct hb_atomic_int_t
+{
+  hb_atomic_int_impl_t ref;
+  inline void set_unsafe (int v) { ref = v; }
+  inline int get_unsafe (void) const { return ref; }
+  inline int inc (void) { return hb_atomic_int_impl_add (const_cast<hb_atomic_int_impl_t &> (ref), 1); }
+  inline int dec (void) { return hb_atomic_int_impl_add (const_cast<hb_atomic_int_impl_t &> (ref), -1); }
+};
+#define hb_atomic_ptr_get(P) hb_atomic_ptr_impl_get(P)
+#define hb_atomic_ptr_cmpexch(P,O,N) hb_atomic_ptr_impl_cmpexch((P),(O),(N))
 #endif /* HB_ATOMIC_PRIVATE_HH */
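
The first branch of the #if ladder above still lets the primitives be supplied externally, but after this change it expects the _impl_-suffixed names plus the hb_atomic_int_impl_t typedef, as the updated comment notes. A hypothetical config.h override might look roughly like the following; the macro names come from the header above, the GCC __sync builtins are just an illustrative choice (the HAVE_INTEL_ATOMIC_PRIMITIVES branch uses the same ones), and HB_ATOMIC_INT_IMPL_INIT is included on the assumption that HB_ATOMIC_INT_INIT needs it to expand.

    /* Hypothetical config.h snippet: provide the impl-level primitives yourself. */
    typedef int hb_atomic_int_impl_t;                /* required in addition to the macros */
    #define HB_ATOMIC_INT_IMPL_INIT(V)        (V)
    #define hb_atomic_int_impl_add(AI, V)     __sync_fetch_and_add (&(AI), (V))
    #define hb_atomic_ptr_impl_get(P)         ((void *) (__sync_synchronize (), *(P)))
    #define hb_atomic_ptr_impl_cmpexch(P,O,N) __sync_bool_compare_and_swap ((P), (O), (N))

Callers elsewhere keep using hb_atomic_ptr_get()/hb_atomic_ptr_cmpexch(), which now simply forward to these impl macros.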

hb-mutex-private.hh

@@ -45,7 +45,7 @@
 && defined(hb_mutex_impl_unlock) \
 && defined(hb_mutex_impl_finish)
-/* Defined externally, i.e. in config.h */
+/* Defined externally, i.e. in config.h; must have typedef'ed hb_mutex_impl_t as well. */
 #elif !defined(HB_NO_MT) && (defined(_WIN32) || defined(__CYGWIN__))
@@ -119,10 +119,12 @@ typedef int hb_mutex_impl_t;
 #define hb_mutex_impl_unlock(M) HB_STMT_START {} HB_STMT_END
 #define hb_mutex_impl_finish(M) HB_STMT_START {} HB_STMT_END
 #endif
 #define HB_MUTEX_INIT {HB_MUTEX_IMPL_INIT}
 struct hb_mutex_t
 {
 /* TODO Add tracing. */

hb-object-private.hh

@@ -47,19 +47,20 @@
 /* reference_count */
-#define HB_REFERENCE_COUNT_INVALID_VALUE ((hb_atomic_int_t) -1)
-#define HB_REFERENCE_COUNT_INVALID {HB_REFERENCE_COUNT_INVALID_VALUE}
+#define HB_REFERENCE_COUNT_INVALID_VALUE -1
+#define HB_REFERENCE_COUNT_INIT {HB_ATOMIC_INT_INIT(HB_REFERENCE_COUNT_INVALID_VALUE)}
 struct hb_reference_count_t
 {
 hb_atomic_int_t ref_count;
-inline void init (int v) { ref_count = v; }
-inline int inc (void) { return hb_atomic_int_add (const_cast<hb_atomic_int_t &> (ref_count), 1); }
-inline int dec (void) { return hb_atomic_int_add (const_cast<hb_atomic_int_t &> (ref_count), -1); }
-inline void finish (void) { ref_count = HB_REFERENCE_COUNT_INVALID_VALUE; }
-inline bool is_invalid (void) const { return ref_count == HB_REFERENCE_COUNT_INVALID_VALUE; }
+inline void init (int v) { ref_count.set_unsafe (v); }
+inline int get_unsafe (void) const { return ref_count.get_unsafe (); }
+inline int inc (void) { return ref_count.inc (); }
+inline int dec (void) { return ref_count.dec (); }
+inline void finish (void) { ref_count.set_unsafe (HB_REFERENCE_COUNT_INVALID_VALUE); }
+inline bool is_invalid (void) const { return ref_count.get_unsafe () == HB_REFERENCE_COUNT_INVALID_VALUE; }
 };
@@ -102,7 +103,7 @@ struct hb_object_header_t
 hb_reference_count_t ref_count;
 hb_user_data_array_t user_data;
-#define HB_OBJECT_HEADER_STATIC {HB_REFERENCE_COUNT_INVALID, HB_USER_DATA_ARRAY_INIT}
+#define HB_OBJECT_HEADER_STATIC {HB_REFERENCE_COUNT_INIT, HB_USER_DATA_ARRAY_INIT}
 private:
 ASSERT_POD ();
@@ -117,7 +118,7 @@ static inline void hb_object_trace (const Type *obj, const char *function)
 DEBUG_MSG (OBJECT, (void *) obj,
 "%s refcount=%d",
 function,
-obj ? obj->header.ref_count.ref_count : 0);
+obj ? obj->header.ref_count.get_unsafe () : 0);
 }
 template <typename Type>
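
To make the intent behind the non-atomic get_unsafe()/set_unsafe() concrete, here is a rough usage sketch; hb_thing_t and the three functions are hypothetical stand-ins, and only hb_reference_count_t and its members come from the diff above. The count manipulation itself (inc/dec) stays atomic; the unsafe accessors cover the comparison against the -1 sentinel of static objects and snapshot values for debug tracing, and init()/finish() run only when no other thread can hold a reference.

    #include <cstdio>
    #include <cstdlib>

    struct hb_thing_t                            /* hypothetical reference-counted object */
    {
      hb_reference_count_t ref_count;
    };

    static hb_thing_t *
    hb_thing_create (void)
    {
      hb_thing_t *thing = (hb_thing_t *) calloc (1, sizeof (hb_thing_t));
      if (thing)
        thing->ref_count.init (1);               /* non-atomic set: object not yet shared */
      return thing;
    }

    static hb_thing_t *
    hb_thing_reference (hb_thing_t *thing)
    {
      if (!thing->ref_count.is_invalid ())       /* non-atomic compare against -1 is fine */
        thing->ref_count.inc ();                 /* the actual increment is atomic */
      return thing;
    }

    static void
    hb_thing_destroy (hb_thing_t *thing)
    {
      if (thing->ref_count.is_invalid ())        /* static object: never destroyed */
        return;
      if (thing->ref_count.dec () != 1)          /* atomic decrement; 1 means we held the last ref */
        return;
      fprintf (stderr, "destroying, refcount=%d\n",
               thing->ref_count.get_unsafe ());  /* tracing only needs a snapshot */
      thing->ref_count.finish ();                /* non-atomic set: no other owner remains */
      free (thing);
    }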