2012-05-18 02:30:46 +02:00
|
|
|
/*
|
|
|
|
* Copyright © 2007 Chris Wilson
|
|
|
|
* Copyright © 2009,2010 Red Hat, Inc.
|
2012-05-18 02:50:38 +02:00
|
|
|
* Copyright © 2011,2012 Google, Inc.
|
2012-05-18 02:30:46 +02:00
|
|
|
*
|
|
|
|
* This is part of HarfBuzz, a text shaping library.
|
|
|
|
*
|
|
|
|
* Permission is hereby granted, without written agreement and without
|
|
|
|
* license or royalty fees, to use, copy, modify, and distribute this
|
|
|
|
* software and its documentation for any purpose, provided that the
|
|
|
|
* above copyright notice and the following two paragraphs appear in
|
|
|
|
* all copies of this software.
|
|
|
|
*
|
|
|
|
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
|
|
|
|
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
|
|
|
|
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
|
|
|
|
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
|
|
|
* DAMAGE.
|
|
|
|
*
|
|
|
|
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
|
|
|
|
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
|
|
|
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
|
|
|
|
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
|
|
|
|
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
|
|
|
|
*
|
|
|
|
* Contributor(s):
|
|
|
|
* Chris Wilson <chris@chris-wilson.co.uk>
|
|
|
|
* Red Hat Author(s): Behdad Esfahbod
|
|
|
|
* Google Author(s): Behdad Esfahbod
|
|
|
|
*/
|
|
|
|
|
|
|
|
#ifndef HB_ATOMIC_PRIVATE_HH
|
|
|
|
#define HB_ATOMIC_PRIVATE_HH
|
|
|
|
|
|
|
|
#include "hb-private.hh"
|
|
|
|
|
|
|
|
|
|
|
|
/* atomic_int */
|
|
|
|
|
|
|
|
/* We need external help for these */
|
|
|
|
|
2015-03-27 21:49:33 +01:00
|
|
|
#if defined(hb_atomic_int_impl_add) \
|
2018-08-01 04:29:49 +02:00
|
|
|
&& defined(hb_atomic_ptr_impl_get) \
|
2015-03-27 21:49:33 +01:00
|
|
|
&& defined(hb_atomic_ptr_impl_cmpexch)
|
2015-04-08 21:49:38 +02:00
|
|
|
|
2015-03-27 21:49:33 +01:00
|
|
|
/* Defined externally, i.e. in config.h; must have typedef'ed hb_atomic_int_impl_t as well. */
|
2012-05-18 02:30:46 +02:00
|
|
|
|
2012-05-18 03:53:24 +02:00
|
|
|
|
2018-08-01 04:33:37 +02:00
|
|
|
#elif !defined(HB_NO_MT) && defined(__ATOMIC_CONSUME)
|
2018-07-17 10:57:01 +02:00
|
|
|
|
|
|
|
/* C++11-style GCC primitives. */
|
|
|
|
|
|
|
|
typedef int hb_atomic_int_impl_t;
|
2018-08-01 06:05:51 +02:00
|
|
|
#define hb_atomic_int_impl_add(AI, V) __atomic_fetch_add ((AI), (V), __ATOMIC_ACQ_REL)
|
|
|
|
#define hb_atomic_int_impl_set_relaxed(AI, V) __atomic_store_n ((AI), (V), __ATOMIC_RELAXED)
|
|
|
|
#define hb_atomic_int_impl_get_relaxed(AI) __atomic_load_n ((AI), __ATOMIC_RELAXED)
|
2018-07-17 10:57:01 +02:00
|
|
|
|
2018-08-01 04:33:37 +02:00
|
|
|
#define hb_atomic_ptr_impl_get(P) __atomic_load_n ((P), __ATOMIC_CONSUME)
|
2018-07-17 10:57:01 +02:00
|
|
|
/* Pointer compare-and-swap built on the GCC/Clang __atomic builtins.
 * Returns true iff *P held O_ and has been replaced by N.
 * Uses the weak form, so it may fail spuriously even when *P == O_. */
static inline bool
_hb_atomic_ptr_impl_cmplexch (const void **P, const void *O_, const void *N)
{
  /* The builtin updates the expected value through a pointer, so it
   * needs an lvalue copy of O_. */
  const void *expected = O_;
  return __atomic_compare_exchange_n ((void **) P,
				      (void **) &expected,
				      (void *) N,
				      true, /* weak */
				      __ATOMIC_ACQ_REL,
				      __ATOMIC_RELAXED);
}
#define hb_atomic_ptr_impl_cmpexch(P,O,N) (_hb_atomic_ptr_impl_cmplexch ((const void **) (P), (O), (N)))
|
|
|
|
|
2018-07-16 15:41:09 +02:00
|
|
|
#elif !defined(HB_NO_MT) && __cplusplus >= 201103L
|
|
|
|
|
2018-07-17 10:57:01 +02:00
|
|
|
/* C++11 atomics. */
|
2018-07-16 15:41:09 +02:00
|
|
|
|
|
|
|
#include <atomic>
|
|
|
|
|
|
|
|
typedef int hb_atomic_int_impl_t;
|
2018-08-01 06:05:51 +02:00
|
|
|
#define hb_atomic_int_impl_add(AI, V) (reinterpret_cast<std::atomic<int> *> (AI)->fetch_add ((V), std::memory_order_acq_rel))
|
|
|
|
#define hb_atomic_int_impl_set_relaxed(AI, V) (reinterpret_cast<std::atomic<int> *> (AI)->store ((V), std::memory_order_relaxed))
|
|
|
|
#define hb_atomic_int_impl_get_relaxed(AI) (reinterpret_cast<std::atomic<int> *> (AI)->load (std::memory_order_relaxed))
|
2018-07-16 15:41:09 +02:00
|
|
|
|
2018-08-01 04:33:37 +02:00
|
|
|
#define hb_atomic_ptr_impl_get(P) (reinterpret_cast<std::atomic<void*> *> (P)->load (std::memory_order_consume))
|
2018-07-16 15:41:09 +02:00
|
|
|
/* Pointer compare-and-swap built on C++11 std::atomic.
 * Returns true iff *P held O_ and has been replaced by N. */
static inline bool
_hb_atomic_ptr_impl_cmplexch (const void **P, const void *O_, const void *N)
{
  const void *O = O_; // Need lvalue
  /* NOTE(review): compare_exchange_weak may fail spuriously even when
   * *P == O_.  Confirm every caller of hb_atomic_ptr_cmpexch either
   * retries or tolerates a false result; otherwise this should be
   * compare_exchange_strong. */
  return reinterpret_cast<std::atomic<const void*> *> (P)->compare_exchange_weak (O, N, std::memory_order_acq_rel, std::memory_order_relaxed);
}
#define hb_atomic_ptr_impl_cmpexch(P,O,N) (_hb_atomic_ptr_impl_cmplexch ((const void **) (P), (O), (N)))
|
|
|
|
|
|
|
|
|
2013-02-12 21:31:58 +01:00
|
|
|
#elif !defined(HB_NO_MT) && (defined(_WIN32) || defined(__CYGWIN__))
|
2012-08-29 00:03:35 +02:00
|
|
|
|
|
|
|
#include <windows.h>
|
|
|
|
|
2018-08-01 04:29:49 +02:00
|
|
|
/* MinGW has a convoluted history of supporting MemoryBarrier
 * properly.  As such, define a function to wrap the whole
 * thing. */
static inline void _HBMemoryBarrier (void) {
#if !defined(MemoryBarrier)
  /* No MemoryBarrier macro available: a dummy InterlockedExchange
   * stands in as a full barrier on Windows. */
  long dummy = 0;
  InterlockedExchange (&dummy, 1);
#else
  MemoryBarrier ();
#endif
}
|
|
|
|
|
2015-03-27 21:49:33 +01:00
|
|
|
typedef LONG hb_atomic_int_impl_t;
|
2018-08-01 06:05:51 +02:00
|
|
|
#define hb_atomic_int_impl_add(AI, V) InterlockedExchangeAdd ((AI), (V))
|
2012-05-18 02:30:46 +02:00
|
|
|
|
2018-08-01 04:29:49 +02:00
|
|
|
#define hb_atomic_ptr_impl_get(P) (_HBMemoryBarrier (), (void *) *(P))
|
2015-03-27 21:49:33 +01:00
|
|
|
#define hb_atomic_ptr_impl_cmpexch(P,O,N) (InterlockedCompareExchangePointer ((void **) (P), (void *) (N), (void *) (O)) == (void *) (O))
|
2012-06-05 23:27:20 +02:00
|
|
|
|
2012-05-18 02:50:38 +02:00
|
|
|
|
2018-01-04 12:40:10 +01:00
|
|
|
#elif !defined(HB_NO_MT) && defined(HAVE_INTEL_ATOMIC_PRIMITIVES)
|
|
|
|
|
|
|
|
typedef int hb_atomic_int_impl_t;
|
2018-08-01 06:05:51 +02:00
|
|
|
#define hb_atomic_int_impl_add(AI, V) __sync_fetch_and_add ((AI), (V))
|
2018-01-04 12:40:10 +01:00
|
|
|
|
2018-08-01 04:29:49 +02:00
|
|
|
#define hb_atomic_ptr_impl_get(P) (void *) (__sync_synchronize (), *(P))
|
2018-01-04 12:40:10 +01:00
|
|
|
#define hb_atomic_ptr_impl_cmpexch(P,O,N) __sync_bool_compare_and_swap ((P), (O), (N))
|
|
|
|
|
|
|
|
|
|
|
|
#elif !defined(HB_NO_MT) && defined(HAVE_SOLARIS_ATOMIC_OPS)
|
|
|
|
|
|
|
|
#include <atomic.h>
|
|
|
|
#include <mbarrier.h>
|
|
|
|
|
|
|
|
typedef unsigned int hb_atomic_int_impl_t;
|
2018-08-01 06:05:51 +02:00
|
|
|
#define hb_atomic_int_impl_add(AI, V) ( ({__machine_rw_barrier ();}), atomic_add_int_nv ((AI), (V)) - (V))
|
2018-01-04 12:40:10 +01:00
|
|
|
|
2018-08-01 04:29:49 +02:00
|
|
|
#define hb_atomic_ptr_impl_get(P) ( ({__machine_rw_barrier ();}), (void *) *(P))
|
|
|
|
#define hb_atomic_ptr_impl_cmpexch(P,O,N) ( ({__machine_rw_barrier ();}), atomic_cas_ptr ((void **) (P), (void *) (O), (void *) (N)) == (void *) (O) ? true : false)
|
2018-01-04 12:40:10 +01:00
|
|
|
|
|
|
|
|
2012-05-18 02:30:46 +02:00
|
|
|
#elif !defined(HB_NO_MT) && defined(__APPLE__)
|
|
|
|
|
|
|
|
#include <libkern/OSAtomic.h>
|
2012-12-10 21:25:21 +01:00
|
|
|
#ifdef __MAC_OS_X_MIN_REQUIRED
|
2012-12-10 06:57:00 +01:00
|
|
|
#include <AvailabilityMacros.h>
|
2012-12-10 21:25:21 +01:00
|
|
|
#elif defined(__IPHONE_OS_MIN_REQUIRED)
|
|
|
|
#include <Availability.h>
|
|
|
|
#endif
|
2012-06-05 23:27:20 +02:00
|
|
|
|
2012-05-18 02:30:46 +02:00
|
|
|
|
2015-03-27 21:49:33 +01:00
|
|
|
typedef int32_t hb_atomic_int_impl_t;
|
2018-08-01 06:05:51 +02:00
|
|
|
#define hb_atomic_int_impl_add(AI, V) (OSAtomicAdd32Barrier ((V), (AI)) - (V))
|
2015-03-27 21:49:33 +01:00
|
|
|
|
2018-08-01 04:29:49 +02:00
|
|
|
#define hb_atomic_ptr_impl_get(P) (OSMemoryBarrier (), (void *) *(P))
|
2012-12-10 21:25:21 +01:00
|
|
|
/* NOTE(review): Apple's Availability.h spells the iOS macro
 * __IPHONE_OS_VERSION_MIN_REQUIRED; __IPHONE_VERSION_MIN_REQUIRED as
 * used below looks undefined and would evaluate to 0 in this #if --
 * confirm against the SDK headers before relying on the first branch
 * on iOS. */
#if (MAC_OS_X_VERSION_MIN_REQUIRED > MAC_OS_X_VERSION_10_4 || __IPHONE_VERSION_MIN_REQUIRED >= 20100)
#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwapPtrBarrier ((void *) (O), (void *) (N), (void **) (P))
#else
/* Older SDKs lack the pointer-CAS primitive: fall back to the
 * integer CAS of the matching pointer width. */
#if __ppc64__ || __x86_64__ || __aarch64__
#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap64Barrier ((int64_t) (void *) (O), (int64_t) (void *) (N), (int64_t*) (P))
#else
#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap32Barrier ((int32_t) (void *) (O), (int32_t) (void *) (N), (int32_t*) (P))
#endif
#endif
|
2012-06-05 23:27:20 +02:00
|
|
|
|
2012-05-18 02:50:38 +02:00
|
|
|
|
2015-12-10 17:44:19 +01:00
|
|
|
#elif !defined(HB_NO_MT) && defined(_AIX) && defined(__IBMCPP__)
|
2015-12-10 15:54:42 +01:00
|
|
|
|
|
|
|
#include <builtins.h>
|
|
|
|
|
|
|
|
|
2017-05-16 23:26:28 +02:00
|
|
|
/* Fetch-and-add for AIX/xlC: wraps the unfenced __fetch_and_add
 * builtin with explicit barriers.  Returns the value *AI held before
 * the addition. */
static inline int _hb_fetch_and_add(volatile int* AI, unsigned int V) {
  __lwsync();  /* barrier before the update */
  int result = __fetch_and_add(AI, V);
  __isync();   /* barrier after the update */
  return result;
}
|
2017-05-16 23:26:28 +02:00
|
|
|
/* Compare-and-swap on a long for AIX/xlC: wraps the
 * __compare_and_swaplp builtin with full __sync() barriers on both
 * sides.  Returns non-zero iff *P held O and has been set to N. */
static inline int _hb_compare_and_swaplp(volatile long* P, long O, long N) {
  __sync();
  /* Builtin takes the expected value by pointer; O is our local copy. */
  int result = __compare_and_swaplp (P, &O, N);
  __sync();
  return result;
}
|
|
|
|
|
|
|
|
typedef int hb_atomic_int_impl_t;
|
2018-08-01 06:05:51 +02:00
|
|
|
#define hb_atomic_int_impl_add(AI, V) _hb_fetch_and_add ((AI), (V))
|
2015-12-10 15:54:42 +01:00
|
|
|
|
2018-08-01 04:29:49 +02:00
|
|
|
#define hb_atomic_ptr_impl_get(P) (__sync(), (void *) *(P))
|
2017-05-16 23:26:28 +02:00
|
|
|
#define hb_atomic_ptr_impl_cmpexch(P,O,N) _hb_compare_and_swaplp ((long*)(P), (long)(O), (long)(N))
|
2015-12-10 15:54:42 +01:00
|
|
|
|
2012-06-05 22:34:49 +02:00
|
|
|
#elif !defined(HB_NO_MT)
|
|
|
|
|
|
|
|
#define HB_ATOMIC_INT_NIL 1 /* Warn that fallback implementation is in use. */
|
|
|
|
|
2015-03-27 21:49:33 +01:00
|
|
|
typedef volatile int hb_atomic_int_impl_t;
|
2018-08-01 06:05:51 +02:00
|
|
|
#define hb_atomic_int_impl_add(AI, V) ((*(AI) += (V)) - (V))
|
2015-03-27 21:49:33 +01:00
|
|
|
|
2018-08-01 04:29:49 +02:00
|
|
|
#define hb_atomic_ptr_impl_get(P) ((void *) *(P))
|
2015-03-27 21:49:33 +01:00
|
|
|
#define hb_atomic_ptr_impl_cmpexch(P,O,N) (* (void * volatile *) (P) == (void *) (O) ? (* (void * volatile *) (P) = (void *) (N), true) : false)
|
2012-06-05 23:27:20 +02:00
|
|
|
|
2012-06-05 22:34:49 +02:00
|
|
|
|
|
|
|
#else /* HB_NO_MT */
|
2012-05-18 02:30:46 +02:00
|
|
|
|
2015-03-27 21:49:33 +01:00
|
|
|
typedef int hb_atomic_int_impl_t;
|
2018-08-01 06:05:51 +02:00
|
|
|
#define hb_atomic_int_impl_add(AI, V) ((*(AI) += (V)) - (V))
|
2015-03-27 21:49:33 +01:00
|
|
|
|
2018-08-01 04:29:49 +02:00
|
|
|
#define hb_atomic_ptr_impl_get(P) ((void *) *(P))
|
2015-03-27 21:49:33 +01:00
|
|
|
#define hb_atomic_ptr_impl_cmpexch(P,O,N) (* (void **) (P) == (void *) (O) ? (* (void **) (P) = (void *) (N), true) : false)
|
2012-05-18 02:30:46 +02:00
|
|
|
|
2012-06-05 23:27:20 +02:00
|
|
|
|
2012-05-18 02:30:46 +02:00
|
|
|
#endif
|
|
|
|
|
2015-03-27 21:49:33 +01:00
|
|
|
|
2018-07-16 15:41:09 +02:00
|
|
|
#ifndef HB_ATOMIC_INT_INIT
|
2018-06-01 04:31:39 +02:00
|
|
|
#define HB_ATOMIC_INT_INIT(V) {V}
|
2018-07-16 15:41:09 +02:00
|
|
|
#endif
|
2018-08-01 06:05:51 +02:00
|
|
|
#ifndef hb_atomic_int_impl_set_relaxed
|
|
|
|
#define hb_atomic_int_impl_set_relaxed(AI, V) ((AI) = (V))
|
|
|
|
#endif
|
|
|
|
#ifndef hb_atomic_int_impl_get_relaxed
|
|
|
|
#define hb_atomic_int_impl_get_relaxed(AI) (AI)
|
|
|
|
#endif
|
|
|
|
|
2015-03-27 21:49:33 +01:00
|
|
|
|
|
|
|
/* Portable atomic integer: a thin wrapper delegating to whichever
 * hb_atomic_int_impl_* primitives the platform branch above selected.
 * `v` is mutable so const objects can still be read atomically. */
struct hb_atomic_int_t
{
  mutable hb_atomic_int_impl_t v;

  /* Store with relaxed ordering (no synchronization implied). */
  inline void set_relaxed (int v_) { hb_atomic_int_impl_set_relaxed (&v, v_); }
  /* Load with relaxed ordering (no synchronization implied). */
  inline int get_relaxed (void) const { return hb_atomic_int_impl_get_relaxed (&v); }
  /* Atomically add 1; returns the value held BEFORE the increment
   * (every impl variant is fetch-then-add, or compensates with "- (V)"). */
  inline int inc (void) { return hb_atomic_int_impl_add (&v, 1); }
  /* Atomically subtract 1; returns the value held BEFORE the decrement. */
  inline int dec (void) { return hb_atomic_int_impl_add (&v, -1); }
};
|
|
|
|
|
|
|
|
|
|
|
|
#define hb_atomic_ptr_get(P) hb_atomic_ptr_impl_get(P)
|
|
|
|
#define hb_atomic_ptr_cmpexch(P,O,N) hb_atomic_ptr_impl_cmpexch((P),(O),(N))
|
|
|
|
|
2012-05-18 02:30:46 +02:00
|
|
|
|
|
|
|
#endif /* HB_ATOMIC_PRIVATE_HH */
|