9 #ifndef BOOST_LOCKFREE_CAS_HPP_INCLUDED
10 #define BOOST_LOCKFREE_CAS_HPP_INCLUDED
12 #include <boost/lockfree/detail/prefix.hpp>
13 #include <boost/interprocess/detail/atomic.hpp>
14 #include <boost/detail/lightweight_mutex.hpp>
15 #include <boost/static_assert.hpp>
17 #include <boost/cstdint.hpp>
19 #include <boost/mpl/map.hpp>
20 #include <boost/mpl/at.hpp>
21 #include <boost/mpl/if.hpp>
22 #include <boost/mpl/long.hpp>
25 #include "emmintrin.h"
// Full hardware memory barrier (fence), selected per compiler/platform.
// NOTE(review): this chunk is a mangled extraction -- original line numbers
// are fused into the text and several preprocessor lines (the opening #if,
// most branch bodies, and the closing #endif) are missing from view.
// Do not edit without the complete file.
33 inline void memory_barrier(
void)
// gcc >= 4.1 (or Intel compiler): branch body not visible here --
// presumably the __sync_synchronize() intrinsic; confirm against the
// complete file.
38 #elif defined(__GNUC__) && ( (__GNUC__ > 4) || ((__GNUC__ >= 4) && \
39 (__GNUC_MINOR__ >= 1))) \
40 || defined(__INTEL_COMPILER)
// Older gcc on i386: a locked add of 0 to the top of the stack acts as a
// full fence ("lock" serializes memory) and the "memory" clobber stops
// compiler reordering.
42 #elif defined(__GNUC__) && defined (__i386__)
43 asm volatile(
"lock; addl $0,0(%%esp)":::
"memory");
// MSVC >= 7.1: branch body not visible (presumably a compiler barrier /
// MemoryBarrier) -- confirm against the complete file.
44 #elif defined(_MSC_VER) && (_MSC_VER >= 1300)
// Mac OS X: body not visible (presumably OSMemoryBarrier) -- confirm.
46 #elif defined(__APPLE__)
// libatomic_ops fallback: body not visible (presumably AO_nop_full()).
48 #elif defined(AO_HAVE_nop_full)
// No fence known for this platform: emit a compile-time warning only.
51 # warning "no memory barrier implemented for this platform"
// Read (acquire-side) memory barrier. The entire body is missing from this
// extraction -- presumably it forwards to memory_barrier() or a weaker
// platform fence; confirm against the complete file before editing.
55 inline void read_memory_barrier(
void)
// Mutex-guarded CAS emulation: correct but blocking. A single function-local
// static mutex serializes every emulated CAS in the process, so this path is
// never lock-free.
67 static inline bool cas(C * addr, C old, C nw)
69 static boost::detail::lightweight_mutex guard;
70 boost::detail::lightweight_mutex::scoped_lock lock(guard);
// NOTE(review): the compare-and-store statements that follow (and the
// closing brace) are missing from this extraction.
// Free-function wrapper over the mutex-based CAS emulation above; its body
// is not visible in this extraction (presumably forwards to the guarded
// cas) -- confirm against the complete file.
86 inline bool atomic_cas_emulation(C * addr, C old, C nw)
// Fixed-width integer types used as the canonical CAS operand types below.
91 using boost::uint32_t;
92 using boost::uint64_t;
// 32-bit CAS specialisation: lock-free on every supported platform.
96 static inline bool cas(
volatile uint32_t * addr,
// gcc >= 4.1 (or Intel compiler): native intrinsic, returns true iff the
// swap was performed.
100 #if defined(__GNUC__) && ( (__GNUC__ > 4) || ((__GNUC__ >= 4) && (__GNUC_MINOR__ >= 1)) ) || defined(__INTEL_COMPILER)
101 return __sync_bool_compare_and_swap(addr, old, nw);
// Fallback: boost.interprocess atomic_cas32 returns the *previous* value,
// so equality with `old` signals a successful swap.
103 return boost::interprocess::detail::atomic_cas32(addr, old, nw) == old;
106 typedef uint32_t cas_type;
// Always lock-free at this width (no emulation branch).
108 static const bool is_lockfree =
true;
// 64-bit CAS specialisation.
113 typedef uint64_t cas_type;
115 static inline bool cas(
volatile uint64_t * addr,
116 uint64_t
const & old,
// gcc >= 4.2 (or 4.1 on x86_64, or Intel compiler): native 8-byte CAS
// intrinsic, true iff the swap was performed.
119 #if defined(__GNUC__) && ( (__GNUC__ > 4) || ((__GNUC__ == 4) && (__GNUC_MINOR__ > 1)) \
120 || ((__GNUC__ == 4) && (__GNUC_MINOR__ == 1) && defined(__x86_64__)) ) \
121 || defined(__INTEL_COMPILER)
122 return __sync_bool_compare_and_swap(addr, old, nw);
// NOTE(review): BUG -- both MSVC branches below call the 32-bit
// InterlockedCompareExchange on a 64-bit object, and
// reinterpret_cast<LONG>(nw) on an integer *value* is ill-formed (and
// would truncate). These should use InterlockedCompareExchange64 with
// LONGLONG operands. Fix in the complete file; this fragment's enclosing
// preprocessor structure is missing, so only flagging here.
123 #elif defined(_M_IX86)
124 return InterlockedCompareExchange(reinterpret_cast<volatile LONG*>(addr),
125 reinterpret_cast<LONG>(nw),
126 reinterpret_cast<LONG>(old)) == old;
127 #elif defined(_M_X64)
128 return InterlockedCompareExchange(reinterpret_cast<volatile LONG*>(addr),
129 reinterpret_cast<LONG>(nw),
130 reinterpret_cast<LONG>(old)) == old;
// Last resort: mutex-guarded emulation (blocking, not lock-free).
133 #warning ("blocking CAS emulation")
134 return atomic_cas_emulation((uint64_t *)addr, old, nw);
// is_lockfree is false only on the emulation path; the #if/#else lines
// guarding these two definitions are missing from this extraction.
140 static const bool is_lockfree =
false;
142 static const bool is_lockfree =
true;
// 16-byte (128-bit) CAS, lock-free only where gcc provides the
// __sync_bool_compare_and_swap_16 intrinsic.
148 #if defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_16)
// TImode: gcc extension selecting a native 128-bit integer type.
149 typedef int cas_type __attribute__ ((mode (TI)));
// Fallback cas_type: presumably a two-word struct holding data[2]; its
// definition is missing from this extraction -- confirm in the full file.
153 bool operator==(
cas_type const & rhs)
155 return (data[0] == rhs.data[0]) &&
156 (data[1] == rhs.data[1]);
165 #if defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_16)
166 return __sync_bool_compare_and_swap_16(addr, old, nw);
// Otherwise fall back to the mutex-guarded (blocking) emulation.
170 return atomic_cas_emulation((
cas_type*)addr, old, nw);
// Lock-free exactly when the 16-byte intrinsic exists; the guarding
// #if/#else lines are missing from this view.
176 static const bool is_lockfree =
false;
178 static const bool is_lockfree =
true;
184 using namespace boost::mpl;
// Compile-time dispatch: map sizeof(C) to the matching fixed-width CAS
// implementation. The cas_map typedef itself is missing from this
// extraction (presumably an mpl::map of long_<4>/<8>/<16> entries).
195 typedef typename at<cas_map, long_<sizeof(C)> >::type atomic_cas_t;
// If no exact-size entry exists, select a fallback (presumably the
// blocking emulation) -- the else-branch of this mpl::if_ is not visible.
197 typedef typename if_<has_key<cas_map, long_<sizeof(C)> >,
// Same-sized integer type used to pun the user's type C for the CAS.
201 typedef typename cas_t::cas_type cas_value_t;
// Forward to the selected implementation, reinterpreting C through
// cas_value_t (the remaining argument casts are missing from this view).
204 static inline bool cas(
volatile C * addr, C
const & old, C
const & nw)
206 return cas_t::cas((
volatile cas_value_t*)addr,
// Lock-freedom is whatever the selected implementation reports.
211 static const bool is_lockfree = cas_t::is_lockfree;
218 template <
typename C>
219 inline bool cas(
volatile C * addr, C
const & old, C
const & nw)
221 return atomic_cas<C>::cas(addr, old, nw);