1
#ifndef BOOST_ATOMIC_DETAIL_GCC_X86_HPP
2
#define BOOST_ATOMIC_DETAIL_GCC_X86_HPP
4
// Copyright (c) 2009 Helge Bahmann
5
// Copyright (c) 2012 Tim Blechmann
7
// Distributed under the Boost Software License, Version 1.0.
8
// See accompanying file LICENSE_1_0.txt or copy at
9
// http://www.boost.org/LICENSE_1_0.txt)
13
#include <boost/cstdint.hpp>
14
#include <boost/atomic/detail/config.hpp>
16
#ifdef BOOST_HAS_PRAGMA_ONCE
24
#if defined(__x86_64__) || defined(__SSE2__)
25
# define BOOST_ATOMIC_X86_FENCE_INSTR "mfence\n"
27
# define BOOST_ATOMIC_X86_FENCE_INSTR "lock ; addl $0, (%%esp)\n"
30
#define BOOST_ATOMIC_X86_PAUSE() __asm__ __volatile__ ("pause\n")
32
#if defined(__i386__) &&\
34
defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8) ||\
35
defined(__i586__) || defined(__i686__) || defined(__pentium4__) || defined(__nocona__) || defined(__core2__) || defined(__corei7__) ||\
36
defined(__k6__) || defined(__athlon__) || defined(__k8__) || defined(__amdfam10__) || defined(__bdver1__) || defined(__bdver2__) || defined(__bdver3__) || defined(__btver1__) || defined(__btver2__)\
38
#define BOOST_ATOMIC_X86_HAS_CMPXCHG8B 1
41
#if defined(__x86_64__) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_16)
42
#define BOOST_ATOMIC_X86_HAS_CMPXCHG16B 1
46
platform_fence_before(memory_order order)
50
case memory_order_relaxed:
51
case memory_order_acquire:
52
case memory_order_consume:
54
case memory_order_release:
55
case memory_order_acq_rel:
56
__asm__ __volatile__ ("" ::: "memory");
59
case memory_order_seq_cst:
60
__asm__ __volatile__ ("" ::: "memory");
68
platform_fence_after(memory_order order)
72
case memory_order_relaxed:
73
case memory_order_release:
75
case memory_order_acquire:
76
case memory_order_acq_rel:
77
__asm__ __volatile__ ("" ::: "memory");
80
case memory_order_consume:
83
case memory_order_seq_cst:
84
__asm__ __volatile__ ("" ::: "memory");
92
platform_fence_after_load(memory_order order)
96
case memory_order_relaxed:
97
case memory_order_release:
99
case memory_order_acquire:
100
case memory_order_acq_rel:
101
__asm__ __volatile__ ("" ::: "memory");
103
case memory_order_consume:
105
case memory_order_seq_cst:
106
__asm__ __volatile__ ("" ::: "memory");
113
platform_fence_before_store(memory_order order)
117
case memory_order_relaxed:
118
case memory_order_acquire:
119
case memory_order_consume:
121
case memory_order_release:
122
case memory_order_acq_rel:
123
__asm__ __volatile__ ("" ::: "memory");
126
case memory_order_seq_cst:
127
__asm__ __volatile__ ("" ::: "memory");
135
platform_fence_after_store(memory_order order)
139
case memory_order_relaxed:
140
case memory_order_release:
142
case memory_order_acquire:
143
case memory_order_acq_rel:
144
__asm__ __volatile__ ("" ::: "memory");
147
case memory_order_consume:
150
case memory_order_seq_cst:
151
__asm__ __volatile__ ("" ::: "memory");
164
atomic_flag(const atomic_flag &) /* = delete */ ;
165
atomic_flag & operator=(const atomic_flag &) /* = delete */ ;
168
BOOST_CONSTEXPR atomic_flag(void) BOOST_NOEXCEPT : v_(0) {}
171
test_and_set(memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
174
atomics::detail::platform_fence_before(order);
175
__asm__ __volatile__ (
177
: "+r" (v), "+m" (v_)
179
atomics::detail::platform_fence_after(order);
184
clear(memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
186
if (order == memory_order_seq_cst) {
188
__asm__ __volatile__ (
190
: "+r" (v), "+m" (v_)
193
atomics::detail::platform_fence_before(order);
199
} /* namespace boost */
201
#define BOOST_ATOMIC_FLAG_LOCK_FREE 2
203
#include <boost/atomic/detail/base.hpp>
205
#if !defined(BOOST_ATOMIC_FORCE_FALLBACK)
207
#define BOOST_ATOMIC_CHAR_LOCK_FREE 2
208
#define BOOST_ATOMIC_CHAR16_T_LOCK_FREE 2
209
#define BOOST_ATOMIC_CHAR32_T_LOCK_FREE 2
210
#define BOOST_ATOMIC_WCHAR_T_LOCK_FREE 2
211
#define BOOST_ATOMIC_SHORT_LOCK_FREE 2
212
#define BOOST_ATOMIC_INT_LOCK_FREE 2
213
#define BOOST_ATOMIC_LONG_LOCK_FREE 2
215
#if defined(__x86_64__) || defined(BOOST_ATOMIC_X86_HAS_CMPXCHG8B)
216
#define BOOST_ATOMIC_LLONG_LOCK_FREE 2
219
#if defined(BOOST_ATOMIC_X86_HAS_CMPXCHG16B) && (defined(BOOST_HAS_INT128) || !defined(BOOST_NO_ALIGNMENT))
220
#define BOOST_ATOMIC_INT128_LOCK_FREE 2
223
#define BOOST_ATOMIC_POINTER_LOCK_FREE 2
224
#define BOOST_ATOMIC_BOOL_LOCK_FREE 2
228
#define BOOST_ATOMIC_THREAD_FENCE 2
230
atomic_thread_fence(memory_order order)
234
case memory_order_relaxed:
236
case memory_order_release:
237
__asm__ __volatile__ ("" ::: "memory");
239
case memory_order_acquire:
240
__asm__ __volatile__ ("" ::: "memory");
242
case memory_order_acq_rel:
243
__asm__ __volatile__ ("" ::: "memory");
245
case memory_order_consume:
247
case memory_order_seq_cst:
248
__asm__ __volatile__ (BOOST_ATOMIC_X86_FENCE_INSTR ::: "memory");
254
#define BOOST_ATOMIC_SIGNAL_FENCE 2
256
atomic_signal_fence(memory_order)
258
__asm__ __volatile__ ("" ::: "memory");
264
template<typename T, bool Sign>
265
class base_atomic<T, int, 1, Sign>
268
typedef base_atomic this_type;
269
typedef T value_type;
270
typedef T difference_type;
273
typedef value_type value_arg_type;
276
BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
277
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
280
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
282
if (order != memory_order_seq_cst) {
283
platform_fence_before(order);
284
const_cast<volatile value_type &>(v_) = v;
291
load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
293
value_type v = const_cast<const volatile value_type &>(v_);
294
platform_fence_after_load(order);
299
fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
301
platform_fence_before(order);
304
"lock ; xaddb %0, %1"
305
: "+q" (v), "+m" (v_)
309
platform_fence_after(order);
314
fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
316
return fetch_add(-v, order);
320
exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
322
platform_fence_before(order);
326
: "+q" (v), "+m" (v_)
328
platform_fence_after(order);
333
compare_exchange_strong(
334
value_type & expected,
336
memory_order success_order,
337
memory_order failure_order) volatile BOOST_NOEXCEPT
339
value_type previous = expected;
340
platform_fence_before(success_order);
344
"lock ; cmpxchgb %3, %1\n\t"
346
: "+a" (previous), "+m" (v_), "=q" (success)
351
platform_fence_after(success_order);
353
platform_fence_after(failure_order);
359
compare_exchange_weak(
360
value_type & expected,
362
memory_order success_order,
363
memory_order failure_order) volatile BOOST_NOEXCEPT
365
return compare_exchange_strong(expected, desired, success_order, failure_order);
369
fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
371
value_type tmp = load(memory_order_relaxed);
372
while (!compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed))
374
BOOST_ATOMIC_X86_PAUSE();
380
fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
382
value_type tmp = load(memory_order_relaxed);
383
while (!compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed))
385
BOOST_ATOMIC_X86_PAUSE();
391
fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
393
value_type tmp = load(memory_order_relaxed);
394
while (!compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed))
396
BOOST_ATOMIC_X86_PAUSE();
402
is_lock_free(void) const volatile BOOST_NOEXCEPT
407
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
409
BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
410
BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
416
template<typename T, bool Sign>
417
class base_atomic<T, int, 2, Sign>
420
typedef base_atomic this_type;
421
typedef T value_type;
422
typedef T difference_type;
425
typedef value_type value_arg_type;
428
BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
429
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
432
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
434
if (order != memory_order_seq_cst) {
435
platform_fence_before(order);
436
const_cast<volatile value_type &>(v_) = v;
443
load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
445
value_type v = const_cast<const volatile value_type &>(v_);
446
platform_fence_after_load(order);
451
fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
453
platform_fence_before(order);
456
"lock ; xaddw %0, %1"
457
: "+q" (v), "+m" (v_)
461
platform_fence_after(order);
466
fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
468
return fetch_add(-v, order);
472
exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
474
platform_fence_before(order);
478
: "+q" (v), "+m" (v_)
480
platform_fence_after(order);
485
compare_exchange_strong(
486
value_type & expected,
488
memory_order success_order,
489
memory_order failure_order) volatile BOOST_NOEXCEPT
491
value_type previous = expected;
492
platform_fence_before(success_order);
496
"lock ; cmpxchgw %3, %1\n\t"
498
: "+a" (previous), "+m" (v_), "=q" (success)
503
platform_fence_after(success_order);
505
platform_fence_after(failure_order);
511
compare_exchange_weak(
512
value_type & expected,
514
memory_order success_order,
515
memory_order failure_order) volatile BOOST_NOEXCEPT
517
return compare_exchange_strong(expected, desired, success_order, failure_order);
521
fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
523
value_type tmp = load(memory_order_relaxed);
524
while (!compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed))
526
BOOST_ATOMIC_X86_PAUSE();
532
fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
534
value_type tmp = load(memory_order_relaxed);
535
while (!compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed))
537
BOOST_ATOMIC_X86_PAUSE();
543
fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
545
value_type tmp = load(memory_order_relaxed);
546
while (!compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed))
548
BOOST_ATOMIC_X86_PAUSE();
554
is_lock_free(void) const volatile BOOST_NOEXCEPT
559
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
561
BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
562
BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
568
template<typename T, bool Sign>
569
class base_atomic<T, int, 4, Sign>
572
typedef base_atomic this_type;
573
typedef T value_type;
574
typedef T difference_type;
577
typedef value_type value_arg_type;
580
BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
581
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
584
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
586
if (order != memory_order_seq_cst) {
587
platform_fence_before(order);
588
const_cast<volatile value_type &>(v_) = v;
595
load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
597
value_type v = const_cast<const volatile value_type &>(v_);
598
platform_fence_after_load(order);
603
fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
605
platform_fence_before(order);
608
"lock ; xaddl %0, %1"
609
: "+r" (v), "+m" (v_)
613
platform_fence_after(order);
618
fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
620
return fetch_add(-v, order);
624
exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
626
platform_fence_before(order);
630
: "+r" (v), "+m" (v_)
632
platform_fence_after(order);
637
compare_exchange_strong(
638
value_type & expected,
640
memory_order success_order,
641
memory_order failure_order) volatile BOOST_NOEXCEPT
643
value_type previous = expected;
644
platform_fence_before(success_order);
648
"lock ; cmpxchgl %3, %1\n\t"
650
: "+a,a" (previous), "+m,m" (v_), "=q,m" (success)
655
platform_fence_after(success_order);
657
platform_fence_after(failure_order);
663
compare_exchange_weak(
664
value_type & expected,
666
memory_order success_order,
667
memory_order failure_order) volatile BOOST_NOEXCEPT
669
return compare_exchange_strong(expected, desired, success_order, failure_order);
673
fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
675
value_type tmp = load(memory_order_relaxed);
676
while (!compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed))
678
BOOST_ATOMIC_X86_PAUSE();
684
fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
686
value_type tmp = load(memory_order_relaxed);
687
while (!compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed))
689
BOOST_ATOMIC_X86_PAUSE();
695
fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
697
value_type tmp = load(memory_order_relaxed);
698
while (!compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed))
700
BOOST_ATOMIC_X86_PAUSE();
706
is_lock_free(void) const volatile BOOST_NOEXCEPT
711
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
713
BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
714
BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
720
#if defined(__x86_64__)
721
template<typename T, bool Sign>
722
class base_atomic<T, int, 8, Sign>
725
typedef base_atomic this_type;
726
typedef T value_type;
727
typedef T difference_type;
730
typedef value_type value_arg_type;
733
BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
734
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
737
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
739
if (order != memory_order_seq_cst) {
740
platform_fence_before(order);
741
const_cast<volatile value_type &>(v_) = v;
748
load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
750
value_type v = const_cast<const volatile value_type &>(v_);
751
platform_fence_after_load(order);
756
fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
758
platform_fence_before(order);
761
"lock ; xaddq %0, %1"
762
: "+r" (v), "+m" (v_)
766
platform_fence_after(order);
771
fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
773
return fetch_add(-v, order);
777
exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
779
platform_fence_before(order);
783
: "+r" (v), "+m" (v_)
785
platform_fence_after(order);
790
compare_exchange_strong(
791
value_type & expected,
793
memory_order success_order,
794
memory_order failure_order) volatile BOOST_NOEXCEPT
796
value_type previous = expected;
797
platform_fence_before(success_order);
801
"lock ; cmpxchgq %3, %1\n\t"
803
: "+a,a" (previous), "+m,m" (v_), "=q,m" (success)
808
platform_fence_after(success_order);
810
platform_fence_after(failure_order);
816
compare_exchange_weak(
817
value_type & expected,
819
memory_order success_order,
820
memory_order failure_order) volatile BOOST_NOEXCEPT
822
return compare_exchange_strong(expected, desired, success_order, failure_order);
826
fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
828
value_type tmp = load(memory_order_relaxed);
829
while (!compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed))
831
BOOST_ATOMIC_X86_PAUSE();
837
fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
839
value_type tmp = load(memory_order_relaxed);
840
while (!compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed))
842
BOOST_ATOMIC_X86_PAUSE();
848
fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
850
value_type tmp = load(memory_order_relaxed);
851
while (!compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed))
853
BOOST_ATOMIC_X86_PAUSE();
859
is_lock_free(void) const volatile BOOST_NOEXCEPT
864
BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
866
BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
867
BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
877
// NOTE: x32 target is still regarded to as x86_64 and can only be detected by the size of pointers
878
#if !defined(__x86_64__) || (defined(__SIZEOF_POINTER__) && __SIZEOF_POINTER__ == 4)
881
class base_atomic<void *, void *, 4, Sign>
884
typedef base_atomic this_type;
885
typedef std::ptrdiff_t difference_type;
886
typedef void * value_type;
889
typedef value_type value_arg_type;
892
BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
893
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
896
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
898
if (order != memory_order_seq_cst) {
899
platform_fence_before(order);
900
const_cast<volatile value_type &>(v_) = v;
906
value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
908
value_type v = const_cast<const volatile value_type &>(v_);
909
platform_fence_after_load(order);
913
value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
915
platform_fence_before(order);
919
: "+r" (v), "+m" (v_)
921
platform_fence_after(order);
925
bool compare_exchange_strong(value_type & expected, value_type desired,
926
memory_order success_order,
927
memory_order failure_order) volatile BOOST_NOEXCEPT
929
value_type previous = expected;
930
platform_fence_before(success_order);
934
"lock ; cmpxchgl %3, %1\n\t"
936
: "+a,a" (previous), "+m,m" (v_), "=q,m" (success)
941
platform_fence_after(success_order);
943
platform_fence_after(failure_order);
948
bool compare_exchange_weak(value_type & expected, value_type desired,
949
memory_order success_order,
950
memory_order failure_order) volatile BOOST_NOEXCEPT
952
return compare_exchange_strong(expected, desired, success_order, failure_order);
956
is_lock_free(void) const volatile BOOST_NOEXCEPT
962
fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
964
platform_fence_before(order);
967
"lock ; xaddl %0, %1"
968
: "+r" (v), "+m" (v_)
972
platform_fence_after(order);
973
return reinterpret_cast<value_type>(v);
977
fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
979
return fetch_add(-v, order);
982
BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
984
BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
985
BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
991
template<typename T, bool Sign>
992
class base_atomic<T *, void *, 4, Sign>
995
typedef base_atomic this_type;
996
typedef T * value_type;
997
typedef std::ptrdiff_t difference_type;
1000
typedef value_type value_arg_type;
1003
BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
1004
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
1007
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1009
if (order != memory_order_seq_cst) {
1010
platform_fence_before(order);
1011
const_cast<volatile value_type &>(v_) = v;
1018
load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
1020
value_type v = const_cast<const volatile value_type &>(v_);
1021
platform_fence_after_load(order);
1026
exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1028
platform_fence_before(order);
1029
__asm__ __volatile__
1032
: "+r" (v), "+m" (v_)
1034
platform_fence_after(order);
1039
compare_exchange_strong(
1040
value_type & expected,
1042
memory_order success_order,
1043
memory_order failure_order) volatile BOOST_NOEXCEPT
1045
value_type previous = expected;
1046
platform_fence_before(success_order);
1048
__asm__ __volatile__
1050
"lock ; cmpxchgl %3, %1\n\t"
1052
: "+a,a" (previous), "+m,m" (v_), "=q,m" (success)
1057
platform_fence_after(success_order);
1059
platform_fence_after(failure_order);
1060
expected = previous;
1065
compare_exchange_weak(
1066
value_type & expected,
1068
memory_order success_order,
1069
memory_order failure_order) volatile BOOST_NOEXCEPT
1071
return compare_exchange_strong(expected, desired, success_order, failure_order);
1075
fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1077
v = v * sizeof(*v_);
1078
platform_fence_before(order);
1079
__asm__ __volatile__
1081
"lock ; xaddl %0, %1"
1082
: "+r" (v), "+m" (v_)
1086
platform_fence_after(order);
1087
return reinterpret_cast<value_type>(v);
1091
fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1093
return fetch_add(-v, order);
1097
is_lock_free(void) const volatile BOOST_NOEXCEPT
1102
BOOST_ATOMIC_DECLARE_POINTER_OPERATORS
1104
BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
1105
BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
1114
class base_atomic<void *, void *, 8, Sign>
1117
typedef base_atomic this_type;
1118
typedef std::ptrdiff_t difference_type;
1119
typedef void * value_type;
1122
typedef value_type value_arg_type;
1125
BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
1126
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
1129
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1131
if (order != memory_order_seq_cst) {
1132
platform_fence_before(order);
1133
const_cast<volatile value_type &>(v_) = v;
1139
value_type load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
1141
value_type v = const_cast<const volatile value_type &>(v_);
1142
platform_fence_after_load(order);
1146
value_type exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1148
platform_fence_before(order);
1149
__asm__ __volatile__
1152
: "+r" (v), "+m" (v_)
1154
platform_fence_after(order);
1158
bool compare_exchange_strong(value_type & expected, value_type desired,
1159
memory_order success_order,
1160
memory_order failure_order) volatile BOOST_NOEXCEPT
1162
value_type previous = expected;
1163
platform_fence_before(success_order);
1165
__asm__ __volatile__
1167
"lock ; cmpxchgq %3, %1\n\t"
1169
: "+a,a" (previous), "+m,m" (v_), "=q,m" (success)
1174
platform_fence_after(success_order);
1176
platform_fence_after(failure_order);
1177
expected = previous;
1181
bool compare_exchange_weak(value_type & expected, value_type desired,
1182
memory_order success_order,
1183
memory_order failure_order) volatile BOOST_NOEXCEPT
1185
return compare_exchange_strong(expected, desired, success_order, failure_order);
1189
is_lock_free(void) const volatile BOOST_NOEXCEPT
1195
fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1197
platform_fence_before(order);
1198
__asm__ __volatile__
1200
"lock ; xaddq %0, %1"
1201
: "+r" (v), "+m" (v_)
1205
platform_fence_after(order);
1206
return reinterpret_cast<value_type>(v);
1210
fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1212
return fetch_add(-v, order);
1215
BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
1217
BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
1218
BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
1224
template<typename T, bool Sign>
1225
class base_atomic<T *, void *, 8, Sign>
1228
typedef base_atomic this_type;
1229
typedef T * value_type;
1230
typedef std::ptrdiff_t difference_type;
1233
typedef value_type value_arg_type;
1236
BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
1237
BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
1240
store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1242
if (order != memory_order_seq_cst) {
1243
platform_fence_before(order);
1244
const_cast<volatile value_type &>(v_) = v;
1251
load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
1253
value_type v = const_cast<const volatile value_type &>(v_);
1254
platform_fence_after_load(order);
1259
exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1261
platform_fence_before(order);
1262
__asm__ __volatile__
1265
: "+r" (v), "+m" (v_)
1267
platform_fence_after(order);
1272
compare_exchange_strong(
1273
value_type & expected,
1275
memory_order success_order,
1276
memory_order failure_order) volatile BOOST_NOEXCEPT
1278
value_type previous = expected;
1279
platform_fence_before(success_order);
1281
__asm__ __volatile__
1283
"lock ; cmpxchgq %3, %1\n\t"
1285
: "+a,a" (previous), "+m,m" (v_), "=q,m" (success)
1290
platform_fence_after(success_order);
1292
platform_fence_after(failure_order);
1293
expected = previous;
1298
compare_exchange_weak(
1299
value_type & expected,
1301
memory_order success_order,
1302
memory_order failure_order) volatile BOOST_NOEXCEPT
1304
return compare_exchange_strong(expected, desired, success_order, failure_order);
1308
fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1310
v = v * sizeof(*v_);
1311
platform_fence_before(order);
1312
__asm__ __volatile__
1314
"lock ; xaddq %0, %1"
1315
: "+r" (v), "+m" (v_)
1319
platform_fence_after(order);
1320
return reinterpret_cast<value_type>(v);
1324
fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1326
return fetch_add(-v, order);
1330
is_lock_free(void) const volatile BOOST_NOEXCEPT
1335
BOOST_ATOMIC_DECLARE_POINTER_OPERATORS
1337
BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
1338
BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
1346
template<typename T, bool Sign>
1347
class base_atomic<T, void, 1, Sign>
1350
typedef base_atomic this_type;
1351
typedef T value_type;
1352
typedef uint8_t storage_type;
1355
typedef value_type const& value_arg_type;
1358
BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
1359
BOOST_CONSTEXPR explicit base_atomic(value_type const& v) BOOST_NOEXCEPT :
1360
v_(reinterpret_cast<storage_type const&>(v))
1365
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1367
if (order != memory_order_seq_cst) {
1369
memcpy(&tmp, &v, sizeof(value_type));
1370
platform_fence_before(order);
1371
const_cast<volatile storage_type &>(v_) = tmp;
1378
load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
1380
storage_type tmp = const_cast<volatile storage_type &>(v_);
1381
platform_fence_after_load(order);
1383
memcpy(&v, &tmp, sizeof(value_type));
1388
exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1391
memcpy(&tmp, &v, sizeof(value_type));
1392
platform_fence_before(order);
1393
__asm__ __volatile__
1396
: "+q" (tmp), "+m" (v_)
1398
platform_fence_after(order);
1400
memcpy(&res, &tmp, sizeof(value_type));
1405
compare_exchange_strong(
1406
value_type & expected,
1407
value_type const& desired,
1408
memory_order success_order,
1409
memory_order failure_order) volatile BOOST_NOEXCEPT
1411
storage_type expected_s, desired_s;
1412
memcpy(&expected_s, &expected, sizeof(value_type));
1413
memcpy(&desired_s, &desired, sizeof(value_type));
1414
storage_type previous_s = expected_s;
1415
platform_fence_before(success_order);
1417
__asm__ __volatile__
1419
"lock ; cmpxchgb %3, %1\n\t"
1421
: "+a" (previous_s), "+m" (v_), "=q" (success)
1426
platform_fence_after(success_order);
1428
platform_fence_after(failure_order);
1429
memcpy(&expected, &previous_s, sizeof(value_type));
1434
compare_exchange_weak(
1435
value_type & expected,
1436
value_type const& desired,
1437
memory_order success_order,
1438
memory_order failure_order) volatile BOOST_NOEXCEPT
1440
return compare_exchange_strong(expected, desired, success_order, failure_order);
1444
is_lock_free(void) const volatile BOOST_NOEXCEPT
1449
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
1451
BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
1452
BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
1458
template<typename T, bool Sign>
1459
class base_atomic<T, void, 2, Sign>
1462
typedef base_atomic this_type;
1463
typedef T value_type;
1464
typedef uint16_t storage_type;
1467
typedef value_type const& value_arg_type;
1470
BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
1471
BOOST_CONSTEXPR explicit base_atomic(value_type const& v) BOOST_NOEXCEPT :
1472
v_(reinterpret_cast<storage_type const&>(v))
1477
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1479
if (order != memory_order_seq_cst) {
1481
memcpy(&tmp, &v, sizeof(value_type));
1482
platform_fence_before(order);
1483
const_cast<volatile storage_type &>(v_) = tmp;
1490
load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
1492
storage_type tmp = const_cast<volatile storage_type &>(v_);
1493
platform_fence_after_load(order);
1495
memcpy(&v, &tmp, sizeof(value_type));
1500
exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1503
memcpy(&tmp, &v, sizeof(value_type));
1504
platform_fence_before(order);
1505
__asm__ __volatile__
1508
: "+q" (tmp), "+m" (v_)
1510
platform_fence_after(order);
1512
memcpy(&res, &tmp, sizeof(value_type));
1517
compare_exchange_strong(
1518
value_type & expected,
1519
value_type const& desired,
1520
memory_order success_order,
1521
memory_order failure_order) volatile BOOST_NOEXCEPT
1523
storage_type expected_s, desired_s;
1524
memcpy(&expected_s, &expected, sizeof(value_type));
1525
memcpy(&desired_s, &desired, sizeof(value_type));
1526
storage_type previous_s = expected_s;
1527
platform_fence_before(success_order);
1529
__asm__ __volatile__
1531
"lock ; cmpxchgw %3, %1\n\t"
1533
: "+a" (previous_s), "+m" (v_), "=q" (success)
1538
platform_fence_after(success_order);
1540
platform_fence_after(failure_order);
1541
memcpy(&expected, &previous_s, sizeof(value_type));
1546
compare_exchange_weak(
1547
value_type & expected,
1548
value_type const& desired,
1549
memory_order success_order,
1550
memory_order failure_order) volatile BOOST_NOEXCEPT
1552
return compare_exchange_strong(expected, desired, success_order, failure_order);
1556
is_lock_free(void) const volatile BOOST_NOEXCEPT
1561
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
1563
BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
1564
BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
1570
template<typename T, bool Sign>
1571
class base_atomic<T, void, 4, Sign>
1574
typedef base_atomic this_type;
1575
typedef T value_type;
1576
typedef uint32_t storage_type;
1579
typedef value_type const& value_arg_type;
1582
BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
1583
explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
1585
memcpy(&v_, &v, sizeof(value_type));
1589
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1591
if (order != memory_order_seq_cst) {
1592
storage_type tmp = 0;
1593
memcpy(&tmp, &v, sizeof(value_type));
1594
platform_fence_before(order);
1595
const_cast<volatile storage_type &>(v_) = tmp;
1602
load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
1604
storage_type tmp = const_cast<volatile storage_type &>(v_);
1605
platform_fence_after_load(order);
1607
memcpy(&v, &tmp, sizeof(value_type));
1612
exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1614
storage_type tmp = 0;
1615
memcpy(&tmp, &v, sizeof(value_type));
1616
platform_fence_before(order);
1617
__asm__ __volatile__
1620
: "+q" (tmp), "+m" (v_)
1622
platform_fence_after(order);
1624
memcpy(&res, &tmp, sizeof(value_type));
1629
compare_exchange_strong(
1630
value_type & expected,
1631
value_type const& desired,
1632
memory_order success_order,
1633
memory_order failure_order) volatile BOOST_NOEXCEPT
1635
storage_type expected_s = 0, desired_s = 0;
1636
memcpy(&expected_s, &expected, sizeof(value_type));
1637
memcpy(&desired_s, &desired, sizeof(value_type));
1638
storage_type previous_s = expected_s;
1639
platform_fence_before(success_order);
1641
__asm__ __volatile__
1643
"lock ; cmpxchgl %3, %1\n\t"
1645
: "+a,a" (previous_s), "+m,m" (v_), "=q,m" (success)
1650
platform_fence_after(success_order);
1652
platform_fence_after(failure_order);
1653
memcpy(&expected, &previous_s, sizeof(value_type));
1658
compare_exchange_weak(
1659
value_type & expected,
1660
value_type const& desired,
1661
memory_order success_order,
1662
memory_order failure_order) volatile BOOST_NOEXCEPT
1664
return compare_exchange_strong(expected, desired, success_order, failure_order);
1668
is_lock_free(void) const volatile BOOST_NOEXCEPT
1673
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
1675
BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
1676
BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
1682
// 64-bit generic-T specialization, x86_64 only. Mirrors the 32-bit
// specialization above but uses uint64_t storage and `cmpxchgq`.
// NOTE(review): this whole class is interleaved with bare numeric lines
// that appear to be pasted line numbers; the gaps in that numbering
// (e.g. 1689 -> 1692, 1708 -> 1715) indicate many code lines are
// missing (access specifiers, braces, asm template strings, returns,
// the v_ member). Code left byte-identical; restore from the pristine
// header before building.
#if defined(__x86_64__)
1683
template<typename T, bool Sign>
1684
class base_atomic<T, void, 8, Sign>
1687
typedef base_atomic this_type;
1688
typedef T value_type;
1689
// 64-bit integral word used as the raw storage representation.
typedef uint64_t storage_type;
1692
typedef value_type const& value_arg_type;
1695
BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
1696
// Non-atomic initializing constructor: copies v's bytes into storage.
explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
1698
memcpy(&v_, &v, sizeof(value_type));
1702
// Store: fence + plain volatile store for non-seq_cst orders.
store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1704
if (order != memory_order_seq_cst) {
1705
storage_type tmp = 0;
1706
memcpy(&tmp, &v, sizeof(value_type));
1707
platform_fence_before(order);
1708
const_cast<volatile storage_type &>(v_) = tmp;
1715
// Load: plain volatile read, then the order-appropriate fence.
load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
1717
storage_type tmp = const_cast<volatile storage_type &>(v_);
1718
platform_fence_after_load(order);
1720
// `v` presumably a local value_type declared on a missing line — TODO confirm.
memcpy(&v, &tmp, sizeof(value_type));
1725
// Exchange: asm template string is on a missing line.
exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
1727
storage_type tmp = 0;
1728
memcpy(&tmp, &v, sizeof(value_type));
1729
platform_fence_before(order);
1730
__asm__ __volatile__
1733
: "+q" (tmp), "+m" (v_)
1735
platform_fence_after(order);
1737
memcpy(&res, &tmp, sizeof(value_type));
1742
// Strong CAS on the 64-bit word via `lock cmpxchgq`.
compare_exchange_strong(
1743
value_type & expected,
1744
value_type const& desired,
1745
memory_order success_order,
1746
memory_order failure_order) volatile BOOST_NOEXCEPT
1748
storage_type expected_s = 0, desired_s = 0;
1749
memcpy(&expected_s, &expected, sizeof(value_type));
1750
memcpy(&desired_s, &desired, sizeof(value_type));
1751
storage_type previous_s = expected_s;
1752
platform_fence_before(success_order);
1754
__asm__ __volatile__
1756
// Implicitly compares RAX (previous_s) against the memory operand.
"lock ; cmpxchgq %3, %1\n\t"
1758
: "+a,a" (previous_s), "+m,m" (v_), "=q,m" (success)
1763
platform_fence_after(success_order);
1765
platform_fence_after(failure_order);
1766
memcpy(&expected, &previous_s, sizeof(value_type));
1771
// Weak CAS == strong CAS on x86 (cmpxchg has no spurious failure).
compare_exchange_weak(
1772
value_type & expected,
1773
value_type const& desired,
1774
memory_order success_order,
1775
memory_order failure_order) volatile BOOST_NOEXCEPT
1777
return compare_exchange_strong(expected, desired, success_order, failure_order);
1781
is_lock_free(void) const volatile BOOST_NOEXCEPT
1786
BOOST_ATOMIC_DECLARE_BASE_OPERATORS
1788
// Atomics are neither copyable nor assignable.
BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
1789
BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
1796
// 64-bit strong CAS on 32-bit x86. Prefers the compiler builtin when
// available; otherwise uses `lock cmpxchg8b` with EBX manually saved and
// restored (see the PIC rationale in the block comment below).
// NOTE(review): interleaved numeric lines are pasted line numbers; gaps
// (1804 -> 1809, after 1832) show missing lines (returns, asm tail,
// declarations of `scratch`/`success`). Code left byte-identical.
#if !defined(__x86_64__) && defined(BOOST_ATOMIC_X86_HAS_CMPXCHG8B)
1798
template<typename T>
1800
platform_cmpxchg64_strong(T & expected, T desired, volatile T * ptr) BOOST_NOEXCEPT
1802
#ifdef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_8
1803
// Builtin path: full CAS in one intrinsic.
const T oldval = __sync_val_compare_and_swap(ptr, expected, desired);
1804
const bool result = (oldval == expected);
1809
/* Make sure ebx is saved and restored properly in case
1810
this object is compiled as "position independent". Since
1811
programmers on x86 tend to forget specifying -DPIC or
1812
similar, always assume PIC.
1814
To make this work uniformly even in the non-PIC case,
1815
setup register constraints such that ebx can not be
1816
used by accident e.g. as base address for the variable
1817
to be modified. Accessing "scratch" should always be okay,
1818
as it can only be placed on the stack (and therefore
1819
accessed through ebp or esp only).
1821
In theory, could push/pop ebx onto/off the stack, but movs
1822
to a prepared stack slot turn out to be faster. */
1824
__asm__ __volatile__
1826
// Save EBX (PIC register), load the low half of `desired`, CAS,
// then restore EBX.
"movl %%ebx, %[scratch]\n\t"
1827
"movl %[desired_lo], %%ebx\n\t"
1828
"lock; cmpxchg8b %[dest]\n\t"
1829
"movl %[scratch], %%ebx\n\t"
1831
// "A" ties expected to the EDX:EAX pair required by cmpxchg8b.
: "+A,A,A,A,A,A" (expected), [dest] "+m,m,m,m,m,m" (*ptr), [scratch] "=m,m,m,m,m,m" (scratch), [success] "=q,m,q,m,q,m" (success)
1832
: [desired_lo] "S,S,D,D,m,m" ((uint32_t)desired), "c,c,c,c,c,c" ((uint32_t)(desired >> 32))
1839
// Intel 64 and IA-32 Architectures Software Developer's Manual, Volume 3A, 8.1.1. Guaranteed Atomic Operations:
1841
// The Pentium processor (and newer processors since) guarantees that the following additional memory operations will always be carried out atomically:
1842
// * Reading or writing a quadword aligned on a 64-bit boundary
1844
// Luckily, the memory is almost always 8-byte aligned in our case because atomic<> uses 64 bit native types for storage and dynamic memory allocations
1845
// have at least 8 byte alignment. The only unfortunate case is when atomic is placed on the stack and it is not 8-byte aligned (like on 32 bit Windows).
1847
// Atomic 64-bit store on 32-bit x86. Aligned case: single movq through
// an SSE register (one 64-bit memory write, atomic per the Intel SDM
// note quoted above). Misaligned fallback: cmpxchg8b loop.
// NOTE(review): interleaved numeric lines are pasted line numbers; the
// large gaps (1857 -> 1863, 1863 -> 1876) show whole branches (asm
// operand lists, FILD/FISTP fallback, loop jump) are missing. Code left
// byte-identical.
template<typename T>
1849
platform_store64(T value, volatile T * ptr) BOOST_NOEXCEPT
1851
// 8-byte-aligned destination: plain 64-bit write is atomic.
if (((uint32_t)ptr & 0x00000007) == 0)
1853
#if defined(__SSE2__)
1854
__asm__ __volatile__
1856
"movq %1, %%xmm4\n\t"
1857
"movq %%xmm4, %0\n\t"
1863
__asm__ __volatile__
1876
// Misaligned fallback: retry cmpxchg8b until the store lands.
__asm__ __volatile__
1878
"movl %%ebx, %[scratch]\n\t"
1879
"movl %[value_lo], %%ebx\n\t"
1880
// Seed EDX:EAX with the current contents so the first CAS attempt
// has a plausible comparand.
"movl 0(%[dest]), %%eax\n\t"
1881
"movl 4(%[dest]), %%edx\n\t"
1883
"1: lock; cmpxchg8b 0(%[dest])\n\t"
1885
"movl %[scratch], %%ebx"
1886
: [scratch] "=m,m" (scratch)
1887
: [value_lo] "a,a" ((uint32_t)value), "c,c" ((uint32_t)(value >> 32)), [dest] "D,S" (ptr)
1888
: "memory", "cc", "edx"
1893
// Atomic 64-bit load on 32-bit x86. Aligned case: single movq via an
// SSE register. Misaligned fallback: cmpxchg8b used purely as a 64-bit
// read (see inline comments).
// NOTE(review): interleaved numeric lines are pasted line numbers; gaps
// (1905 -> 1911, 1911 -> 1923, after 1929) show missing lines (operand
// lists, non-SSE branch, return). Code left byte-identical.
template<typename T>
1895
platform_load64(const volatile T * ptr) BOOST_NOEXCEPT
1899
// 8-byte-aligned source: a single 64-bit read is atomic.
if (((uint32_t)ptr & 0x00000007) == 0)
1901
#if defined(__SSE2__)
1902
__asm__ __volatile__
1904
"movq %1, %%xmm4\n\t"
1905
"movq %%xmm4, %0\n\t"
1911
__asm__ __volatile__
1923
// We don't care for comparison result here; the previous value will be stored into value anyway.
1924
// Also we don't care for ebx and ecx values, they just have to be equal to eax and edx before cmpxchg8b.
1925
__asm__ __volatile__
1927
// Force EBX:ECX == EAX:EDX so the CAS (whether it "succeeds" or
// "fails") leaves memory unchanged and yields the old value.
"movl %%ebx, %%eax\n\t"
1928
"movl %%ecx, %%edx\n\t"
1929
"lock; cmpxchg8b %[dest]"
1941
// 128-bit strong CAS (x86_64 with cmpxchg16b). `desired` is viewed as
// two 64-bit halves fed to RBX ("b") and RCX ("c").
// NOTE(review): interleaved numeric lines are pasted line numbers; the
// asm tail (clobbers/flag capture) and the return are on missing lines.
// Code left byte-identical.
#if defined(BOOST_ATOMIC_INT128_LOCK_FREE) && BOOST_ATOMIC_INT128_LOCK_FREE > 0
1943
template<typename T>
1945
platform_cmpxchg128_strong(T& expected, T desired, volatile T* ptr) BOOST_NOEXCEPT
1947
// Reinterpret the 128-bit value as two 64-bit words for the operands.
uint64_t const* p_desired = (uint64_t const*)&desired;
1949
__asm__ __volatile__
1951
"lock; cmpxchg16b %[dest]\n\t"
1953
// "A" ties expected to RDX:RAX as cmpxchg16b requires.
: "+A,A" (expected), [dest] "+m,m" (*ptr), [success] "=q,m" (success)
1954
: "b,b" (p_desired[0]), "c,c" (p_desired[1])
1960
// Atomic 128-bit store via a cmpxchg16b retry loop: seed RDX:RAX with
// the current contents, then CAS until the new value lands.
// NOTE(review): interleaved numeric lines are pasted line numbers; the
// loop's conditional jump and asm output list are on missing lines.
// Code left byte-identical.
template<typename T>
1962
platform_store128(T value, volatile T* ptr) BOOST_NOEXCEPT
1964
// Reinterpret the 128-bit value as two 64-bit words for RBX/RCX.
uint64_t const* p_value = (uint64_t const*)&value;
1965
__asm__ __volatile__
1967
"movq 0(%[dest]), %%rax\n\t"
1968
"movq 8(%[dest]), %%rdx\n\t"
1970
"1: lock; cmpxchg16b 0(%[dest])\n\t"
1973
: "b" (p_value[0]), "c" (p_value[1]), [dest] "r" (ptr)
1974
: "memory", "cc", "rax", "rdx"
1978
// Atomic 128-bit load: cmpxchg16b used purely as a 16-byte read, with
// RBX:RCX forced equal to RAX:RDX so memory is never modified.
// NOTE(review): interleaved numeric lines are pasted line numbers; the
// asm operand lists and the return are on missing lines. Code left
// byte-identical.
template<typename T>
1980
platform_load128(const volatile T* ptr) BOOST_NOEXCEPT
1984
// We don't care for comparison result here; the previous value will be stored into value anyway.
1985
// Also we don't care for rbx and rcx values, they just have to be equal to rax and rdx before cmpxchg16b.
1986
__asm__ __volatile__
1988
"movq %%rbx, %%rax\n\t"
1989
"movq %%rcx, %%rdx\n\t"
1990
"lock; cmpxchg16b %[dest]"
1999
#endif // defined(BOOST_ATOMIC_INT128_LOCK_FREE) && BOOST_ATOMIC_INT128_LOCK_FREE > 0
2005
/* pull in 64-bit atomic type using cmpxchg8b above */
2006
#if !defined(__x86_64__) && defined(BOOST_ATOMIC_X86_HAS_CMPXCHG8B)
2007
#include <boost/atomic/detail/cas64strong.hpp>
2010
/* pull in 128-bit atomic type using cmpxchg16b above */
2011
#if defined(BOOST_ATOMIC_INT128_LOCK_FREE) && BOOST_ATOMIC_INT128_LOCK_FREE > 0
2012
#include <boost/atomic/detail/cas128strong.hpp>
2015
#endif /* !defined(BOOST_ATOMIC_FORCE_FALLBACK) */