1
#ifndef BOOST_ATOMIC_DETAIL_GCC_ARMV6PLUS_HPP
2
#define BOOST_ATOMIC_DETAIL_GCC_ARMV6PLUS_HPP
4
// Distributed under the Boost Software License, Version 1.0.
5
// See accompanying file LICENSE_1_0.txt or copy at
6
// http://www.boost.org/LICENSE_1_0.txt)
8
// Copyright (c) 2009 Helge Bahmann
9
// Copyright (c) 2009 Phil Endecott
10
// Copyright (c) 2013 Tim Blechmann
11
// ARM Code by Phil Endecott, based on other architectures.
14
#include <boost/cstdint.hpp>
15
#include <boost/atomic/detail/config.hpp>
17
#ifdef BOOST_ATOMIC_HAS_PRAGMA_ONCE
21
// From the ARM Architecture Reference Manual for architecture v6:
23
// LDREX{<cond>} <Rd>, [<Rn>]
24
// <Rd> Specifies the destination register for the memory word addressed by <Rn>
25
// <Rn> Specifies the register containing the address.
27
// STREX{<cond>} <Rd>, <Rm>, [<Rn>]
28
// <Rd> Specifies the destination register for the returned status value.
29
// 0 if the operation updates memory
30
// 1 if the operation fails to update memory
31
// <Rm> Specifies the register containing the word to be stored to memory.
32
// <Rn> Specifies the register containing the address.
33
// Rd must not be the same register as Rm or Rn.
35
// ARM v7 is like ARM v6 plus:
36
// There are half-word and byte versions of the LDREX and STREX instructions,
37
// LDREXH, LDREXB, STREXH and STREXB.
38
// There are also double-word versions, LDREXD and STREXD.
39
// (Actually it looks like these are available from version 6k onwards.)
40
// FIXME these are not yet used; should be mostly a matter of copy-and-paste.
41
// I think you can supply an immediate offset to the address.
43
// A memory barrier is effected using a "co-processor 15" instruction,
44
// though a separate assembler mnemonic is available for it in v7.
50
// "Thumb 1" is a subset of the ARM instruction set that uses a 16-bit encoding. It
51
// doesn't include all instructions and in particular it doesn't include the co-processor
52
// instruction used for the memory barrier or the load-locked/store-conditional
53
// instructions. So, if we're compiling in "Thumb 1" mode, we need to wrap all of our
54
// asm blocks with code to temporarily change to ARM mode.
56
// You can only change between ARM and Thumb modes when branching using the bx instruction.
57
// bx takes an address specified in a register. The least significant bit of the address
58
// indicates the mode, so 1 is added to indicate that the destination code is Thumb.
59
// A temporary register is needed for the address and is passed as an argument to these
60
// macros. It must be one of the "low" registers accessible to Thumb code, specified
61
// using the "l" attribute in the asm statement.
63
// Architecture v7 introduces "Thumb 2", which does include (almost?) all of the ARM
64
// instruction set. So in v7 we don't need to change to ARM mode; we can write "universal
65
// assembler" which will assemble to Thumb 2 or ARM code as appropriate. The only thing
66
// we need to do to make this "universal" assembler mode work is to insert "IT" instructions
67
// to annotate the conditional instructions. These are ignored in other modes (e.g. v6),
68
// so they can always be present.
70
#if defined(__thumb__) && !defined(__ARM_ARCH_7A__)
71
// FIXME also other v7 variants.
72
// Thumb-1 build: switch the CPU to ARM mode before the asm block. adr loads
// the address of local label 1 (an even address, so bx selects ARM state —
// see the bx/mode-bit comment above); ".arm" switches the assembler to ARM
// encoding and ".align" pads so the ARM instructions start properly aligned.
#define BOOST_ATOMIC_ARM_ASM_START(TMPREG) "adr " #TMPREG ", 1f\n" "bx " #TMPREG "\n" ".arm\n" ".align 4\n" "1: "
73
// Switch back to Thumb mode after the asm block: "1f + 1" sets the low bit
// of the target address so bx returns to Thumb state (1 is added to indicate
// Thumb — see the comment above), then ".thumb" resumes Thumb assembly.
#define BOOST_ATOMIC_ARM_ASM_END(TMPREG) "adr " #TMPREG ", 1f + 1\n" "bx " #TMPREG "\n" ".thumb\n" ".align 2\n" "1: "
76
// The tmpreg is wasted in this case, which is non-optimal.
77
// Not building for Thumb-1: no mode switch is needed, so the macro expands to
// nothing (TMPREG is still reserved by callers, which is non-optimal — see above).
#define BOOST_ATOMIC_ARM_ASM_START(TMPREG)
78
// Matching no-op end marker for the non-Thumb-1 case.
#define BOOST_ATOMIC_ARM_ASM_END(TMPREG)
81
#if defined(__ARM_ARCH_7A__)
83
// ARMv7: use the dedicated data-memory-barrier mnemonic (available in v7 —
// see the comment above about the separate assembler mnemonic).
#define BOOST_ATOMIC_ARM_DMB "dmb\n"
85
// Pre-v7: the memory barrier is effected via the "co-processor 15"
// instruction (see comment above); r0 is a dummy operand here.
#define BOOST_ATOMIC_ARM_DMB "mcr\tp15, 0, r0, c7, c10, 5\n"
92
__asm__ __volatile__ (
93
BOOST_ATOMIC_ARM_ASM_START(%0)
95
BOOST_ATOMIC_ARM_ASM_END(%0)
96
: "=&l" (brtmp) :: "memory"
101
platform_fence_before(memory_order order)
104
case memory_order_release:
105
case memory_order_acq_rel:
106
case memory_order_seq_cst:
108
case memory_order_consume:
114
platform_fence_after(memory_order order)
117
case memory_order_acquire:
118
case memory_order_acq_rel:
119
case memory_order_seq_cst:
126
platform_fence_before_store(memory_order order)
128
platform_fence_before(order);
132
platform_fence_after_store(memory_order order)
134
if (order == memory_order_seq_cst)
139
platform_fence_after_load(memory_order order)
141
platform_fence_after(order);
146
platform_cmpxchg32(T & expected, T desired, volatile T * ptr)
151
BOOST_ATOMIC_ARM_ASM_START(%2)
152
"mov %1, #0\n" // success = 0
153
"ldrex %0, %3\n" // expected' = *(&i)
154
"teq %0, %4\n" // flags = expected'==expected
156
"strexeq %2, %5, %3\n" // if (flags.equal) *(&i) = desired, tmp = !OK
157
"teqeq %2, #0\n" // if (flags.equal) flags = tmp==0
158
"moveq %1, #1\n" // if (flags.equal) success = 1
159
BOOST_ATOMIC_ARM_ASM_END(%2)
160
: "=&r" (expected), // %0
161
"=&r" (success), // %1
164
: "r" (expected), // %4
174
#define BOOST_ATOMIC_THREAD_FENCE 2
176
atomic_thread_fence(memory_order order)
179
case memory_order_acquire:
180
case memory_order_release:
181
case memory_order_acq_rel:
182
case memory_order_seq_cst:
183
atomics::detail::arm_barrier();
188
#define BOOST_ATOMIC_SIGNAL_FENCE 2
190
atomic_signal_fence(memory_order)
192
__asm__ __volatile__ ("" ::: "memory");
198
atomic_flag(const atomic_flag &) /* = delete */ ;
199
atomic_flag & operator=(const atomic_flag &) /* = delete */ ;
202
BOOST_CONSTEXPR atomic_flag(void) BOOST_NOEXCEPT : v_(0) {}
205
clear(memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
207
atomics::detail::platform_fence_before_store(order);
208
const_cast<volatile uint32_t &>(v_) = 0;
209
atomics::detail::platform_fence_after_store(order);
213
test_and_set(memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
215
atomics::detail::platform_fence_before(order);
216
uint32_t expected = v_;
220
} while (!atomics::detail::platform_cmpxchg32(expected, (uint32_t)1, &v_));
221
atomics::detail::platform_fence_after(order);
225
#define BOOST_ATOMIC_FLAG_LOCK_FREE 2
229
#undef BOOST_ATOMIC_ARM_ASM_START
230
#undef BOOST_ATOMIC_ARM_ASM_END
232
#include <boost/atomic/detail/base.hpp>
234
#if !defined(BOOST_ATOMIC_FORCE_FALLBACK)
236
#define BOOST_ATOMIC_CHAR_LOCK_FREE 2
237
#define BOOST_ATOMIC_CHAR16_T_LOCK_FREE 2
238
#define BOOST_ATOMIC_CHAR32_T_LOCK_FREE 2
239
#define BOOST_ATOMIC_WCHAR_T_LOCK_FREE 2
240
#define BOOST_ATOMIC_SHORT_LOCK_FREE 2
241
#define BOOST_ATOMIC_INT_LOCK_FREE 2
242
#define BOOST_ATOMIC_LONG_LOCK_FREE 2
243
// 64-bit atomics are not lock-free here: the LDREXD/STREXD double-word
// exclusives are not yet used by this backend (see the FIXME near the top).
#define BOOST_ATOMIC_LLONG_LOCK_FREE 0
244
#define BOOST_ATOMIC_POINTER_LOCK_FREE 2
245
#define BOOST_ATOMIC_BOOL_LOCK_FREE 2
247
#include <boost/atomic/detail/cas32weak.hpp>
249
#endif /* !defined(BOOST_ATOMIC_FORCE_FALLBACK) */