/*
 * Native atomic implementation for 32/64-bit x86.
 * This code requires a 486 or newer processor.
 *
 * This header is included twice: once with ETHR_ATOMIC_WANT_32BIT_IMPL__
 * set and once with ETHR_ATOMIC_WANT_64BIT_IMPL__ set; the selection below
 * parameterizes the rest of the file for the requested word size.
 */

#undef ETHR_INCLUDE_ATOMIC_IMPL__
#if !defined(ETHR_X86_ATOMIC32_H__) && defined(ETHR_ATOMIC_WANT_32BIT_IMPL__)
#define ETHR_X86_ATOMIC32_H__
#define ETHR_INCLUDE_ATOMIC_IMPL__ 4
#undef ETHR_ATOMIC_WANT_32BIT_IMPL__
#elif !defined(ETHR_X86_ATOMIC64_H__) && defined(ETHR_ATOMIC_WANT_64BIT_IMPL__)
#define ETHR_X86_ATOMIC64_H__
#define ETHR_INCLUDE_ATOMIC_IMPL__ 8
#undef ETHR_ATOMIC_WANT_64BIT_IMPL__
#endif
#ifdef ETHR_INCLUDE_ATOMIC_IMPL__

#ifndef ETHR_X86_ATOMIC_COMMON__
#define ETHR_X86_ATOMIC_COMMON__

#define ETHR_ATOMIC_HAVE_INC_DEC_INSTRUCTIONS 1

/*
 * Memory barriers. On x86-64 (and on 32-bit when pre-Pentium 4
 * compatibility is not required) the SSE2 fence instructions are
 * available; otherwise fall back on a locked instruction, which acts
 * as a full barrier on x86.
 */
#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT)
#define ETHR_MEMORY_BARRIER __asm__ __volatile__("mfence" : : : "memory")
#define ETHR_WRITE_MEMORY_BARRIER __asm__ __volatile__("sfence" : : : "memory")
#define ETHR_READ_MEMORY_BARRIER __asm__ __volatile__("lfence" : : : "memory")
#define ETHR_READ_DEPEND_MEMORY_BARRIER __asm__ __volatile__("" : : : "memory")
#else /* no mfence/sfence/lfence before Pentium 4; use a locked op */
#define ETHR_MEMORY_BARRIER \
do { \
    volatile ethr_sint32_t x___ = 0; \
    __asm__ __volatile__("lock; incl %0" : "=m"(x___) : "m"(x___) : "memory"); \
} while (0)
#endif

#endif /* ETHR_X86_ATOMIC_COMMON__ */
60
#if ETHR_INCLUDE_ATOMIC_IMPL__ == 4
61
#define ETHR_HAVE_NATIVE_ATOMIC32 1
62
#define ETHR_NATMC_FUNC__(X) ethr_native_atomic32_ ## X
63
#define ETHR_ATMC_T__ ethr_native_atomic32_t
64
#define ETHR_AINT_T__ ethr_sint32_t
65
#define ETHR_AINT_SUFFIX__ "l"
66
#elif ETHR_INCLUDE_ATOMIC_IMPL__ == 8
67
#define ETHR_HAVE_NATIVE_ATOMIC64 1
68
#define ETHR_NATMC_FUNC__(X) ethr_native_atomic64_ ## X
69
#define ETHR_ATMC_T__ ethr_native_atomic64_t
70
#define ETHR_AINT_T__ ethr_sint64_t
71
#define ETHR_AINT_SUFFIX__ "q"
73
#error "Unsupported integer size"
76
/* An atomic is an aligned ETHR_AINT_T__ accessed via locked operations.
32
volatile long counter;
33
} ethr_native_atomic_t;
35
#ifdef ETHR_TRY_INLINE_FUNCS
38
#define LONG_SUFFIX "q"
40
#define LONG_SUFFIX "l"
43
static ETHR_INLINE void
44
ethr_native_atomic_init(ethr_native_atomic_t *var, long i)
48
#define ethr_native_atomic_set(v, i) ethr_native_atomic_init((v), (i))
50
static ETHR_INLINE long
51
ethr_native_atomic_read(ethr_native_atomic_t *var)
79
volatile ETHR_AINT_T__ counter;
82
#if defined(ETHR_TRY_INLINE_FUNCS) || defined(ETHR_ATOMIC_IMPL__)
84
/* Return the address of the underlying counter word. */
static ETHR_INLINE ETHR_AINT_T__ *
ETHR_NATMC_FUNC__(addr)(ETHR_ATMC_T__ *var)
{
    return (ETHR_AINT_T__ *) &var->counter;
}
static ETHR_INLINE void
91
ETHR_NATMC_FUNC__(init)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
96
static ETHR_INLINE void
97
ETHR_NATMC_FUNC__(set)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
102
static ETHR_INLINE ETHR_AINT_T__
103
ETHR_NATMC_FUNC__(read)(ETHR_ATMC_T__ *var)
53
105
return var->counter;
56
108
static ETHR_INLINE void
57
ethr_native_atomic_add(ethr_native_atomic_t *var, long incr)
109
ETHR_NATMC_FUNC__(add)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
59
111
__asm__ __volatile__(
60
"lock; add" LONG_SUFFIX " %1, %0"
112
"lock; add" ETHR_AINT_SUFFIX__ " %1, %0"
61
113
: "=m"(var->counter)
62
114
: "ir"(incr), "m"(var->counter));
65
117
static ETHR_INLINE void
66
ethr_native_atomic_inc(ethr_native_atomic_t *var)
118
ETHR_NATMC_FUNC__(inc)(ETHR_ATMC_T__ *var)
68
120
__asm__ __volatile__(
69
"lock; inc" LONG_SUFFIX " %0"
121
"lock; inc" ETHR_AINT_SUFFIX__ " %0"
70
122
: "=m"(var->counter)
71
123
: "m"(var->counter));
74
126
static ETHR_INLINE void
75
ethr_native_atomic_dec(ethr_native_atomic_t *var)
127
ETHR_NATMC_FUNC__(dec)(ETHR_ATMC_T__ *var)
77
129
__asm__ __volatile__(
78
"lock; dec" LONG_SUFFIX " %0"
130
"lock; dec" ETHR_AINT_SUFFIX__ " %0"
79
131
: "=m"(var->counter)
80
132
: "m"(var->counter));
83
static ETHR_INLINE long
84
ethr_native_atomic_add_return(ethr_native_atomic_t *var, long incr)
135
static ETHR_INLINE ETHR_AINT_T__
136
ETHR_NATMC_FUNC__(add_return)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ incr)
89
141
__asm__ __volatile__(
90
"lock; xadd" LONG_SUFFIX " %0, %1" /* xadd didn't exist prior to the 486 */
142
"lock; xadd" ETHR_AINT_SUFFIX__ " %0, %1" /* xadd didn't exist prior to the 486 */
92
144
: "m"(var->counter), "0"(tmp));
93
145
/* now tmp is the atomic's previous value */
94
146
return tmp + incr;
97
#define ethr_native_atomic_inc_return(var) ethr_native_atomic_add_return((var), 1)
98
#define ethr_native_atomic_dec_return(var) ethr_native_atomic_add_return((var), -1)
100
static ETHR_INLINE long
101
ethr_native_atomic_cmpxchg(ethr_native_atomic_t *var, long new, long old)
149
static ETHR_INLINE ETHR_AINT_T__
150
ETHR_NATMC_FUNC__(inc_return)(ETHR_ATMC_T__ *var)
152
return ETHR_NATMC_FUNC__(add_return)(var, (ETHR_AINT_T__) 1);
155
static ETHR_INLINE ETHR_AINT_T__
156
ETHR_NATMC_FUNC__(dec_return)(ETHR_ATMC_T__ *var)
158
return ETHR_NATMC_FUNC__(add_return)(var, (ETHR_AINT_T__) -1);
161
static ETHR_INLINE ETHR_AINT_T__
162
ETHR_NATMC_FUNC__(cmpxchg)(ETHR_ATMC_T__ *var,
103
166
__asm__ __volatile__(
104
"lock; cmpxchg" LONG_SUFFIX " %2, %3"
167
"lock; cmpxchg" ETHR_AINT_SUFFIX__ " %2, %3"
105
168
: "=a"(old), "=m"(var->counter)
106
169
: "r"(new), "m"(var->counter), "0"(old)
107
170
: "cc", "memory"); /* full memory clobber to make this a compiler barrier */
111
static ETHR_INLINE long
112
ethr_native_atomic_and_retold(ethr_native_atomic_t *var, long mask)
119
tmp = ethr_native_atomic_cmpxchg(var, tmp & mask, tmp);
120
} while (__builtin_expect(tmp != old, 0));
121
/* now tmp is the atomic's previous value */
125
static ETHR_INLINE long
126
ethr_native_atomic_or_retold(ethr_native_atomic_t *var, long mask)
133
tmp = ethr_native_atomic_cmpxchg(var, tmp | mask, tmp);
134
} while (__builtin_expect(tmp != old, 0));
135
/* now tmp is the atomic's previous value */
139
static ETHR_INLINE long
140
ethr_native_atomic_xchg(ethr_native_atomic_t *var, long val)
174
static ETHR_INLINE ETHR_AINT_T__
175
ETHR_NATMC_FUNC__(and_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
177
ETHR_AINT_T__ tmp, old;
182
tmp = ETHR_NATMC_FUNC__(cmpxchg)(var, tmp & mask, tmp);
183
} while (__builtin_expect(tmp != old, 0));
184
/* now tmp is the atomic's previous value */
188
static ETHR_INLINE ETHR_AINT_T__
189
ETHR_NATMC_FUNC__(or_retold)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ mask)
191
ETHR_AINT_T__ tmp, old;
196
tmp = ETHR_NATMC_FUNC__(cmpxchg)(var, tmp | mask, tmp);
197
} while (__builtin_expect(tmp != old, 0));
198
/* now tmp is the atomic's previous value */
202
static ETHR_INLINE ETHR_AINT_T__
203
ETHR_NATMC_FUNC__(xchg)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ val)
205
ETHR_AINT_T__ tmp = val;
143
206
__asm__ __volatile__(
144
"xchg" LONG_SUFFIX " %0, %1"
207
"xchg" ETHR_AINT_SUFFIX__ " %0, %1"
146
209
: "m"(var->counter), "0"(tmp));
147
210
/* now tmp is the atomic's previous value */
215
* Atomic ops with at least specified barriers.
218
static ETHR_INLINE ETHR_AINT_T__
219
ETHR_NATMC_FUNC__(read_acqb)(ETHR_ATMC_T__ *var)
222
#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT)
225
val = ETHR_NATMC_FUNC__(add_return)(var, 0);
227
__asm__ __volatile__("" : : : "memory");
231
static ETHR_INLINE void
232
ETHR_NATMC_FUNC__(set_relb)(ETHR_ATMC_T__ *var, ETHR_AINT_T__ i)
234
__asm__ __volatile__("" : : : "memory");
235
#if defined(__x86_64__) || !defined(ETHR_PRE_PENTIUM4_COMPAT)
238
(void) ETHR_NATMC_FUNC__(xchg)(var, i);
242
static ETHR_INLINE ETHR_AINT_T__
243
ETHR_NATMC_FUNC__(inc_return_acqb)(ETHR_ATMC_T__ *var)
245
ETHR_AINT_T__ res = ETHR_NATMC_FUNC__(inc_return)(var);
246
__asm__ __volatile__("" : : : "memory");
250
static ETHR_INLINE void
251
ETHR_NATMC_FUNC__(dec_relb)(ETHR_ATMC_T__ *var)
253
__asm__ __volatile__("" : : : "memory");
254
ETHR_NATMC_FUNC__(dec)(var);
257
static ETHR_INLINE ETHR_AINT_T__
258
ETHR_NATMC_FUNC__(dec_return_relb)(ETHR_ATMC_T__ *var)
260
__asm__ __volatile__("" : : : "memory");
261
return ETHR_NATMC_FUNC__(dec_return)(var);
264
static ETHR_INLINE ETHR_AINT_T__
265
ETHR_NATMC_FUNC__(cmpxchg_acqb)(ETHR_ATMC_T__ *var,
269
return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
272
static ETHR_INLINE ETHR_AINT_T__
273
ETHR_NATMC_FUNC__(cmpxchg_relb)(ETHR_ATMC_T__ *var,
277
return ETHR_NATMC_FUNC__(cmpxchg)(var, new, old);
153
280
#endif /* ETHR_TRY_INLINE_FUNCS */
155
#endif /* ETHREAD_I386_ATOMIC_H */
282
#undef ETHR_NATMC_FUNC__
285
#undef ETHR_AINT_SUFFIX__
287
#endif /* ETHR_INCLUDE_ATOMIC_IMPL__ */