/* Copyright 2000-2005 The Apache Software Foundation or its licensors, as
 * applicable.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "apr_atomic.h"
#include "apr_thread_mutex.h"

#include "apr_private.h"

#include <stdlib.h>     /* abort(), used by the CHECK() macro below */
#if defined(__GNUC__) && defined(__STRICT_ANSI__) && !defined(USE_GENERIC_ATOMICS)
26
/* force use of generic atomics if building e.g. with -std=c89, which
27
* doesn't allow inline asm */
28
#define USE_GENERIC_ATOMICS
31
#if (defined(__i386__) || defined(__x86_64__)) \
32
&& defined(__GNUC__) && !defined(USE_GENERIC_ATOMICS)
34
APR_DECLARE(apr_uint32_t) apr_atomic_cas32(volatile apr_uint32_t *mem,
40
asm volatile ("lock; cmpxchgl %1, %2"
42
: "r" (with), "m" (*(mem)), "0"(cmp)
46
#define APR_OVERRIDE_ATOMIC_CAS32
48
static apr_uint32_t inline intel_atomic_add32(volatile apr_uint32_t *mem,
51
asm volatile ("lock; xaddl %0,%1"
52
: "=r"(val), "=m"(*mem) /* outputs */
53
: "0"(val), "m"(*mem) /* inputs */
58
APR_DECLARE(apr_uint32_t) apr_atomic_add32(volatile apr_uint32_t *mem,
61
return intel_atomic_add32(mem, val);
63
#define APR_OVERRIDE_ATOMIC_ADD32
65
APR_DECLARE(void) apr_atomic_sub32(volatile apr_uint32_t *mem, apr_uint32_t val)
67
asm volatile ("lock; subl %1, %0"
69
: "m" (*(mem)), "r" (val)
72
#define APR_OVERRIDE_ATOMIC_SUB32
74
APR_DECLARE(int) apr_atomic_dec32(volatile apr_uint32_t *mem)
78
asm volatile ("lock; decl %1;\n\t"
85
#define APR_OVERRIDE_ATOMIC_DEC32
87
APR_DECLARE(apr_uint32_t) apr_atomic_inc32(volatile apr_uint32_t *mem)
89
return intel_atomic_add32(mem, 1);
91
#define APR_OVERRIDE_ATOMIC_INC32
93
APR_DECLARE(void) apr_atomic_set32(volatile apr_uint32_t *mem, apr_uint32_t val)
97
#define APR_OVERRIDE_ATOMIC_SET32
99
APR_DECLARE(apr_uint32_t) apr_atomic_xchg32(volatile apr_uint32_t *mem, apr_uint32_t val)
101
apr_uint32_t prev = val;
103
asm volatile ("lock; xchgl %0, %1"
105
: "m" (*(mem)), "0"(prev)
109
#define APR_OVERRIDE_ATOMIC_XCHG32
111
/*#define apr_atomic_init(pool) APR_SUCCESS*/
113
#endif /* (__linux__ || __EMX__ || __FreeBSD__) && __i386__ */
115
#if (defined(__PPC__) || defined(__ppc__)) && defined(__GNUC__) \
116
&& !defined(USE_GENERIC_ATOMICS)
118
APR_DECLARE(apr_uint32_t) apr_atomic_cas32(volatile apr_uint32_t *mem,
124
asm volatile ("0:\n\t" /* retry local label */
125
"lwarx %0,0,%1\n\t" /* load prev and reserve */
126
"cmpw %0,%3\n\t" /* does it match cmp? */
127
"bne- 1f\n\t" /* ...no, bail out */
128
"stwcx. %2,0,%1\n\t" /* ...yes, conditionally
130
"bne- 0b\n\t" /* start over if we lost
132
"1:" /* exit local label */
134
: "=&r"(prev) /* output */
135
: "b" (mem), "r" (swap), "r"(cmp) /* inputs */
136
: "memory", "cc"); /* clobbered */
139
#define APR_OVERRIDE_ATOMIC_CAS32
141
APR_DECLARE(apr_uint32_t) apr_atomic_add32(volatile apr_uint32_t *mem,
144
apr_uint32_t prev, temp;
146
asm volatile ("0:\n\t" /* retry local label */
147
"lwarx %0,0,%2\n\t" /* load prev and reserve */
148
"add %1,%0,%3\n\t" /* temp = prev + delta */
149
"stwcx. %1,0,%2\n\t" /* conditionally store */
150
"bne- 0b" /* start over if we lost
153
/*XXX find a cleaner way to define the temp
156
: "=&r" (prev), "=&r" (temp) /* output, temp */
157
: "b" (mem), "r" (delta) /* inputs */
158
: "memory", "cc"); /* clobbered */
161
#define APR_OVERRIDE_ATOMIC_ADD32
163
#endif /* __PPC__ && __GNUC__ */
165
#if !defined(APR_OVERRIDE_ATOMIC_INIT)
168
#define NUM_ATOMIC_HASH 7
169
/* shift by 2 to get rid of alignment issues */
170
#define ATOMIC_HASH(x) (unsigned int)(((unsigned long)(x)>>2)%(unsigned int)NUM_ATOMIC_HASH)
171
static apr_thread_mutex_t **hash_mutex;
172
#endif /* APR_HAS_THREADS */
174
apr_status_t apr_atomic_init(apr_pool_t *p)
179
hash_mutex = apr_palloc(p, sizeof(apr_thread_mutex_t*) * NUM_ATOMIC_HASH);
181
for (i = 0; i < NUM_ATOMIC_HASH; i++) {
182
rv = apr_thread_mutex_create(&(hash_mutex[i]),
183
APR_THREAD_MUTEX_DEFAULT, p);
184
if (rv != APR_SUCCESS) {
188
#endif /* APR_HAS_THREADS */
191
#endif /* !defined(APR_OVERRIDE_ATOMIC_INIT) */
193
/* abort() if 'x' does not evaluate to APR_SUCCESS. */
194
#define CHECK(x) do { if ((x) != APR_SUCCESS) abort(); } while (0)
196
#if !defined(APR_OVERRIDE_ATOMIC_ADD32)
197
#if defined(APR_OVERRIDE_ATOMIC_CAS32)
198
apr_uint32_t apr_atomic_add32(volatile apr_uint32_t *mem, apr_uint32_t val)
200
apr_uint32_t old_value, new_value;
204
new_value = old_value + val;
205
} while (apr_atomic_cas32(mem, new_value, old_value) != old_value);
209
apr_uint32_t apr_atomic_add32(volatile apr_uint32_t *mem, apr_uint32_t val)
211
apr_uint32_t old_value;
214
apr_thread_mutex_t *lock = hash_mutex[ATOMIC_HASH(mem)];
216
CHECK(apr_thread_mutex_lock(lock));
219
CHECK(apr_thread_mutex_unlock(lock));
223
#endif /* APR_HAS_THREADS */
226
#endif /* defined(APR_OVERRIDE_ATOMIC_CAS32) */
227
#endif /* !defined(APR_OVERRIDE_ATOMIC_ADD32) */
229
#if !defined(APR_OVERRIDE_ATOMIC_SUB32)
230
#if defined(APR_OVERRIDE_ATOMIC_CAS32)
231
void apr_atomic_sub32(volatile apr_uint32_t *mem, apr_uint32_t val)
233
apr_uint32_t old_value, new_value;
237
new_value = old_value - val;
238
} while (apr_atomic_cas32(mem, new_value, old_value) != old_value);
241
void apr_atomic_sub32(volatile apr_uint32_t *mem, apr_uint32_t val)
244
apr_thread_mutex_t *lock = hash_mutex[ATOMIC_HASH(mem)];
246
CHECK(apr_thread_mutex_lock(lock));
248
CHECK(apr_thread_mutex_unlock(lock));
251
#endif /* APR_HAS_THREADS */
253
#endif /* defined(APR_OVERRIDE_ATOMIC_CAS32) */
254
#endif /* !defined(APR_OVERRIDE_ATOMIC_SUB32) */
256
#if !defined(APR_OVERRIDE_ATOMIC_SET32)
257
void apr_atomic_set32(volatile apr_uint32_t *mem, apr_uint32_t val)
260
apr_thread_mutex_t *lock = hash_mutex[ATOMIC_HASH(mem)];
262
CHECK(apr_thread_mutex_lock(lock));
264
CHECK(apr_thread_mutex_unlock(lock));
267
#endif /* APR_HAS_THREADS */
269
#endif /* !defined(APR_OVERRIDE_ATOMIC_SET32) */
271
#if !defined(APR_OVERRIDE_ATOMIC_INC32)
272
apr_uint32_t apr_atomic_inc32(volatile apr_uint32_t *mem)
274
return apr_atomic_add32(mem, 1);
276
#endif /* !defined(APR_OVERRIDE_ATOMIC_INC32) */
278
#if !defined(APR_OVERRIDE_ATOMIC_DEC32)
279
#if defined(APR_OVERRIDE_ATOMIC_CAS32)
280
int apr_atomic_dec32(volatile apr_uint32_t *mem)
282
apr_uint32_t old_value, new_value;
286
new_value = old_value - 1;
287
} while (apr_atomic_cas32(mem, new_value, old_value) != old_value);
288
return old_value != 1;
291
int apr_atomic_dec32(volatile apr_uint32_t *mem)
294
apr_thread_mutex_t *lock = hash_mutex[ATOMIC_HASH(mem)];
297
CHECK(apr_thread_mutex_lock(lock));
300
CHECK(apr_thread_mutex_unlock(lock));
305
#endif /* APR_HAS_THREADS */
307
#endif /* defined(APR_OVERRIDE_ATOMIC_CAS32) */
308
#endif /* !defined(APR_OVERRIDE_ATOMIC_DEC32) */
310
#if !defined(APR_OVERRIDE_ATOMIC_CAS32)
311
apr_uint32_t apr_atomic_cas32(volatile apr_uint32_t *mem, apr_uint32_t with,
316
apr_thread_mutex_t *lock = hash_mutex[ATOMIC_HASH(mem)];
318
CHECK(apr_thread_mutex_lock(lock));
323
CHECK(apr_thread_mutex_unlock(lock));
329
#endif /* APR_HAS_THREADS */
332
#endif /* !defined(APR_OVERRIDE_ATOMIC_CAS32) */
334
#if !defined(APR_OVERRIDE_ATOMIC_XCHG32)
335
#if defined(APR_OVERRIDE_ATOMIC_CAS32)
336
apr_uint32_t apr_atomic_xchg32(volatile apr_uint32_t *mem, apr_uint32_t val)
341
} while (apr_atomic_cas32(mem, val, prev) != prev);
345
apr_uint32_t apr_atomic_xchg32(volatile apr_uint32_t *mem, apr_uint32_t val)
349
apr_thread_mutex_t *lock = hash_mutex[ATOMIC_HASH(mem)];
351
CHECK(apr_thread_mutex_lock(lock));
354
CHECK(apr_thread_mutex_unlock(lock));
358
#endif /* APR_HAS_THREADS */
361
#endif /* defined(APR_OVERRIDE_ATOMIC_CAS32) */
362
#endif /* !defined(APR_OVERRIDE_ATOMIC_XCHG32) */
364
#if !defined(APR_OVERRIDE_ATOMIC_CASPTR)
365
void *apr_atomic_casptr(volatile void **mem, void *with, const void *cmp)
369
apr_thread_mutex_t *lock = hash_mutex[ATOMIC_HASH(mem)];
371
CHECK(apr_thread_mutex_lock(lock));
372
prev = *(void **)mem;
376
CHECK(apr_thread_mutex_unlock(lock));
378
prev = *(void **)mem;
382
#endif /* APR_HAS_THREADS */
385
#endif /* !defined(APR_OVERRIDE_ATOMIC_CASPTR) */
387
#if !defined(APR_OVERRIDE_ATOMIC_READ32)
388
APR_DECLARE(apr_uint32_t) apr_atomic_read32(volatile apr_uint32_t *mem)