/*
 * Copyright (c) 2003 Hewlett-Packard Development Company, L.P.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "../all_atomic_load_store.h"

#include "../all_acquire_release_volatile.h"

#include "../test_and_set_t_is_char.h"

/* Helper macros for the inline assembly below.  Two variants are     */
/* selected on _ILP32 (32-bit HP/UX ABI) vs. the 64-bit ABI.          */
#ifdef _ILP32
  /* 32-bit HP/UX code. */
  /* This requires pointer "swizzling".  Pointers need to be expanded */
  /* to 64 bits using the addp4 instruction before use.  This makes it*/
  /* hard to share code, but we try anyway.                           */
  /* We assume that addr always appears in argument position 1 in asm */
  /* code.  If it is clobbered due to swizzling, we also need it in   */
  /* second position.  Any later arguments are referenced symbolically,*/
  /* so that we don't have to worry about their position. This requires*/
  /* gcc 3.1, but you shouldn't be using anything older than that on  */
  /* HP/UX anyway.                                                    */
  /* The AO_MASK macro is a workaround for the fact that HP/UX gcc    */
  /* appears to otherwise store 64-bit pointers in ar.ccv, i.e. it    */
  /* doesn't appear to clear high bits in a pointer value we pass into*/
  /* assembly code, even if it is supposedly of type AO_t.            */
# define AO_LEN "4"
# define AO_IN_ADDR "1"(addr)
# define AO_OUT_ADDR , "=r"(addr)
# define AO_SWIZZLE "addp4 %1=0,%1;;\n"
# define AO_MASK(ptr) __asm__("zxt4 %1=%1": "=r"(ptr) : "0"(ptr));
#else
  /* 64-bit ABI: no swizzling needed; addr is usable as-is, and the   */
  /* helper macros expand to nothing.                                 */
# define AO_LEN "8"
# define AO_IN_ADDR "r"(addr)
# define AO_OUT_ADDR /* empty */
# define AO_SWIZZLE /* empty */
# define AO_MASK(ptr) /* empty */
#endif
60
__asm__ __volatile__("mf" : : : "memory");
62
#define AO_HAVE_nop_full
65
AO_fetch_and_add1_acquire (volatile AO_t *addr)
69
__asm__ __volatile__ (AO_SWIZZLE
70
"fetchadd" AO_LEN ".acq %0=[%1],1":
71
"=r" (result) AO_OUT_ADDR: AO_IN_ADDR :"memory");
74
#define AO_HAVE_fetch_and_add1_acquire
77
AO_fetch_and_add1_release (volatile AO_t *addr)
81
__asm__ __volatile__ (AO_SWIZZLE
82
"fetchadd" AO_LEN ".rel %0=[%1],1":
83
"=r" (result) AO_OUT_ADDR: AO_IN_ADDR :"memory");
87
#define AO_HAVE_fetch_and_add1_release
90
AO_fetch_and_sub1_acquire (volatile AO_t *addr)
94
__asm__ __volatile__ (AO_SWIZZLE
95
"fetchadd" AO_LEN ".acq %0=[%1],-1":
96
"=r" (result) AO_OUT_ADDR: AO_IN_ADDR :"memory");
100
#define AO_HAVE_fetch_and_sub1_acquire
103
AO_fetch_and_sub1_release (volatile AO_t *addr)
107
__asm__ __volatile__ (AO_SWIZZLE
108
"fetchadd" AO_LEN ".rel %0=[%1],-1":
109
"=r" (result) AO_OUT_ADDR: AO_IN_ADDR :"memory");
113
#define AO_HAVE_fetch_and_sub1_release
117
AO_INLINE unsigned int
118
AO_int_fetch_and_add1_acquire (volatile unsigned int *addr)
122
__asm__ __volatile__ ("fetchadd4.acq %0=[%1],1":
123
"=r" (result): AO_IN_ADDR :"memory");
126
#define AO_HAVE_int_fetch_and_add1_acquire
128
AO_INLINE unsigned int
129
AO_int_fetch_and_add1_release (volatile unsigned int *addr)
133
__asm__ __volatile__ ("fetchadd4.rel %0=[%1],1":
134
"=r" (result): AO_IN_ADDR :"memory");
138
#define AO_HAVE_int_fetch_and_add1_release
140
AO_INLINE unsigned int
141
AO_int_fetch_and_sub1_acquire (volatile unsigned int *addr)
145
__asm__ __volatile__ ("fetchadd4.acq %0=[%1],-1":
146
"=r" (result): AO_IN_ADDR :"memory");
150
#define AO_HAVE_int_fetch_and_sub1_acquire
152
AO_INLINE unsigned int
153
AO_int_fetch_and_sub1_release (volatile unsigned int *addr)
157
__asm__ __volatile__ ("fetchadd4.rel %0=[%1],-1":
158
"=r" (result): AO_IN_ADDR :"memory");
162
#define AO_HAVE_int_fetch_and_sub1_release
167
AO_compare_and_swap_acquire(volatile AO_t *addr,
168
AO_t old, AO_t new_val)
172
__asm__ __volatile__(AO_SWIZZLE
173
"mov ar.ccv=%[old] ;; cmpxchg" AO_LEN
174
".acq %0=[%1],%[new_val],ar.ccv"
175
: "=r"(oldval) AO_OUT_ADDR
176
: AO_IN_ADDR, [new_val]"r"(new_val), [old]"r"(old)
178
return (oldval == old);
181
#define AO_HAVE_compare_and_swap_acquire
184
AO_compare_and_swap_release(volatile AO_t *addr,
185
AO_t old, AO_t new_val)
189
__asm__ __volatile__(AO_SWIZZLE
190
"mov ar.ccv=%[old] ;; cmpxchg" AO_LEN
191
".rel %0=[%1],%[new_val],ar.ccv"
192
: "=r"(oldval) AO_OUT_ADDR
193
: AO_IN_ADDR, [new_val]"r"(new_val), [old]"r"(old)
195
return (oldval == old);
198
#define AO_HAVE_compare_and_swap_release
201
AO_char_compare_and_swap_acquire(volatile unsigned char *addr,
202
unsigned char old, unsigned char new_val)
204
unsigned char oldval;
205
__asm__ __volatile__(AO_SWIZZLE
206
"mov ar.ccv=%[old] ;; cmpxchg1.acq %0=[%1],%[new_val],ar.ccv"
207
: "=r"(oldval) AO_OUT_ADDR
208
: AO_IN_ADDR, [new_val]"r"(new_val), [old]"r"((AO_t)old)
210
return (oldval == old);
213
#define AO_HAVE_char_compare_and_swap_acquire
216
AO_char_compare_and_swap_release(volatile unsigned char *addr,
217
unsigned char old, unsigned char new_val)
219
unsigned char oldval;
220
__asm__ __volatile__(AO_SWIZZLE
221
"mov ar.ccv=%[old] ;; cmpxchg1.rel %0=[%1],%[new_val],ar.ccv"
222
: "=r"(oldval) AO_OUT_ADDR
223
: AO_IN_ADDR, [new_val]"r"(new_val), [old]"r"((AO_t)old)
225
return (oldval == old);
228
#define AO_HAVE_char_compare_and_swap_release
231
AO_short_compare_and_swap_acquire(volatile unsigned short *addr,
232
unsigned short old, unsigned short new_val)
234
unsigned short oldval;
235
__asm__ __volatile__(AO_SWIZZLE
236
"mov ar.ccv=%[old] ;; cmpxchg2.acq %0=[%1],%[new_val],ar.ccv"
237
: "=r"(oldval) AO_OUT_ADDR
238
: AO_IN_ADDR, [new_val]"r"(new_val), [old]"r"((AO_t)old)
240
return (oldval == old);
243
#define AO_HAVE_short_compare_and_swap_acquire
246
AO_short_compare_and_swap_release(volatile unsigned short *addr,
247
unsigned short old, unsigned short new_val)
249
unsigned short oldval;
250
__asm__ __volatile__(AO_SWIZZLE
251
"mov ar.ccv=%[old] ;; cmpxchg2.rel %0=[%1],%[new_val],ar.ccv"
252
: "=r"(oldval) AO_OUT_ADDR
253
: AO_IN_ADDR, [new_val]"r"(new_val), [old]"r"((AO_t)old)
255
return (oldval == old);
258
#define AO_HAVE_short_compare_and_swap_release
263
AO_int_compare_and_swap_acquire(volatile unsigned int *addr,
264
unsigned int old, unsigned int new_val)
267
__asm__ __volatile__("mov ar.ccv=%3 ;; cmpxchg4.acq %0=[%1],%2,ar.ccv"
269
: AO_IN_ADDR, "r"(new_val), "r"((AO_t)old) : "memory");
270
return (oldval == old);
273
#define AO_HAVE_int_compare_and_swap_acquire
276
AO_int_compare_and_swap_release(volatile unsigned int *addr,
277
unsigned int old, unsigned int new_val)
280
__asm__ __volatile__("mov ar.ccv=%3 ;; cmpxchg4.rel %0=[%1],%2,ar.ccv"
282
: AO_IN_ADDR, "r"(new_val), "r"((AO_t)old) : "memory");
283
return (oldval == old);
286
#define AO_HAVE_int_compare_and_swap_release
/* FIXME: Add compare_and_swap_double as soon as there is widely      */
/* available hardware that implements it.                             */

/* FIXME: Add compare_double_and_swap_double for the _ILP32 case.     */
#ifdef _ILP32
  /* AO_t is int-sized here, so derive the AO_int_... operations from */
  /* the AO_t ones.                                                   */
# include "../ao_t_is_int.h"
#endif