#ifndef IA64_INTRINSIC_H
#define IA64_INTRINSIC_H

/*
 * Compiler-dependent Intrinsics
 *
 * Copyright (C) 2002,2003 Jun Nakajima <jun.nakajima@intel.com>
 * Copyright (C) 2002,2003 Suresh Siddha <suresh.b.siddha@intel.com>
 *
 */
/*
 * Link-time diagnostics.  These functions are declared but (by
 * convention) never defined: ia64_cmpxchg() calls the first one for an
 * unsupported operand size, so any such misuse that the compiler cannot
 * prove dead surfaces as an undefined-symbol link error instead of
 * silently miscompiling.  NOTE(review): assumes no definition exists
 * elsewhere in the tree -- confirm.
 */
extern long ia64_cmpxchg_called_with_bad_pointer (void);
extern void ia64_bad_param_for_getreg (void);
/*
 * ia64_cmpxchg - compare-and-exchange on a 1-, 2-, 4- or 8-byte object.
 * @sem: memory-ordering suffix, acq or rel (token-pasted into the
 *       per-size helper name, e.g. ia64_cmpxchg4_acq)
 * @ptr: address of the object to update
 * @o:   expected old value
 * @n:   new value, stored only if *ptr == o
 * @s:   operand size in bytes; any value other than 1/2/4/8 calls the
 *       undefined ia64_cmpxchg_called_with_bad_pointer() and therefore
 *       fails at link time
 *
 * Evaluates to the value previously stored at @ptr, cast back to the
 * type of @o.
 */
#define ia64_cmpxchg(sem,ptr,o,n,s) ({ \
	uint64_t _o, _r; \
	switch(s) { \
		/* Normalize the expected value to the operand width. */ \
		case 1: _o = (uint8_t)(long)(o); break; \
		case 2: _o = (uint16_t)(long)(o); break; \
		case 4: _o = (uint32_t)(long)(o); break; \
		case 8: _o = (uint64_t)(long)(o); break; \
		default: break; \
	} \
	switch(s) { \
		case 1: \
			_r = ia64_cmpxchg1_##sem((uint8_t*)ptr,n,_o); break; \
		case 2: \
			_r = ia64_cmpxchg2_##sem((uint16_t*)ptr,n,_o); break; \
		case 4: \
			_r = ia64_cmpxchg4_##sem((uint32_t*)ptr,n,_o); break; \
		case 8: \
			_r = ia64_cmpxchg8_##sem((uint64_t*)ptr,n,_o); break; \
		default: \
			_r = ia64_cmpxchg_called_with_bad_pointer(); break; \
	} \
	(__typeof__(o)) _r; \
})

/* Convenience wrappers: operand size is inferred from the pointee type. */
#define cmpxchg_acq(ptr,o,n) ia64_cmpxchg(acq,ptr,o,n,sizeof(*ptr))
#define cmpxchg_rel(ptr,o,n) ia64_cmpxchg(rel,ptr,o,n,sizeof(*ptr))
#ifdef __INTEL_COMPILER
/*
 * Intrinsic prototypes provided natively by the Intel compiler (ecc).
 * The __synci/__isrlz/__dsrlz prototypes back the #defines below.
 */
void __fc(uint64_t *addr);
void __synci(void);
void __isrlz(void);
void __dsrlz(void);
uint64_t __getReg(const int whichReg);

/*
 * _InterlockedCompareExchange*: atomically store xchg into *dest if
 * *dest == comp, returning the previous value; the _acq/_rel suffix
 * selects acquire or release ordering.
 */
uint64_t _InterlockedCompareExchange8_rel(volatile uint8_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange8_acq(volatile uint8_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange16_rel(volatile uint16_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange16_acq(volatile uint16_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange_rel(volatile uint32_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange_acq(volatile uint32_t *dest, uint64_t xchg, uint64_t comp);
uint64_t _InterlockedCompareExchange64_rel(volatile uint64_t *dest, uint64_t xchg, uint64_t comp);
/* Fixed: return type was misspelled "u64_t" (undeclared type). */
uint64_t _InterlockedCompareExchange64_acq(volatile uint64_t *dest, uint64_t xchg, uint64_t comp);

/* Map the generic ia64_cmpxchg helper names onto the intrinsics. */
#define ia64_cmpxchg1_rel _InterlockedCompareExchange8_rel
#define ia64_cmpxchg1_acq _InterlockedCompareExchange8_acq
#define ia64_cmpxchg2_rel _InterlockedCompareExchange16_rel
#define ia64_cmpxchg2_acq _InterlockedCompareExchange16_acq
#define ia64_cmpxchg4_rel _InterlockedCompareExchange_rel
#define ia64_cmpxchg4_acq _InterlockedCompareExchange_acq
#define ia64_cmpxchg8_rel _InterlockedCompareExchange64_rel
#define ia64_cmpxchg8_acq _InterlockedCompareExchange64_acq

#define ia64_srlz_d __dsrlz
#define ia64_srlz_i __isrlz
#define __ia64_fc __fc
#define ia64_sync_i __synci
#define __ia64_getreg __getReg
#else /* __INTEL_COMPILER */
/* 1-byte compare-and-exchange, acquire ordering: stores new into *ptr
 * iff *ptr == old; evaluates to the previous value of *ptr. */
#define ia64_cmpxchg1_acq(ptr, new, old) \
({ \
	uint64_t ia64_intri_res; \
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \
	asm volatile ("cmpxchg1.acq %0=[%1],%2,ar.ccv": \
		"=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \
	ia64_intri_res; \
})
/* 1-byte compare-and-exchange, release ordering: stores new into *ptr
 * iff *ptr == old; evaluates to the previous value of *ptr. */
#define ia64_cmpxchg1_rel(ptr, new, old) \
({ \
	uint64_t ia64_intri_res; \
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \
	asm volatile ("cmpxchg1.rel %0=[%1],%2,ar.ccv": \
		"=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \
	ia64_intri_res; \
})
/* 2-byte compare-and-exchange, acquire ordering: stores new into *ptr
 * iff *ptr == old; evaluates to the previous value of *ptr. */
#define ia64_cmpxchg2_acq(ptr, new, old) \
({ \
	uint64_t ia64_intri_res; \
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \
	asm volatile ("cmpxchg2.acq %0=[%1],%2,ar.ccv": \
		"=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \
	ia64_intri_res; \
})
/* 2-byte compare-and-exchange, release ordering: stores new into *ptr
 * iff *ptr == old; evaluates to the previous value of *ptr. */
#define ia64_cmpxchg2_rel(ptr, new, old) \
({ \
	uint64_t ia64_intri_res; \
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \
	asm volatile ("cmpxchg2.rel %0=[%1],%2,ar.ccv": \
		"=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \
	ia64_intri_res; \
})
/* 4-byte compare-and-exchange, acquire ordering: stores new into *ptr
 * iff *ptr == old; evaluates to the previous value of *ptr. */
#define ia64_cmpxchg4_acq(ptr, new, old) \
({ \
	uint64_t ia64_intri_res; \
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \
	asm volatile ("cmpxchg4.acq %0=[%1],%2,ar.ccv": \
		"=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \
	ia64_intri_res; \
})
/* 4-byte compare-and-exchange, release ordering: stores new into *ptr
 * iff *ptr == old; evaluates to the previous value of *ptr. */
#define ia64_cmpxchg4_rel(ptr, new, old) \
({ \
	uint64_t ia64_intri_res; \
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \
	asm volatile ("cmpxchg4.rel %0=[%1],%2,ar.ccv": \
		"=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \
	ia64_intri_res; \
})
/* 8-byte compare-and-exchange, acquire ordering: stores new into *ptr
 * iff *ptr == old; evaluates to the previous value of *ptr. */
#define ia64_cmpxchg8_acq(ptr, new, old) \
({ \
	uint64_t ia64_intri_res; \
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \
	asm volatile ("cmpxchg8.acq %0=[%1],%2,ar.ccv": \
		"=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \
	ia64_intri_res; \
})
/* 8-byte compare-and-exchange, release ordering: stores new into *ptr
 * iff *ptr == old; evaluates to the previous value of *ptr. */
#define ia64_cmpxchg8_rel(ptr, new, old) \
({ \
	uint64_t ia64_intri_res; \
	asm volatile ("mov ar.ccv=%0;;" :: "rO"(old)); \
	asm volatile ("cmpxchg8.rel %0=[%1],%2,ar.ccv": \
		"=r"(ia64_intri_res) : "r"(ptr), "r"(new) : "memory"); \
	ia64_intri_res; \
})
/* Serialization / cache-line-flush barriers (GNU inline-asm versions). */
#define ia64_srlz_i() asm volatile (";; srlz.i ;;" ::: "memory")
/* Fixed: dropped the stray trailing ';' so the macro expands to exactly
 * one statement, matching its siblings (the extra ';' broke uses such as
 * "if (c) ia64_srlz_d(); else ..."). */
#define ia64_srlz_d() asm volatile (";; srlz.d" ::: "memory")
#define __ia64_fc(addr) asm volatile ("fc %0" :: "r"(addr) : "memory")
#define ia64_sync_i() asm volatile (";; sync.i" ::: "memory")
#endif /* __INTEL_COMPILER */

#endif /* IA64_INTRINSIC_H */