/* Copyright (C) 2011 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Chris Metcalf <cmetcalf@tilera.com>, 2011.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, write to the Free
   Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
   02111-1307 USA.  */
#ifndef _LOWLEVELLOCK_H
#define _LOWLEVELLOCK_H	1

#include <time.h>
#include <sys/param.h>
#include <bits/pthreadtypes.h>
#include <atomic.h>
#include <sysdep.h>
#include <kernel-features.h>

/* Futex operation numbers, passed as the second argument of the futex
   system call.  FUTEX_WAIT and FUTEX_WAKE are referenced by the
   lll_futex_* macros below and so must be defined here.  */
#define FUTEX_WAIT		0
#define FUTEX_WAKE		1
#define FUTEX_REQUEUE		3
#define FUTEX_CMP_REQUEUE	4
#define FUTEX_WAKE_OP		5
#define FUTEX_OP_CLEAR_WAKE_IF_GT_ONE	((4 << 24) | 1)
#define FUTEX_LOCK_PI		6
#define FUTEX_UNLOCK_PI		7
#define FUTEX_TRYLOCK_PI	8
#define FUTEX_WAIT_BITSET	9
#define FUTEX_WAKE_BITSET	10
#define FUTEX_PRIVATE_FLAG	128
#define FUTEX_CLOCK_REALTIME	256

#define FUTEX_BITSET_MATCH_ANY	0xffffffff
/* Values for 'private' parameter of locking macros.  Yes, the
   definition seems to be backwards.  But it is not.  The bit will be
   reversed before passing to the system call.  */
#define LLL_PRIVATE	0
#define LLL_SHARED	FUTEX_PRIVATE_FLAG

#if !defined NOT_IN_libc || defined IS_IN_rtld
/* In libc.so or ld.so all futexes are private.  */
# ifdef __ASSUME_PRIVATE_FUTEX
#  define __lll_private_flag(fl, private) \
  ((fl) | FUTEX_PRIVATE_FLAG)
# else
#  define __lll_private_flag(fl, private) \
  ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex))
# endif
#else
# ifdef __ASSUME_PRIVATE_FUTEX
#  define __lll_private_flag(fl, private) \
  (((fl) | FUTEX_PRIVATE_FLAG) ^ (private))
# else
#  define __lll_private_flag(fl, private) \
  (__builtin_constant_p (private)					      \
   ? ((private) == 0							      \
      ? ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex))	      \
      : (fl))								      \
   : ((fl) | (((private) ^ FUTEX_PRIVATE_FLAG)				      \
	      & THREAD_GETMEM (THREAD_SELF, header.private_futex))))
# endif
#endif
/* Wait on FUTEXP while it still contains VAL; no timeout.  */
#define lll_futex_wait(futexp, val, private) \
  lll_futex_timed_wait (futexp, val, NULL, private)

/* Wait on FUTEXP while it still contains VAL, or until TIMESPEC
   elapses.  Evaluates to the raw syscall result.  */
#define lll_futex_timed_wait(futexp, val, timespec, private) \
  ({									      \
    INTERNAL_SYSCALL_DECL (__err);					      \
    long int __ret;							      \
    __ret = INTERNAL_SYSCALL (futex, __err, 4, (futexp),		      \
			      __lll_private_flag (FUTEX_WAIT, private),	      \
			      (val), (timespec));			      \
    __ret;								      \
  })

/* Wake up to NR threads waiting on FUTEXP.  */
#define lll_futex_wake(futexp, nr, private) \
  ({									      \
    INTERNAL_SYSCALL_DECL (__err);					      \
    long int __ret;							      \
    __ret = INTERNAL_SYSCALL (futex, __err, 4, (futexp),		      \
			      __lll_private_flag (FUTEX_WAKE, private),	      \
			      (nr), 0);					      \
    __ret;								      \
  })
/* Mark FUTEXV as owned by a dead thread and wake one waiter so it can
   observe the FUTEX_OWNER_DIED state.  */
#define lll_robust_dead(futexv, private) \
  do									      \
    {									      \
      int *__futexp = &(futexv);					      \
      atomic_or (__futexp, FUTEX_OWNER_DIED);				      \
      lll_futex_wake (__futexp, 1, private);				      \
    }									      \
  while (0)
/* Requeue waiters: wake NR_WAKE waiters on FUTEXP and move up to
   NR_MOVE more onto MUTEX, provided *FUTEXP still equals VAL.
   Returns non-zero if error happened, zero if success.  */
#define lll_futex_requeue(futexp, nr_wake, nr_move, mutex, val, private) \
  ({									      \
    INTERNAL_SYSCALL_DECL (__err);					      \
    long int __ret;							      \
    __ret = INTERNAL_SYSCALL (futex, __err, 6, (futexp),		      \
			      __lll_private_flag (FUTEX_CMP_REQUEUE, private),\
			      (nr_wake), (nr_move), (mutex), (val));	      \
    INTERNAL_SYSCALL_ERROR_P (__ret, __err);				      \
  })
/* Wake NR_WAKE waiters on FUTEXP and, per the FUTEX_WAKE_OP
   operation, conditionally wake NR_WAKE2 waiters on FUTEXP2.
   Returns non-zero if error happened, zero if success.  */
#define lll_futex_wake_unlock(futexp, nr_wake, nr_wake2, futexp2, private) \
  ({									      \
    INTERNAL_SYSCALL_DECL (__err);					      \
    long int __ret;							      \
    __ret = INTERNAL_SYSCALL (futex, __err, 6, (futexp),		      \
			      __lll_private_flag (FUTEX_WAKE_OP, private),    \
			      (nr_wake), (nr_wake2), (futexp2),		      \
			      FUTEX_OP_CLEAR_WAKE_IF_GT_ONE);		      \
    INTERNAL_SYSCALL_ERROR_P (__ret, __err);				      \
  })
/* Try to acquire the lock: set *FUTEX to 1 if it was 0.  Returns 0 on
   success, non-zero if the lock was already taken.  */
static inline int __attribute__ ((always_inline))
__lll_trylock (int *futex)
{
  return atomic_compare_and_exchange_val_acq (futex, 1, 0) != 0;
}
#define lll_trylock(lock)	__lll_trylock (&(lock))
/* Try to acquire the lock in the "contended" state: set *FUTEX to 2
   if it was 0.  Returns 0 on success, non-zero otherwise.  */
static inline int __attribute__ ((always_inline))
__lll_cond_trylock (int *futex)
{
  return atomic_compare_and_exchange_val_acq (futex, 2, 0) != 0;
}
#define lll_cond_trylock(lock)	__lll_cond_trylock (&(lock))
/* Try to acquire a robust lock: store the caller's thread ID into
   *FUTEX if it was 0.  Returns 0 on success, non-zero otherwise.  */
static inline int __attribute__ ((always_inline))
__lll_robust_trylock (int *futex, int id)
{
  return atomic_compare_and_exchange_val_acq (futex, id, 0) != 0;
}
#define lll_robust_trylock(lock, id) \
  __lll_robust_trylock (&(lock), id)
extern void __lll_lock_wait_private (int *futex) attribute_hidden;
158
extern void __lll_lock_wait (int *futex, int private) attribute_hidden;
159
extern int __lll_robust_lock_wait (int *futex, int private) attribute_hidden;
161
static inline void __attribute__ ((always_inline))
162
__lll_lock (int *futex, int private)
164
if (atomic_compare_and_exchange_bool_acq (futex, 1, 0) != 0)
166
if (__builtin_constant_p (private) && private == LLL_PRIVATE)
167
__lll_lock_wait_private (futex);
169
__lll_lock_wait (futex, private);
172
#define lll_lock(futex, private) __lll_lock (&(futex), private)
175
/* Acquire a robust lock, storing ID (the caller's thread ID) into
   *FUTEX.  Returns 0 on success, or the result of the slow-path wait
   routine on contention.  */
static inline int __attribute__ ((always_inline))
__lll_robust_lock (int *futex, int id, int private)
{
  int result = 0;
  if (atomic_compare_and_exchange_bool_acq (futex, id, 0) != 0)
    result = __lll_robust_lock_wait (futex, private);
  return result;
}
#define lll_robust_lock(futex, id, private) \
  __lll_robust_lock (&(futex), id, private)
/* Acquire the lock directly in the "contended" state (value 2),
   blocking in the kernel if the fast path fails.  */
static inline void __attribute__ ((always_inline))
__lll_cond_lock (int *futex, int private)
{
  if (atomic_compare_and_exchange_bool_acq (futex, 2, 0) != 0)
    __lll_lock_wait (futex, private);
}
#define lll_cond_lock(futex, private) __lll_cond_lock (&(futex), private)

/* Robust variant: acquire with the FUTEX_WAITERS bit already set.  */
#define lll_robust_cond_lock(futex, id, private) \
  __lll_robust_lock (&(futex), (id) | FUTEX_WAITERS, private)
extern int __lll_timedlock_wait (int *futex, const struct timespec *,
201
int private) attribute_hidden;
202
extern int __lll_robust_timedlock_wait (int *futex, const struct timespec *,
203
int private) attribute_hidden;
205
static inline int __attribute__ ((always_inline))
206
__lll_timedlock (int *futex, const struct timespec *abstime, int private)
209
if (atomic_compare_and_exchange_bool_acq (futex, 1, 0) != 0)
210
result = __lll_timedlock_wait (futex, abstime, private);
213
#define lll_timedlock(futex, abstime, private) \
214
__lll_timedlock (&(futex), abstime, private)
217
/* Robust timed lock: store ID into *FUTEX, giving up once the
   absolute time ABSTIME is reached.  Returns 0 on success, or the
   result of the slow-path wait routine on contention.  */
static inline int __attribute__ ((always_inline))
__lll_robust_timedlock (int *futex, const struct timespec *abstime,
			int id, int private)
{
  int result = 0;
  if (atomic_compare_and_exchange_bool_acq (futex, id, 0) != 0)
    result = __lll_robust_timedlock_wait (futex, abstime, private);
  return result;
}
#define lll_robust_timedlock(futex, abstime, id, private) \
  __lll_robust_timedlock (&(futex), abstime, id, private)
/* Release the lock.  If the old value was greater than 1 there were
   waiters, so wake one of them.  */
#define __lll_unlock(futex, private) \
  ((void)								      \
   ({ int *__futex = (futex);						      \
      int __oldval = atomic_exchange_rel (__futex, 0);			      \
      if (__builtin_expect (__oldval > 1, 0))				      \
	lll_futex_wake (__futex, 1, private);				      \
   }))
#define lll_unlock(futex, private) __lll_unlock(&(futex), private)
/* Release a robust lock.  If the FUTEX_WAITERS bit was set in the old
   value, wake one waiter.  */
#define __lll_robust_unlock(futex, private) \
  ((void)								      \
   ({ int *__futex = (futex);						      \
      int __oldval = atomic_exchange_rel (__futex, 0);			      \
      if (__builtin_expect (__oldval & FUTEX_WAITERS, 0))		      \
	lll_futex_wake (__futex, 1, private);				      \
   }))
#define lll_robust_unlock(futex, private) \
  __lll_robust_unlock(&(futex), private)
/* Non-zero if the lock is not in its unlocked initializer state.  */
#define lll_islocked(futex) \
  (futex != LLL_LOCK_INITIALIZER)

/* Initializers for lock.  */
#define LLL_LOCK_INITIALIZER		(0)
#define LLL_LOCK_INITIALIZER_LOCKED	(1)
/* The kernel notifies a process which uses CLONE_CLEARTID via futex
   wakeup when the clone terminates.  The memory location contains the
   thread ID while the clone is running and is reset to zero
   afterwards.  Spin on the futex until TID reads as zero.  */
#define lll_wait_tid(tid) \
  do							\
    {							\
      __typeof (tid) __tid;				\
      while ((__tid = (tid)) != 0)			\
	lll_futex_wait (&(tid), __tid, LLL_SHARED);	\
    }							\
  while (0)
extern int __lll_timedwait_tid (int *, const struct timespec *)
273
#define lll_timedwait_tid(tid, abstime) \
277
__res = __lll_timedwait_tid (&(tid), (abstime)); \
281
#endif /* lowlevellock.h */