/*
 * Copyright (c) 2001-2004 Jakub Jermar
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
/** @addtogroup libcamd64 amd64
 * @brief amd64 architecture dependent parts of libc
 * @{
 */
/** @file
 */
#ifndef LIBC_amd64_ATOMIC_H_
#define LIBC_amd64_ATOMIC_H_
/** Atomically increment an atomic variable.
 *
 * Uses a `lock`-prefixed `incq`, so the read-modify-write of the
 * 64-bit counter is atomic with respect to other processors.
 *
 * @param val Pointer to the atomic variable to be incremented.
 */
static inline void atomic_inc(atomic_t *val)
{
	asm volatile ("lock incq %0\n" : "+m" (val->count));
}
/** Atomically decrement an atomic variable.
 *
 * Uses a `lock`-prefixed `decq`, so the read-modify-write of the
 * 64-bit counter is atomic with respect to other processors.
 *
 * @param val Pointer to the atomic variable to be decremented.
 */
static inline void atomic_dec(atomic_t *val)
{
	asm volatile ("lock decq %0\n" : "+m" (val->count));
}
/** Atomically increment an atomic variable and return its old value.
 *
 * Implemented with `lock xaddq`, which exchanges the register operand
 * with the memory operand and stores their sum back to memory — the
 * register therefore ends up holding the pre-increment value.
 *
 * NOTE(review): the asm body was reconstructed around the surviving
 * constraint list `: "=r" (r), "+m" (val->count)` — confirm against the
 * project's canonical version of this header.
 *
 * @param val Pointer to the atomic variable to be incremented.
 *
 * @return Value of the variable before the increment.
 */
static inline long atomic_postinc(atomic_t *val)
{
	long r;

	asm volatile (
		"movq $1, %0\n"
		"lock xaddq %0, %1\n"
		: "=r" (r), "+m" (val->count)
	);

	return r;
}
/** Atomically decrement an atomic variable and return its old value.
 *
 * Implemented with `lock xaddq` of -1: the register is exchanged with
 * the memory operand and their sum stored back, so the register ends up
 * holding the pre-decrement value.
 *
 * NOTE(review): the asm body was reconstructed around the surviving
 * constraint list `: "=r" (r), "+m" (val->count)` — confirm against the
 * project's canonical version of this header.
 *
 * @param val Pointer to the atomic variable to be decremented.
 *
 * @return Value of the variable before the decrement.
 */
static inline long atomic_postdec(atomic_t *val)
{
	long r;

	asm volatile (
		"movq $-1, %0\n"
		"lock xaddq %0, %1\n"
		: "=r" (r), "+m" (val->count)
	);

	return r;
}
/** Atomically increment an atomic variable, returning the new value. */
#define atomic_preinc(val)  (atomic_postinc(val) + 1)

/** Atomically decrement an atomic variable, returning the new value. */
#define atomic_predec(val)  (atomic_postdec(val) - 1)