/*
 * Copyright (c) 2005 Martin Decky
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 * - The name of the author may not be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
35
#ifndef KERN_ppc32_BARRIER_H_
#define KERN_ppc32_BARRIER_H_

/*
 * Compiler-only barriers: the empty asm with a "memory" clobber forbids the
 * compiler from reordering or caching memory accesses across the barrier,
 * but emits no machine instructions.
 */
#define CS_ENTER_BARRIER()  asm volatile ("" ::: "memory")
#define CS_LEAVE_BARRIER()  asm volatile ("" ::: "memory")

/*
 * Hardware barriers. "sync" orders all prior storage accesses before any
 * subsequent ones; "eieio" is the lighter-weight ordering instruction used
 * here for the write-only barrier.
 */
#define memory_barrier()  asm volatile ("sync" ::: "memory")
#define read_barrier()    asm volatile ("sync" ::: "memory")
#define write_barrier()   asm volatile ("eieio" ::: "memory")
46
* The IMB sequence used here is valid for all possible cache models
47
* on uniprocessor. SMP might require a different sequence.
48
* See PowerPC Programming Environment for 32-Bit Microprocessors,
52
static inline void smc_coherence(void *addr)
64
#define COHERENCE_INVAL_MIN 4
66
static inline void smc_coherence_block(void *addr, unsigned long len)
70
for (i = 0; i < len; i += COHERENCE_INVAL_MIN) {
71
asm volatile ("dcbst 0, %0\n" :: "r" (addr + i));
74
asm volatile ("sync");
76
for (i = 0; i < len; i += COHERENCE_INVAL_MIN) {
77
asm volatile ("icbi 0, %0\n" :: "r" (addr + i));