#ifndef __ASM_ARM_SYSTEM_H
#define __ASM_ARM_SYSTEM_H

#ifdef CONFIG_ARM64

/*
 * SCTLR_EL1/SCTLR_EL2/SCTLR_EL3 bits definitions
 */
#define CR_M (1 << 0) /* MMU enable */
#define CR_A (1 << 1) /* Alignment abort enable */
#define CR_C (1 << 2) /* Dcache enable */
#define CR_SA (1 << 3) /* Stack Alignment Check Enable */
#define CR_I (1 << 12) /* Icache enable */
#define CR_WXN (1 << 19) /* Write Permission Imply XN */
#define CR_EE (1 << 25) /* Exception (Big) Endian */

#define PGTABLE_SIZE (0x10000)

#ifndef __ASSEMBLY__
23
"isb" : : : "memory"); \
28
"wfi" : : : "memory"); \
31

/* Return the current exception level: CurrentEL holds EL in bits [3:2] */
static inline unsigned int current_el(void)
{
	unsigned int el;

	asm volatile("mrs %0, CurrentEL" : "=r" (el) : : "cc");
	return el >> 2;
}

/* Read the system control register of the current exception level */
static inline unsigned int get_sctlr(void)
{
	unsigned int el, val;

	el = current_el();
	if (el == 1)
		asm volatile("mrs %0, sctlr_el1" : "=r" (val) : : "cc");
	else if (el == 2)
		asm volatile("mrs %0, sctlr_el2" : "=r" (val) : : "cc");
	else
		asm volatile("mrs %0, sctlr_el3" : "=r" (val) : : "cc");

	return val;
}

/* Write the system control register of the current exception level */
static inline void set_sctlr(unsigned int val)
{
	unsigned int el;

	el = current_el();
	if (el == 1)
		asm volatile("msr sctlr_el1, %0" : : "r" (val) : "cc");
	else if (el == 2)
		asm volatile("msr sctlr_el2, %0" : : "r" (val) : "cc");
	else
		asm volatile("msr sctlr_el3, %0" : : "r" (val) : "cc");

	asm volatile("isb");
}

void __asm_flush_dcache_all(void);
void __asm_invalidate_dcache_all(void);
void __asm_flush_dcache_range(u64 start, u64 end);
void __asm_invalidate_tlb_all(void);
void __asm_invalidate_icache_all(void);
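
/*
 * Usage sketch (illustrative): write a buffer back to memory before a
 * device reads it by DMA. The buffer and dma_start() are hypothetical;
 * the end address passed to __asm_flush_dcache_range() is exclusive.
 *
 *	static char buf[512] __attribute__((aligned(64)));
 *
 *	memcpy(buf, data, sizeof(buf));
 *	__asm_flush_dcache_range((u64)buf, (u64)buf + sizeof(buf));
 *	dma_start(buf, sizeof(buf));
 */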

void armv8_switch_to_el2(void);
void armv8_switch_to_el1(void);

void gic_send_sgi(unsigned long sgino);
void wait_for_wakeup(void);
void smp_kick_all_cpus(void);

#endif /* __ASSEMBLY__ */

#else /* CONFIG_ARM64 */

#ifdef __KERNEL__

#define CPU_ARCH_UNKNOWN 0
#define CPU_ARCH_ARMv3 1
#define CPU_ARCH_ARMv4 2
#define CPU_ARCH_ARMv4T 3
#define CPU_ARCH_ARMv5 4
#define CPU_ARCH_ARMv5T 5
#define CPU_ARCH_ARMv5TE 6
#define CPU_ARCH_ARMv5TEJ 7
#define CPU_ARCH_ARMv6 8
#define CPU_ARCH_ARMv7 9

/*
 * CR1 bits (CP#15 CR1)
 */
#define CR_M (1 << 0) /* MMU enable */
#define CR_A (1 << 1) /* Alignment abort enable */
#define CR_C (1 << 2) /* Dcache enable */
#define CR_W (1 << 3) /* Write buffer enable */
#define CR_P (1 << 4) /* 32-bit exception handler */
#define CR_D (1 << 5) /* 32-bit data address range */
#define CR_L (1 << 6) /* Implementation defined */
#define CR_B (1 << 7) /* Big endian */
#define CR_S (1 << 8) /* System MMU protection */
#define CR_R (1 << 9) /* ROM MMU protection */
#define CR_F (1 << 10) /* Implementation defined */
#define CR_Z (1 << 11) /* Implementation defined */
#define CR_I (1 << 12) /* Icache enable */
#define CR_V (1 << 13) /* Vectors relocated to 0xffff0000 */
#define CR_RR (1 << 14) /* Round Robin cache replacement */
#define CR_L4 (1 << 15) /* LDR pc can set T bit */
#define CR_DT (1 << 16)
#define CR_IT (1 << 18)
#define CR_ST (1 << 19)
#define CR_FI (1 << 21) /* Fast interrupt (lower latency mode) */
#define CR_U (1 << 22) /* Unaligned access operation */
#define CR_XP (1 << 23) /* Extended page tables */
#define CR_VE (1 << 24) /* Vectored interrupts */
#define CR_EE (1 << 25) /* Exception (Big) Endian */
#define CR_TRE (1 << 28) /* TEX remap enable */
#define CR_AFE (1 << 29) /* Access flag enable */
#define CR_TE (1 << 30) /* Thumb exception enable */

#define PGTABLE_SIZE (4096 * 4)

/*
 * This is used to ensure the compiler did actually allocate the register we
 * asked it for some inline assembly sequences. Apparently we can't trust
 * the compiler from one version to another so a bit of paranoia won't hurt.
 * This string is meant to be concatenated with the inline asm string and
 * will cause compilation to stop on mismatch.
 * (for details, see gcc PR 15089)
 */
#define __asmeq(x, y) ".ifnc " x "," y " ; .err ; .endif\n\t"
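
/*
 * Usage sketch (illustrative): __asmeq() is concatenated in front of the
 * asm template, so if the compiler did not actually keep the pinned
 * variable in r0, assembly stops at the .err directive instead of
 * silently using the wrong register. function_id is hypothetical:
 *
 *	register unsigned long r0 asm("r0") = function_id;
 *	asm volatile(__asmeq("%0", "r0")
 *		     "smc	#0"
 *		     : "+r" (r0) : : "memory");
 */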

#ifndef __ASSEMBLY__

#define isb() __asm__ __volatile__ ("" : : : "memory")

#define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t");

#ifdef __ARM_ARCH_7A__
#define wfi() __asm__ __volatile__ ("wfi" : : : "memory")
#else
#define wfi()
#endif

/* Read the CP15 control register (CR) */
static inline unsigned int get_cr(void)
{
	unsigned int val;

	asm("mrc p15, 0, %0, c1, c0, 0 @ get CR" : "=r" (val) : : "cc");
	return val;
}

/* Write the CP15 control register (CR) */
static inline void set_cr(unsigned int val)
{
	asm volatile("mcr p15, 0, %0, c1, c0, 0 @ set CR"
		     : : "r" (val) : "cc");
	isb();
}

/* Read the domain access control register (DACR) */
static inline unsigned int get_dacr(void)
{
	unsigned int val;

	asm("mrc p15, 0, %0, c3, c0, 0 @ get DACR" : "=r" (val) : : "cc");
	return val;
}

/* Write the domain access control register (DACR) */
static inline void set_dacr(unsigned int val)
{
	asm volatile("mcr p15, 0, %0, c3, c0, 0 @ set DACR"
		     : : "r" (val) : "cc");
	isb();
}

/* options available for data cache on each page */
enum dcache_option {
	DCACHE_OFF = 0x12,
	DCACHE_WRITETHROUGH = 0x1a,
	DCACHE_WRITEBACK = 0x1e,
};

/* Size of an MMU section */
enum {
	MMU_SECTION_SHIFT = 20,
	MMU_SECTION_SIZE = 1 << MMU_SECTION_SHIFT,
};

/**
 * Change the cache settings for a region.
 *
 * \param start		start address of memory region to change
 * \param size		size of memory region to change
 * \param option	dcache option to select
 */
void mmu_set_region_dcache_behaviour(u32 start, int size,
				     enum dcache_option option);

/**
 * Register an update to the page tables, and flush the TLB
 *
 * \param start		start address of update in page table
 * \param stop		stop address of update in page table
 */
void mmu_page_table_flush(unsigned long start, unsigned long stop);
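
/*
 * Usage sketch (illustrative): mark a hypothetical frame buffer region
 * write-through so that CPU writes reach memory where the display
 * controller can see them. Regions are handled at MMU-section (1 MiB)
 * granularity, so base and size should be section-aligned:
 *
 *	#define FB_BASE	0x4f000000
 *	#define FB_SIZE	(8 << 20)
 *
 *	mmu_set_region_dcache_behaviour(FB_BASE, FB_SIZE,
 *					DCACHE_WRITETHROUGH);
 */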

#endif /* __ASSEMBLY__ */

#define arch_align_stack(x) (x)

#endif /* __KERNEL__ */

#endif /* CONFIG_ARM64 */

#endif /* __ASM_ARM_SYSTEM_H */