1 | /* SPDX-License-Identifier: GPL-2.0-only */ |
2 | /* |
3 | * linux/arch/arm/mm/proc-v7.S |
4 | * |
5 | * Copyright (C) 2001 Deep Blue Solutions Ltd. |
6 | * |
7 | * This is the "shell" of the ARMv7 processor support. |
8 | */ |
9 | #include <linux/arm-smccc.h> |
10 | #include <linux/init.h> |
11 | #include <linux/linkage.h> |
12 | #include <linux/pgtable.h> |
13 | #include <asm/assembler.h> |
14 | #include <asm/asm-offsets.h> |
15 | #include <asm/hwcap.h> |
16 | #include <asm/pgtable-hwdef.h> |
17 | #include <asm/page.h> |
18 | |
19 | #include "proc-macros.S" |
20 | |
21 | #ifdef CONFIG_ARM_LPAE |
22 | #include "proc-v7-3level.S" |
23 | #else |
24 | #include "proc-v7-2level.S" |
25 | #endif |
26 | |
27 | .arch armv7-a |
28 | |
/*
 * cpu_v7_proc_init()
 *
 * Nothing to do at this stage for ARMv7; kept as a real function so
 * the processor function table always has a valid init hook.
 */
ENTRY(cpu_v7_proc_init)
	ret	lr
ENDPROC(cpu_v7_proc_init)
32 | |
/*
 * cpu_v7_proc_fin()
 *
 * Prepare the CPU for reset/shutdown: clear the I (I-cache enable),
 * C (D-cache enable) and A (alignment check) bits in SCTLR.
 * The MMU itself is left on; cpu_v7_reset turns it off.
 */
ENTRY(cpu_v7_proc_fin)
	mrc	p15, 0, r0, c1, c0, 0		@ ctrl register
	bic	r0, r0, #0x1000			@ ...i............
	bic	r0, r0, #0x0006			@ .............ca.
	mcr	p15, 0, r0, c1, c0, 0		@ disable caches
	ret	lr
ENDPROC(cpu_v7_proc_fin)
40 | |
41 | /* |
42 | * cpu_v7_reset(loc, hyp) |
43 | * |
44 | * Perform a soft reset of the system. Put the CPU into the |
45 | * same state as it would be if it had been reset, and branch |
46 | * to what would be the reset vector. |
47 | * |
48 | * - loc - location to jump to for soft reset |
49 | * - hyp - indicate if restart occurs in HYP mode |
50 | * |
51 | * This code must be executed using a flat identity mapping with |
52 | * caches disabled. |
53 | */ |
54 | .align 5 |
55 | .pushsection .idmap.text, "ax" |
56 | ENTRY(cpu_v7_reset) |
57 | mrc p15, 0, r2, c1, c0, 0 @ ctrl register |
58 | bic r2, r2, #0x1 @ ...............m |
59 | THUMB( bic r2, r2, #1 << 30 ) @ SCTLR.TE (Thumb exceptions) |
60 | mcr p15, 0, r2, c1, c0, 0 @ disable MMU |
61 | isb |
62 | #ifdef CONFIG_ARM_VIRT_EXT |
63 | teq r1, #0 |
64 | bne __hyp_soft_restart |
65 | #endif |
66 | bx r0 |
67 | ENDPROC(cpu_v7_reset) |
68 | .popsection |
69 | |
70 | /* |
71 | * cpu_v7_do_idle() |
72 | * |
73 | * Idle the processor (eg, wait for interrupt). |
74 | * |
75 | * IRQs are already disabled. |
76 | */ |
77 | ENTRY(cpu_v7_do_idle) |
78 | dsb @ WFI may enter a low-power mode |
79 | wfi |
80 | ret lr |
81 | ENDPROC(cpu_v7_do_idle) |
82 | |
/*
 * cpu_v7_dcache_clean_area(addr, size)
 *
 * Clean (write back) the D-cache over [r0, r0 + r1) by MVA, one cache
 * line at a time.  On SMP this is a no-op: the MP extensions imply that
 * page-table walks snoop the L1, so no cleaning is required.
 *
 * - r0 - virtual start address
 * - r1 - size in bytes
 * Clobbers: r0, r1, r2, r3.
 */
ENTRY(cpu_v7_dcache_clean_area)
	ALT_SMP(W(nop))			@ MP extensions imply L1 PTW
	ALT_UP_B(1f)
	ret	lr
1:	dcache_line_size r2, r3		@ r2 = line size, r3 = scratch
2:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry (DCCMVAC)
	add	r0, r0, r2
	subs	r1, r1, r2
	bhi	2b
	dsb	ishst				@ make the cleans visible
	ret	lr
ENDPROC(cpu_v7_dcache_clean_area)
95 | |
/*
 * Hardened variants of cpu_v7_switch_mm used for branch-predictor
 * hardening: each invalidates branch-predictor state (via the
 * ARM_SMCCC_ARCH_WORKAROUND_1 firmware call, ICIALLU, or BPIALL)
 * and then tail-calls the normal cpu_v7_switch_mm.
 */
#ifdef CONFIG_ARM_PSCI
	.arch_extension sec
ENTRY(cpu_v7_smc_switch_mm)
	stmfd	sp!, {r0 - r3}			@ SMCCC calls may clobber r0-r3
	movw	r0, #:lower16:ARM_SMCCC_ARCH_WORKAROUND_1
	movt	r0, #:upper16:ARM_SMCCC_ARCH_WORKAROUND_1
	smc	#0				@ firmware BP invalidation
	ldmfd	sp!, {r0 - r3}
	b	cpu_v7_switch_mm
ENDPROC(cpu_v7_smc_switch_mm)
	.arch_extension virt
ENTRY(cpu_v7_hvc_switch_mm)
	stmfd	sp!, {r0 - r3}			@ SMCCC calls may clobber r0-r3
	movw	r0, #:lower16:ARM_SMCCC_ARCH_WORKAROUND_1
	movt	r0, #:upper16:ARM_SMCCC_ARCH_WORKAROUND_1
	hvc	#0				@ hypervisor BP invalidation
	ldmfd	sp!, {r0 - r3}
	b	cpu_v7_switch_mm
ENDPROC(cpu_v7_hvc_switch_mm)
#endif
ENTRY(cpu_v7_iciallu_switch_mm)
	mov	r3, #0
	mcr	p15, 0, r3, c7, c5, 0		@ ICIALLU
	b	cpu_v7_switch_mm
ENDPROC(cpu_v7_iciallu_switch_mm)
ENTRY(cpu_v7_bpiall_switch_mm)
	mov	r3, #0
	mcr	p15, 0, r3, c7, c5, 6		@ flush BTAC/BTB
	b	cpu_v7_switch_mm
ENDPROC(cpu_v7_bpiall_switch_mm)
126 | |
	string	cpu_v7_name, "ARMv7 Processor"
	.align

	/* Suspend/resume support: derived from arch/arm/mach-s5pv210/sleep.S */
	/* 9 words: 2 stored by the first stmia in cpu_v7_do_suspend, 7 by the second */
.globl	cpu_v7_suspend_size
.equ	cpu_v7_suspend_size, 4 * 9
133 | #ifdef CONFIG_ARM_CPU_SUSPEND |
/*
 * cpu_v7_do_suspend(save_area)
 *
 * Save the CP15 state that must survive a power-down into the buffer
 * at r0 (cpu_v7_suspend_size = 9 words).
 * Layout: [FCSE/PID, TPIDRURO] then [r5..r11] where r5 is the TTB1
 * low word under LPAE (otherwise that slot is unused on resume).
 */
ENTRY(cpu_v7_do_suspend)
	stmfd	sp!, {r4 - r11, lr}
	mrc	p15, 0, r4, c13, c0, 0	@ FCSE/PID
	mrc	p15, 0, r5, c13, c0, 3	@ User r/o thread ID
	stmia	r0!, {r4 - r5}		@ first 2 words
#ifdef CONFIG_MMU
	mrc	p15, 0, r6, c3, c0, 0	@ Domain ID
#ifdef CONFIG_ARM_LPAE
	mrrc	p15, 1, r5, r7, c2	@ TTB 1 (64-bit: r5 = low, r7 = high)
#else
	mrc	p15, 0, r7, c2, c0, 1	@ TTB 1
#endif
	mrc	p15, 0, r11, c2, c0, 2	@ TTB control register
#endif
	mrc	p15, 0, r8, c1, c0, 0	@ Control register
	mrc	p15, 0, r9, c1, c0, 1	@ Auxiliary control register
	mrc	p15, 0, r10, c1, c0, 2	@ Co-processor access control
	stmia	r0, {r5 - r11}		@ remaining 7 words
	ldmfd	sp!, {r4 - r11, pc}
ENDPROC(cpu_v7_do_suspend)
154 | |
/*
 * cpu_v7_do_resume(save_area)
 *
 * Restore the CP15 state saved by cpu_v7_do_suspend and hand the saved
 * SCTLR value (in r0) to cpu_resume_mmu, which turns the MMU back on.
 * r1 presumably holds the physical TTBR0 value supplied by the generic
 * cpu_resume path — TODO confirm against the caller.
 */
ENTRY(cpu_v7_do_resume)
	mov	ip, #0
	mcr	p15, 0, ip, c7, c5, 0	@ invalidate I cache
	mcr	p15, 0, ip, c13, c0, 1	@ set reserved context ID
	ldmia	r0!, {r4 - r5}		@ first 2 saved words
	mcr	p15, 0, r4, c13, c0, 0	@ FCSE/PID
	mcr	p15, 0, r5, c13, c0, 3	@ User r/o thread ID
	ldmia	r0, {r5 - r11}		@ remaining 7 saved words
#ifdef CONFIG_MMU
	mcr	p15, 0, ip, c8, c7, 0	@ invalidate TLBs
	mcr	p15, 0, r6, c3, c0, 0	@ Domain ID
#ifdef CONFIG_ARM_LPAE
	mcrr	p15, 0, r1, ip, c2	@ TTB 0 (high word zero)
	mcrr	p15, 1, r5, r7, c2	@ TTB 1
#else
	ALT_SMP(orr	r1, r1, #TTB_FLAGS_SMP)
	ALT_UP(orr	r1, r1, #TTB_FLAGS_UP)
	mcr	p15, 0, r1, c2, c0, 0	@ TTB 0
	mcr	p15, 0, r7, c2, c0, 1	@ TTB 1
#endif
	mcr	p15, 0, r11, c2, c0, 2	@ TTB control register
	ldr	r4, =PRRR		@ PRRR
	ldr	r5, =NMRR		@ NMRR
	mcr	p15, 0, r4, c10, c2, 0	@ write PRRR
	mcr	p15, 0, r5, c10, c2, 1	@ write NMRR
#endif	/* CONFIG_MMU */
	mrc	p15, 0, r4, c1, c0, 1	@ Read Auxiliary control register
	teq	r4, r9			@ Is it already set?
	mcrne	p15, 0, r9, c1, c0, 1	@ No, so write it (skip the write when
					@ possible; ACTLR may not be writable
					@ from non-secure — NOTE(review))
	mcr	p15, 0, r10, c1, c0, 2	@ Co-processor access control
	isb
	dsb
	mov	r0, r8			@ control register (for cpu_resume_mmu)
	b	cpu_resume_mmu
ENDPROC(cpu_v7_do_resume)
190 | #endif |
191 | |
/*
 * Cortex-A9 MP suspend/resume: saves two extra implementation-defined
 * registers (Diagnostic and Power) in front of the common v7 state,
 * hence suspend_size = cpu_v7_suspend_size + 2 words.
 */
.globl	cpu_ca9mp_suspend_size
.equ	cpu_ca9mp_suspend_size, cpu_v7_suspend_size + 4 * 2
#ifdef CONFIG_ARM_CPU_SUSPEND
ENTRY(cpu_ca9mp_do_suspend)
	stmfd	sp!, {r4 - r5}
	mrc	p15, 0, r4, c15, c0, 1		@ Diagnostic register
	mrc	p15, 0, r5, c15, c0, 0		@ Power register
	stmia	r0!, {r4 - r5}			@ r0 advanced past the extra words
	ldmfd	sp!, {r4 - r5}
	b	cpu_v7_do_suspend		@ common state follows
ENDPROC(cpu_ca9mp_do_suspend)

ENTRY(cpu_ca9mp_do_resume)
	ldmia	r0!, {r4 - r5}
	mrc	p15, 0, r10, c15, c0, 1		@ Read Diagnostic register
	teq	r4, r10				@ Already restored?
	mcrne	p15, 0, r4, c15, c0, 1		@ No, so restore it
	mrc	p15, 0, r10, c15, c0, 0		@ Read Power register
	teq	r5, r10				@ Already restored?
	mcrne	p15, 0, r5, c15, c0, 0		@ No, so restore it
	b	cpu_v7_do_resume		@ common state follows
ENDPROC(cpu_ca9mp_do_resume)
#endif
215 | |
#ifdef CONFIG_CPU_PJ4B
/*
 * Marvell PJ4B: mostly identical to generic v7 (aliased below), but
 * suspend/resume must also preserve five implementation-defined CP15
 * registers, and erratum 4742 needs an extra DSB after WFI.
 */
	globl_equ	cpu_pj4b_switch_mm,     cpu_v7_switch_mm
	globl_equ	cpu_pj4b_set_pte_ext,	cpu_v7_set_pte_ext
	globl_equ	cpu_pj4b_proc_init,	cpu_v7_proc_init
	globl_equ	cpu_pj4b_proc_fin, 	cpu_v7_proc_fin
	globl_equ	cpu_pj4b_reset,	   	cpu_v7_reset
#ifdef CONFIG_PJ4B_ERRATA_4742
ENTRY(cpu_pj4b_do_idle)
	dsb					@ WFI may enter a low-power mode
	wfi
	dsb					@ barrier (erratum 4742 workaround)
	ret	lr
ENDPROC(cpu_pj4b_do_idle)
#else
	globl_equ	cpu_pj4b_do_idle,  	cpu_v7_do_idle
#endif
	globl_equ	cpu_pj4b_dcache_clean_area,	cpu_v7_dcache_clean_area
#ifdef CONFIG_ARM_CPU_SUSPEND
ENTRY(cpu_pj4b_do_suspend)
	stmfd	sp!, {r6 - r10}
	mrc	p15, 1, r6, c15, c1, 0  @ save CP15 - extra features
	mrc	p15, 1, r7, c15, c2, 0	@ save CP15 - Aux Func Modes Ctrl 0
	mrc	p15, 1, r8, c15, c1, 2	@ save CP15 - Aux Debug Modes Ctrl 2
	mrc	p15, 1, r9, c15, c1, 1	@ save CP15 - Aux Debug Modes Ctrl 1
	mrc	p15, 0, r10, c9, c14, 0	@ save CP15 - PMC
	stmia	r0!, {r6 - r10}		@ 5 extra words ahead of common state
	ldmfd	sp!, {r6 - r10}
	b	cpu_v7_do_suspend
ENDPROC(cpu_pj4b_do_suspend)

ENTRY(cpu_pj4b_do_resume)
	ldmia	r0!, {r6 - r10}
	mcr	p15, 1, r6, c15, c1, 0  @ restore CP15 - extra features
	mcr	p15, 1, r7, c15, c2, 0	@ restore CP15 - Aux Func Modes Ctrl 0
	mcr	p15, 1, r8, c15, c1, 2	@ restore CP15 - Aux Debug Modes Ctrl 2
	mcr	p15, 1, r9, c15, c1, 1	@ restore CP15 - Aux Debug Modes Ctrl 1
	mcr	p15, 0, r10, c9, c14, 0	@ restore CP15 - PMC
	b	cpu_v7_do_resume
ENDPROC(cpu_pj4b_do_resume)
#endif
.globl	cpu_pj4b_suspend_size
.equ	cpu_pj4b_suspend_size, cpu_v7_suspend_size + 4 * 5

#endif
260 | |
261 | @ |
262 | @ Invoke the v7_invalidate_l1() function, which adheres to the AAPCS |
263 | @ rules, and so it may corrupt registers that we need to preserve. |
264 | @ |
265 | .macro do_invalidate_l1 |
266 | mov r6, r1 |
267 | mov r7, r2 |
268 | mov r10, lr |
269 | bl v7_invalidate_l1 @ corrupts {r0-r3, ip, lr} |
270 | mov r1, r6 |
271 | mov r2, r7 |
272 | mov lr, r10 |
273 | .endm |
274 | |
275 | /* |
276 | * __v7_setup |
277 | * |
278 | * Initialise TLB, Caches, and MMU state ready to switch the MMU |
279 | * on. Return in r0 the new CP15 C1 control register setting. |
280 | * |
281 | * r1, r2, r4, r5, r9, r13 must be preserved - r13 is not a stack |
282 | * r4: TTBR0 (low word) |
283 | * r5: TTBR0 (high word if LPAE) |
284 | * r8: TTBR1 |
285 | * r9: Main ID register |
286 | * |
287 | * This should be able to cover all ARMv7 cores. |
288 | * |
289 | * It is assumed that: |
290 | * - cache type register is implemented |
291 | */ |
292 | __v7_ca5mp_setup: |
293 | __v7_ca9mp_setup: |
294 | __v7_cr7mp_setup: |
295 | __v7_cr8mp_setup: |
296 | do_invalidate_l1 |
297 | mov r10, #(1 << 0) @ Cache/TLB ops broadcasting |
298 | b 1f |
299 | __v7_ca7mp_setup: |
300 | __v7_ca12mp_setup: |
301 | __v7_ca15mp_setup: |
302 | __v7_b15mp_setup: |
303 | __v7_ca17mp_setup: |
304 | do_invalidate_l1 |
305 | mov r10, #0 |
306 | 1: |
307 | #ifdef CONFIG_SMP |
308 | orr r10, r10, #(1 << 6) @ Enable SMP/nAMP mode |
309 | ALT_SMP(mrc p15, 0, r0, c1, c0, 1) |
310 | ALT_UP(mov r0, r10) @ fake it for UP |
311 | orr r10, r10, r0 @ Set required bits |
312 | teq r10, r0 @ Were they already set? |
313 | mcrne p15, 0, r10, c1, c0, 1 @ No, update register |
314 | #endif |
315 | b __v7_setup_cont |
316 | |
317 | /* |
318 | * Errata: |
319 | * r0, r10 available for use |
320 | * r1, r2, r4, r5, r9, r13: must be preserved |
321 | * r3: contains MIDR rX number in bits 23-20 |
322 | * r6: contains MIDR rXpY as 8-bit XY number |
323 | * r9: MIDR |
324 | */ |
325 | __ca8_errata: |
326 | #if defined(CONFIG_ARM_ERRATA_430973) && !defined(CONFIG_ARCH_MULTIPLATFORM) |
327 | teq r3, #0x00100000 @ only present in r1p* |
328 | mrceq p15, 0, r0, c1, c0, 1 @ read aux control register |
329 | orreq r0, r0, #(1 << 6) @ set IBE to 1 |
330 | mcreq p15, 0, r0, c1, c0, 1 @ write aux control register |
331 | #endif |
332 | #ifdef CONFIG_ARM_ERRATA_458693 |
333 | teq r6, #0x20 @ only present in r2p0 |
334 | mrceq p15, 0, r0, c1, c0, 1 @ read aux control register |
335 | orreq r0, r0, #(1 << 5) @ set L1NEON to 1 |
336 | orreq r0, r0, #(1 << 9) @ set PLDNOP to 1 |
337 | mcreq p15, 0, r0, c1, c0, 1 @ write aux control register |
338 | #endif |
339 | #ifdef CONFIG_ARM_ERRATA_460075 |
340 | teq r6, #0x20 @ only present in r2p0 |
341 | mrceq p15, 1, r0, c9, c0, 2 @ read L2 cache aux ctrl register |
342 | tsteq r0, #1 << 22 |
343 | orreq r0, r0, #(1 << 22) @ set the Write Allocate disable bit |
344 | mcreq p15, 1, r0, c9, c0, 2 @ write the L2 cache aux ctrl register |
345 | #endif |
346 | b __errata_finish |
347 | |
/* Cortex-A9 errata: all workarounds set bits in the diagnostic register. */
__ca9_errata:
#ifdef CONFIG_ARM_ERRATA_742230
	cmp	r6, #0x22			@ only present up to r2p2
	mrcle	p15, 0, r0, c15, c0, 1		@ read diagnostic register
	orrle	r0, r0, #1 << 4			@ set bit #4
	mcrle	p15, 0, r0, c15, c0, 1		@ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_742231
	teq	r6, #0x20			@ present in r2p0
	teqne	r6, #0x21			@ present in r2p1
	teqne	r6, #0x22			@ present in r2p2
	mrceq	p15, 0, r0, c15, c0, 1		@ read diagnostic register
	orreq	r0, r0, #1 << 12		@ set bit #12
	orreq	r0, r0, #1 << 22		@ set bit #22
	mcreq	p15, 0, r0, c15, c0, 1		@ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_743622
	teq	r3, #0x00200000			@ only present in r2p*
	mrceq	p15, 0, r0, c15, c0, 1		@ read diagnostic register
	orreq	r0, r0, #1 << 6			@ set bit #6
	mcreq	p15, 0, r0, c15, c0, 1		@ write diagnostic register
#endif
#if defined(CONFIG_ARM_ERRATA_751472) && defined(CONFIG_SMP)
	ALT_SMP(cmp r6, #0x30)			@ present prior to r3p0
	ALT_UP_B(1f)				@ SMP-only erratum
	mrclt	p15, 0, r0, c15, c0, 1		@ read diagnostic register
	orrlt	r0, r0, #1 << 11		@ set bit #11
	mcrlt	p15, 0, r0, c15, c0, 1		@ write diagnostic register
1:
#endif
	b	__errata_finish
379 | |
/* Cortex-A15 errata. */
__ca15_errata:
#ifdef CONFIG_ARM_ERRATA_773022
	cmp	r6, #0x4			@ only present up to r0p4
	mrcle	p15, 0, r0, c1, c0, 1		@ read aux control register
	orrle	r0, r0, #1 << 1			@ disable loop buffer
	mcrle	p15, 0, r0, c1, c0, 1		@ write aux control register
#endif
	b	__errata_finish
388 | |
/* Cortex-A12 errata: applied unconditionally for all revisions. */
__ca12_errata:
#ifdef CONFIG_ARM_ERRATA_818325_852422
	mrc	p15, 0, r10, c15, c0, 1		@ read diagnostic register
	orr	r10, r10, #1 << 12		@ set bit #12
	mcr	p15, 0, r10, c15, c0, 1		@ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_821420
	mrc	p15, 0, r10, c15, c0, 2		@ read internal feature reg
	orr	r10, r10, #1 << 1		@ set bit #1
	mcr	p15, 0, r10, c15, c0, 2		@ write internal feature reg
#endif
#ifdef CONFIG_ARM_ERRATA_825619
	mrc	p15, 0, r10, c15, c0, 1		@ read diagnostic register
	orr	r10, r10, #1 << 24		@ set bit #24
	mcr	p15, 0, r10, c15, c0, 1		@ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_857271
	mrc	p15, 0, r10, c15, c0, 1		@ read diagnostic register
	orr	r10, r10, #3 << 10		@ set bits #10 and #11
	mcr	p15, 0, r10, c15, c0, 1		@ write diagnostic register
#endif
	b	__errata_finish
411 | |
/* Cortex-A17 errata. */
__ca17_errata:
#ifdef CONFIG_ARM_ERRATA_852421
	cmp	r6, #0x12			@ only present up to r1p2
	mrcle	p15, 0, r10, c15, c0, 1		@ read diagnostic register
	orrle	r10, r10, #1 << 24		@ set bit #24
	mcrle	p15, 0, r10, c15, c0, 1		@ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_852423
	cmp	r6, #0x12			@ only present up to r1p2
	mrcle	p15, 0, r10, c15, c0, 1		@ read diagnostic register
	orrle	r10, r10, #1 << 12		@ set bit #12
	mcrle	p15, 0, r10, c15, c0, 1		@ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_857272
	mrc	p15, 0, r10, c15, c0, 1		@ read diagnostic register
	orr	r10, r10, #3 << 10		@ set bits #10 and #11
	mcr	p15, 0, r10, c15, c0, 1		@ write diagnostic register
#endif
	b	__errata_finish
431 | |
/*
 * PJ4B-specific setup: programs the implementation-defined Auxiliary
 * Debug/Functional Modes Control registers, then falls through to the
 * generic __v7_setup below.
 */
__v7_pj4b_setup:
#ifdef CONFIG_CPU_PJ4B

/* Auxiliary Debug Modes Control 1 Register */
#define PJ4B_STATIC_BP (1 << 2) /* Enable Static BP */
#define PJ4B_INTER_PARITY (1 << 8) /* Disable Internal Parity Handling */
#define PJ4B_CLEAN_LINE (1 << 16) /* Disable data transfer for clean line */

/* Auxiliary Debug Modes Control 2 Register */
#define PJ4B_FAST_LDR (1 << 23) /* Disable fast LDR */
#define PJ4B_SNOOP_DATA (1 << 25) /* Do not interleave write and snoop data */
#define PJ4B_CWF (1 << 27) /* Disable Critical Word First feature */
#define PJ4B_OUTSDNG_NC (1 << 29) /* Disable outstanding non cacheable rqst */
#define PJ4B_L1_REP_RR (1 << 30) /* L1 replacement - Strict round robin */
#define PJ4B_AUX_DBG_CTRL2 (PJ4B_SNOOP_DATA | PJ4B_CWF |\
			    PJ4B_OUTSDNG_NC | PJ4B_L1_REP_RR)

/* Auxiliary Functional Modes Control Register 0 */
#define PJ4B_SMP_CFB (1 << 1) /* Set SMP mode. Join the coherency fabric */
#define PJ4B_L1_PAR_CHK (1 << 2) /* Support L1 parity checking */
#define PJ4B_BROADCAST_CACHE (1 << 8) /* Broadcast Cache and TLB maintenance */

/* Auxiliary Debug Modes Control 0 Register */
#define PJ4B_WFI_WFE (1 << 22) /* WFI/WFE - serve the DVM and back to idle */

/* Auxiliary Debug Modes Control 1 Register */
	mrc	p15, 1,	r0, c15, c1, 1
	orr     r0, r0, #PJ4B_CLEAN_LINE
	orr     r0, r0, #PJ4B_INTER_PARITY
	bic	r0, r0, #PJ4B_STATIC_BP
	mcr	p15, 1,	r0, c15, c1, 1

/* Auxiliary Debug Modes Control 2 Register */
	mrc	p15, 1,	r0, c15, c1, 2
	bic	r0, r0, #PJ4B_FAST_LDR
	orr	r0, r0, #PJ4B_AUX_DBG_CTRL2
	mcr	p15, 1,	r0, c15, c1, 2

/* Auxiliary Functional Modes Control Register 0 */
	mrc	p15, 1,	r0, c15, c2, 0
#ifdef CONFIG_SMP
	orr	r0, r0, #PJ4B_SMP_CFB
#endif
	orr	r0, r0, #PJ4B_L1_PAR_CHK
	orr	r0, r0, #PJ4B_BROADCAST_CACHE
	mcr	p15, 1,	r0, c15, c2, 0

/* Auxiliary Debug Modes Control 0 Register */
	mrc	p15, 1,	r0, c15, c1, 0
	orr	r0, r0, #PJ4B_WFI_WFE
	mcr	p15, 1,	r0, c15, c1, 0

#endif /* CONFIG_CPU_PJ4B */
	/* falls through to __v7_setup */
485 | |
/*
 * Generic (non-MP-entry) setup path.  MP variants above branch directly
 * to __v7_setup_cont after setting ACTLR bits.
 */
__v7_setup:
	do_invalidate_l1

__v7_setup_cont:
	/* Decode the MIDR (r9) and dispatch to per-core errata handling. */
	and	r0, r9, #0xff000000		@ ARM?
	teq	r0, #0x41000000			@ implementer == 'A' (ARM Ltd)?
	bne	__errata_finish			@ non-ARM: no errata to apply
	and	r3, r9, #0x00f00000		@ variant
	and	r6, r9, #0x0000000f		@ revision
	orr	r6, r6, r3, lsr #20-4		@ combine variant and revision
	ubfx	r0, r9, #4, #12			@ primary part number

	/* Cortex-A8 Errata */
	ldr	r10, =0x00000c08		@ Cortex-A8 primary part number
	teq	r0, r10
	beq	__ca8_errata

	/* Cortex-A9 Errata */
	ldr	r10, =0x00000c09		@ Cortex-A9 primary part number
	teq	r0, r10
	beq	__ca9_errata

	/* Cortex-A12 Errata */
	ldr	r10, =0x00000c0d		@ Cortex-A12 primary part number
	teq	r0, r10
	beq	__ca12_errata

	/* Cortex-A17 Errata */
	ldr	r10, =0x00000c0e		@ Cortex-A17 primary part number
	teq	r0, r10
	beq	__ca17_errata

	/* Cortex-A15 Errata */
	ldr	r10, =0x00000c0f		@ Cortex-A15 primary part number
	teq	r0, r10
	beq	__ca15_errata

__errata_finish:
	mov	r10, #0
	mcr	p15, 0, r10, c7, c5, 0		@ I+BTB cache invalidate
#ifdef CONFIG_MMU
	mcr	p15, 0, r10, c8, c7, 0		@ invalidate I + D TLBs
	v7_ttb_setup r10, r4, r5, r8, r3	@ TTBCR, TTBRx setup
	ldr	r3, =PRRR			@ PRRR
	ldr	r6, =NMRR			@ NMRR
	mcr	p15, 0, r3, c10, c2, 0		@ write PRRR
	mcr	p15, 0, r6, c10, c2, 1		@ write NMRR
#endif
	dsb					@ Complete invalidations
#ifndef CONFIG_ARM_THUMBEE
	/* ThumbEE present but kernel support disabled: lock out userspace. */
	mrc	p15, 0, r0, c0, c1, 0		@ read ID_PFR0 for ThumbEE
	and	r0, r0, #(0xf << 12)		@ ThumbEE enabled field
	teq	r0, #(1 << 12)			@ check if ThumbEE is present
	bne	1f
	mov	r3, #0
	mcr	p14, 6, r3, c1, c0, 0		@ Initialize TEEHBR to 0
	mrc	p14, 6, r0, c0, c0, 0		@ load TEECR
	orr	r0, r0, #1			@ set the 1st bit in order to
	mcr	p14, 6, r0, c0, c0, 0		@ stop userspace TEEHBR access
1:
#endif
	/* Build the SCTLR value to return: clear v7_crval[0], set v7_crval[1].
	 * v7_crval is provided by the included proc-v7-2level/3level.S. */
	adr	r3, v7_crval
	ldmia	r3, {r3, r6}			@ r3 = clear mask, r6 = set mask
 ARM_BE8(orr	r6, r6, #1 << 25)		@ big-endian page tables
#ifdef CONFIG_SWP_EMULATE
	orr     r3, r3, #(1 << 10)              @ set SW bit in "clear"
	bic     r6, r6, #(1 << 10)              @ clear it in "mmuset"
#endif
	mrc	p15, 0, r0, c1, c0, 0		@ read control register
	bic	r0, r0, r3			@ clear the bits we don't want
	orr	r0, r0, r6			@ set the ones we do
 THUMB(	orr	r0, r0, #1 << 30 )		@ Thumb exceptions
	ret	lr				@ return to head.S:__ret
ENDPROC(__v7_setup)
560 | |
	__INITDATA

	.weak cpu_v7_bugs_init			@ may be overridden (e.g. Spectre code)

	@ define struct processor (see <asm/proc-fns.h> and proc-macros.S)
	define_processor_functions v7, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_bugs_init

#ifdef CONFIG_HARDEN_BRANCH_PREDICTOR
	@ generic v7 bpiall on context switch: identical to plain v7 except
	@ switch_mm, which comes from cpu_v7_bpiall_switch_mm via the macro name
	globl_equ	cpu_v7_bpiall_proc_init,	cpu_v7_proc_init
	globl_equ	cpu_v7_bpiall_proc_fin,		cpu_v7_proc_fin
	globl_equ	cpu_v7_bpiall_reset,		cpu_v7_reset
	globl_equ	cpu_v7_bpiall_do_idle,		cpu_v7_do_idle
	globl_equ	cpu_v7_bpiall_dcache_clean_area, cpu_v7_dcache_clean_area
	globl_equ	cpu_v7_bpiall_set_pte_ext,	cpu_v7_set_pte_ext
	globl_equ	cpu_v7_bpiall_suspend_size,	cpu_v7_suspend_size
#ifdef CONFIG_ARM_CPU_SUSPEND
	globl_equ	cpu_v7_bpiall_do_suspend,	cpu_v7_do_suspend
	globl_equ	cpu_v7_bpiall_do_resume,	cpu_v7_do_resume
#endif
	define_processor_functions v7_bpiall, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_bugs_init

#define HARDENED_BPIALL_PROCESSOR_FUNCTIONS v7_bpiall_processor_functions
#else
#define HARDENED_BPIALL_PROCESSOR_FUNCTIONS v7_processor_functions
#endif

#ifndef CONFIG_ARM_LPAE
	@ Cortex-A8 - always needs bpiall switch_mm implementation
	globl_equ	cpu_ca8_proc_init,	cpu_v7_proc_init
	globl_equ	cpu_ca8_proc_fin,	cpu_v7_proc_fin
	globl_equ	cpu_ca8_reset,		cpu_v7_reset
	globl_equ	cpu_ca8_do_idle,	cpu_v7_do_idle
	globl_equ	cpu_ca8_dcache_clean_area, cpu_v7_dcache_clean_area
	globl_equ	cpu_ca8_set_pte_ext,	cpu_v7_set_pte_ext
	globl_equ	cpu_ca8_switch_mm,	cpu_v7_bpiall_switch_mm
	globl_equ	cpu_ca8_suspend_size,	cpu_v7_suspend_size
#ifdef CONFIG_ARM_CPU_SUSPEND
	globl_equ	cpu_ca8_do_suspend,	cpu_v7_do_suspend
	globl_equ	cpu_ca8_do_resume,	cpu_v7_do_resume
#endif
	define_processor_functions ca8, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_ca8_ibe

	@ Cortex-A9 - needs more registers preserved across suspend/resume
	@ and bpiall switch_mm for hardening
	globl_equ	cpu_ca9mp_proc_init,	cpu_v7_proc_init
	globl_equ	cpu_ca9mp_proc_fin,	cpu_v7_proc_fin
	globl_equ	cpu_ca9mp_reset,	cpu_v7_reset
	globl_equ	cpu_ca9mp_do_idle,	cpu_v7_do_idle
	globl_equ	cpu_ca9mp_dcache_clean_area, cpu_v7_dcache_clean_area
#ifdef CONFIG_HARDEN_BRANCH_PREDICTOR
	globl_equ	cpu_ca9mp_switch_mm,	cpu_v7_bpiall_switch_mm
#else
	globl_equ	cpu_ca9mp_switch_mm,	cpu_v7_switch_mm
#endif
	globl_equ	cpu_ca9mp_set_pte_ext,	cpu_v7_set_pte_ext
	define_processor_functions ca9mp, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_bugs_init
#endif

	@ Cortex-A15 - needs iciallu switch_mm for hardening
	globl_equ	cpu_ca15_proc_init,	cpu_v7_proc_init
	globl_equ	cpu_ca15_proc_fin,	cpu_v7_proc_fin
	globl_equ	cpu_ca15_reset,		cpu_v7_reset
	globl_equ	cpu_ca15_do_idle,	cpu_v7_do_idle
	globl_equ	cpu_ca15_dcache_clean_area, cpu_v7_dcache_clean_area
#ifdef CONFIG_HARDEN_BRANCH_PREDICTOR
	globl_equ	cpu_ca15_switch_mm,	cpu_v7_iciallu_switch_mm
#else
	globl_equ	cpu_ca15_switch_mm,	cpu_v7_switch_mm
#endif
	globl_equ	cpu_ca15_set_pte_ext,	cpu_v7_set_pte_ext
	globl_equ	cpu_ca15_suspend_size,	cpu_v7_suspend_size
	globl_equ	cpu_ca15_do_suspend,	cpu_v7_do_suspend
	globl_equ	cpu_ca15_do_resume,	cpu_v7_do_resume
	define_processor_functions ca15, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_ca15_ibe
#ifdef CONFIG_CPU_PJ4B
	define_processor_functions pj4b, dabort=v7_early_abort, pabort=v7_pabort, suspend=1
#endif
639 | |
	.section ".rodata"

	string	cpu_arch_name, "armv7"
	string	cpu_elf_name, "v7"
	.align

	.section ".proc.info.init", "a"

	/*
	 * Standard v7 proc info content.
	 * NOTE: field order must match struct proc_info_list (see
	 * <asm/procinfo.h>) — TODO confirm against that header.
	 */
.macro __v7_proc name, initfunc, mm_mmuflags = 0, io_mmuflags = 0, hwcaps = 0, proc_fns = v7_processor_functions, cache_fns = v7_cache_fns
	/* section flags for kernel mappings (SMP/UP variants) */
	ALT_SMP(.long	PMD_TYPE_SECT | PMD_SECT_AP_WRITE | PMD_SECT_AP_READ | \
			PMD_SECT_AF | PMD_FLAGS_SMP | \mm_mmuflags)
	ALT_UP(.long	PMD_TYPE_SECT | PMD_SECT_AP_WRITE | PMD_SECT_AP_READ | \
			PMD_SECT_AF | PMD_FLAGS_UP | \mm_mmuflags)
	/* section flags for I/O mappings */
	.long	PMD_TYPE_SECT | PMD_SECT_AP_WRITE | \
		PMD_SECT_AP_READ | PMD_SECT_AF | \io_mmuflags
	initfn	\initfunc, \name		@ per-core __v7_*_setup hook
	.long	cpu_arch_name
	.long	cpu_elf_name
	.long	HWCAP_SWP | HWCAP_HALF | HWCAP_THUMB | HWCAP_FAST_MULT | \
		HWCAP_EDSP | HWCAP_TLS | \hwcaps
	.long	cpu_v7_name
	.long	\proc_fns
	.long	v7wbi_tlb_fns
	.long	v6_user_fns
	.long	\cache_fns
.endm
669 | |
#ifndef CONFIG_ARM_LPAE
	/*
	 * ARM Ltd. Cortex A5 processor.
	 * Each entry: MIDR match value, then mask (0xff0ffff0 ignores the
	 * variant bits [23:20] and revision bits [3:0]).
	 */
	.type   __v7_ca5mp_proc_info, #object
__v7_ca5mp_proc_info:
	.long	0x410fc050
	.long	0xff0ffff0
	__v7_proc __v7_ca5mp_proc_info, __v7_ca5mp_setup
	.size	__v7_ca5mp_proc_info, . - __v7_ca5mp_proc_info

	/*
	 * ARM Ltd. Cortex A9 processor.
	 */
	.type   __v7_ca9mp_proc_info, #object
__v7_ca9mp_proc_info:
	.long	0x410fc090
	.long	0xff0ffff0
	__v7_proc __v7_ca9mp_proc_info, __v7_ca9mp_setup, proc_fns = ca9mp_processor_functions
	.size	__v7_ca9mp_proc_info, . - __v7_ca9mp_proc_info

	/*
	 * ARM Ltd. Cortex A8 processor.
	 */
	.type	__v7_ca8_proc_info, #object
__v7_ca8_proc_info:
	.long	0x410fc080
	.long	0xff0ffff0
	__v7_proc __v7_ca8_proc_info, __v7_setup, proc_fns = ca8_processor_functions
	.size	__v7_ca8_proc_info, . - __v7_ca8_proc_info

#endif	/* CONFIG_ARM_LPAE */

	/*
	 * Marvell PJ4B processor.
	 */
#ifdef CONFIG_CPU_PJ4B
	.type   __v7_pj4b_proc_info, #object
__v7_pj4b_proc_info:
	.long	0x560f5800			@ wider mask: matches a family
	.long	0xff0fff00
	__v7_proc __v7_pj4b_proc_info, __v7_pj4b_setup, proc_fns = pj4b_processor_functions
	.size	__v7_pj4b_proc_info, . - __v7_pj4b_proc_info
#endif
714 | |
715 | /* |
716 | * ARM Ltd. Cortex R7 processor. |
717 | */ |
718 | .type __v7_cr7mp_proc_info, #object |
719 | __v7_cr7mp_proc_info: |
720 | .long 0x410fc170 |
721 | .long 0xff0ffff0 |
722 | __v7_proc __v7_cr7mp_proc_info, __v7_cr7mp_setup |
723 | .size __v7_cr7mp_proc_info, . - __v7_cr7mp_proc_info |
724 | |
725 | /* |
726 | * ARM Ltd. Cortex R8 processor. |
727 | */ |
728 | .type __v7_cr8mp_proc_info, #object |
729 | __v7_cr8mp_proc_info: |
730 | .long 0x410fc180 |
731 | .long 0xff0ffff0 |
732 | __v7_proc __v7_cr8mp_proc_info, __v7_cr8mp_setup |
733 | .size __v7_cr8mp_proc_info, . - __v7_cr8mp_proc_info |
734 | |
735 | /* |
736 | * ARM Ltd. Cortex A7 processor. |
737 | */ |
738 | .type __v7_ca7mp_proc_info, #object |
739 | __v7_ca7mp_proc_info: |
740 | .long 0x410fc070 |
741 | .long 0xff0ffff0 |
742 | __v7_proc __v7_ca7mp_proc_info, __v7_ca7mp_setup |
743 | .size __v7_ca7mp_proc_info, . - __v7_ca7mp_proc_info |
744 | |
745 | /* |
746 | * ARM Ltd. Cortex A12 processor. |
747 | */ |
748 | .type __v7_ca12mp_proc_info, #object |
749 | __v7_ca12mp_proc_info: |
750 | .long 0x410fc0d0 |
751 | .long 0xff0ffff0 |
752 | __v7_proc __v7_ca12mp_proc_info, __v7_ca12mp_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS |
753 | .size __v7_ca12mp_proc_info, . - __v7_ca12mp_proc_info |
754 | |
755 | /* |
756 | * ARM Ltd. Cortex A15 processor. |
757 | */ |
758 | .type __v7_ca15mp_proc_info, #object |
759 | __v7_ca15mp_proc_info: |
760 | .long 0x410fc0f0 |
761 | .long 0xff0ffff0 |
762 | __v7_proc __v7_ca15mp_proc_info, __v7_ca15mp_setup, proc_fns = ca15_processor_functions |
763 | .size __v7_ca15mp_proc_info, . - __v7_ca15mp_proc_info |
764 | |
765 | /* |
766 | * Broadcom Corporation Brahma-B15 processor. |
767 | */ |
768 | .type __v7_b15mp_proc_info, #object |
769 | __v7_b15mp_proc_info: |
770 | .long 0x420f00f0 |
771 | .long 0xff0ffff0 |
772 | __v7_proc __v7_b15mp_proc_info, __v7_b15mp_setup, proc_fns = ca15_processor_functions, cache_fns = b15_cache_fns |
773 | .size __v7_b15mp_proc_info, . - __v7_b15mp_proc_info |
774 | |
775 | /* |
776 | * ARM Ltd. Cortex A17 processor. |
777 | */ |
778 | .type __v7_ca17mp_proc_info, #object |
779 | __v7_ca17mp_proc_info: |
780 | .long 0x410fc0e0 |
781 | .long 0xff0ffff0 |
782 | __v7_proc __v7_ca17mp_proc_info, __v7_ca17mp_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS |
783 | .size __v7_ca17mp_proc_info, . - __v7_ca17mp_proc_info |
784 | |
	/* ARM Ltd. Cortex A73 processor (v8 core running AArch32) */
	.type	__v7_ca73_proc_info, #object
__v7_ca73_proc_info:
	.long	0x410fd090
	.long	0xff0ffff0
	__v7_proc __v7_ca73_proc_info, __v7_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS
	.size	__v7_ca73_proc_info, . - __v7_ca73_proc_info

	/* ARM Ltd. Cortex A75 processor (v8 core running AArch32) */
	.type	__v7_ca75_proc_info, #object
__v7_ca75_proc_info:
	.long	0x410fd0a0
	.long	0xff0ffff0
	__v7_proc __v7_ca75_proc_info, __v7_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS
	.size	__v7_ca75_proc_info, . - __v7_ca75_proc_info

	/*
	 * Qualcomm Inc. Krait processors.
	 */
	.type	__krait_proc_info, #object
__krait_proc_info:
	.long	0x510f0400		@ Required ID value
	.long	0xff0ffc00		@ Mask for ID (ignores bits [9:4] and [3:0])
	/*
	 * Some Krait processors don't indicate support for SDIV and UDIV
	 * instructions in the ARM instruction set, even though they actually
	 * do support them. They also don't indicate support for fused multiply
	 * instructions even though they actually do support them.
	 */
	__v7_proc __krait_proc_info, __v7_setup, hwcaps = HWCAP_IDIV | HWCAP_VFPv4
	.size	__krait_proc_info, . - __krait_proc_info

	/*
	 * Match any ARMv7 processor core (catch-all: must stay last so that
	 * the more specific entries above take precedence).
	 */
	.type	__v7_proc_info, #object
__v7_proc_info:
	.long	0x000f0000		@ Required ID value (architecture field == 0xF)
	.long	0x000f0000		@ Mask for ID
	__v7_proc __v7_proc_info, __v7_setup
	.size	__v7_proc_info, . - __v7_proc_info