/* SPDX-License-Identifier: GPL-2.0-only */
/*
 *  linux/arch/arm/kernel/head-nommu.S
 *
 *  Copyright (C) 1994-2002 Russell King
 *  Copyright (C) 2003-2006 Hyok S. Choi
 *
 *  Common kernel startup code (non-paged MM)
 */
#include <linux/linkage.h>
#include <linux/init.h>
#include <linux/errno.h>

#include <asm/assembler.h>
#include <asm/ptrace.h>
#include <asm/asm-offsets.h>
#include <asm/page.h>
#include <asm/cp15.h>
#include <asm/thread_info.h>
#include <asm/v7m.h>
#include <asm/mpu.h>

/*
 * Kernel startup entry point.
 * ---------------------------
 *
 * This is normally called from the decompressor code.  The requirements
 * are: MMU = off, D-cache = off, I-cache = don't care, r0 = 0,
 * r1 = machine nr.
 *
 * See linux/arch/arm/tools/mach-types for the complete list of machine
 * numbers for r1.
 *
 */

	__HEAD

#ifdef CONFIG_CPU_THUMBONLY
	.thumb
ENTRY(stext)
#else
	.arm
ENTRY(stext)

 THUMB(	badr	r9, 1f		)	@ Kernel is always entered in ARM.
 THUMB(	bx	r9		)	@ If this is a Thumb-2 kernel,
 THUMB(	.thumb			)	@ switch to Thumb now.
 THUMB(1:			)
#endif

#ifdef CONFIG_ARM_VIRT_EXT
	bl	__hyp_stub_install
#endif
	@ ensure svc mode and all interrupts masked
	safe_svcmode_maskall r9
						@ and irqs disabled
#if defined(CONFIG_CPU_CP15)
	mrc	p15, 0, r9, c0, c0		@ get processor id
#elif defined(CONFIG_CPU_V7M)
	ldr	r9, =BASEADDR_V7M_SCB
	ldr	r9, [r9, V7M_SCB_CPUID]
#else
	ldr	r9, =CONFIG_PROCESSOR_ID
#endif
	bl	__lookup_processor_type		@ r5=procinfo r9=cpuid
	movs	r10, r5				@ invalid processor (r5=0)?
	beq	__error_p			@ yes, error 'p'

#ifdef CONFIG_ARM_MPU
	bl	__setup_mpu
#endif

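	/*
	 * Tail-call the per-CPU init function from procinfo: it returns to
	 * 1: below, since lr was set PIC-relative with badr.  There lr is
	 * repointed at __mmap_switched, so __after_proc_init's closing
	 * 'ret lr' continues straight into the common startup path.
	 */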
	badr	lr, 1f				@ return (PIC) address
	ldr	r12, [r10, #PROCINFO_INITFUNC]
	add	r12, r12, r10
	ret	r12
1:	ldr	lr, =__mmap_switched
	b	__after_proc_init
ENDPROC(stext)

#ifdef CONFIG_SMP
	.text
ENTRY(secondary_startup)
	/*
	 * Common entry point for secondary CPUs.
	 *
	 * Ensure that we're in SVC mode, and IRQs are disabled.  Look up
	 * the processor type - there is no need to check the machine type
	 * as it has already been validated by the primary processor.
	 */
#ifdef CONFIG_ARM_VIRT_EXT
	bl	__hyp_stub_install_secondary
#endif
	safe_svcmode_maskall r9

#ifndef CONFIG_CPU_CP15
	ldr	r9, =CONFIG_PROCESSOR_ID
#else
	mrc	p15, 0, r9, c0, c0		@ get processor id
#endif
	bl	__lookup_processor_type		@ r5=procinfo r9=cpuid
	movs	r10, r5				@ invalid processor?
	beq	__error_p			@ yes, error 'p'

	ldr	r7, __secondary_data

#ifdef CONFIG_ARM_MPU
	bl	__secondary_setup_mpu		@ Initialize the MPU
#endif

	badr	lr, 1f				@ return (PIC) address
	ldr	r12, [r10, #PROCINFO_INITFUNC]
	add	r12, r12, r10
	ret	r12
1:	bl	__after_proc_init
	ldr	r7, __secondary_data		@ reload r7
	ldr	sp, [r7, #12]			@ set up the stack pointer
	ldr	r0, [r7, #16]			@ set up task pointer
	mov	fp, #0
	b	secondary_start_kernel
ENDPROC(secondary_startup)

	.type	__secondary_data, %object
__secondary_data:
	.long	secondary_data
#endif /* CONFIG_SMP */
/*
 * Set the Control Register (SCTLR via CP15, or the CCR on v7-M) using
 * the value the CPU init function returned in r0.
 */
	.text
__after_proc_init:
M_CLASS(movw	r12, #:lower16:BASEADDR_V7M_SCB)
M_CLASS(movt	r12, #:upper16:BASEADDR_V7M_SCB)
#ifdef CONFIG_ARM_MPU
M_CLASS(ldr	r3, [r12, 0x50])
AR_CLASS(mrc	p15, 0, r3, c0, c1, 4)		@ Read ID_MMFR0
	and	r3, r3, #(MMFR0_PMSA)		@ PMSA field
	teq	r3, #(MMFR0_PMSAv7)		@ PMSA v7
	beq	1f
	teq	r3, #(MMFR0_PMSAv8)		@ PMSA v8
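	/*
	 * Note: nothing below alters the flags, so the 'eq' from this test
	 * (or from the PMSAv7 test above, via the branch to 1:) still
	 * predicates the conditional MPU-enable instructions after the 1:
	 * label - the MPU is only switched on if a known PMSA was found.
	 */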
	/*
	 * Memory region attributes for PMSAv8:
	 *
	 *   n = AttrIndx[2:0]
	 *			n	MAIR
	 *   DEVICE_nGnRnE	000	00000000
	 *   NORMAL		001	11111111
	 */
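	/*
	 * Given that PMSAv8_MAIR(attr, n) places attr in byte n (as the
	 * table above implies), the ldreq below composes MAIR0 = 0x0000ff00:
	 * AttrIndx 0 is Device-nGnRnE, AttrIndx 1 is Normal write-back.
	 */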
	ldreq	r3, =PMSAv8_MAIR(0x00, PMSAv8_RGN_DEVICE_nGnRnE) | \
		     PMSAv8_MAIR(0xff, PMSAv8_RGN_NORMAL)
AR_CLASS(mcreq	p15, 0, r3, c10, c2, 0)		@ MAIR 0
M_CLASS(streq	r3, [r12, #PMSAv8_MAIR0])
	moveq	r3, #0
AR_CLASS(mcreq	p15, 0, r3, c10, c2, 1)		@ MAIR 1
M_CLASS(streq	r3, [r12, #PMSAv8_MAIR1])

1:
#endif
#ifdef CONFIG_CPU_CP15
	/*
	 * CP15 system control register value returned in r0 from
	 * the CPU init function.
	 */

#ifdef CONFIG_ARM_MPU
	biceq	r0, r0, #CR_BR			@ Disable the 'default mem-map'
	orreq	r0, r0, #CR_M			@ Set SCTLR.M (MPU on)
#endif
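	/*
	 * Alignment faults only need trapping on pre-v6 cores: v6 and later
	 * handle unaligned loads/stores in hardware, so CR_A stays clear.
	 */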
#if defined(CONFIG_ALIGNMENT_TRAP) && __LINUX_ARM_ARCH__ < 6
	orr	r0, r0, #CR_A
#else
	bic	r0, r0, #CR_A
#endif
#ifdef CONFIG_CPU_DCACHE_DISABLE
	bic	r0, r0, #CR_C
#endif
#ifdef CONFIG_CPU_BPREDICT_DISABLE
	bic	r0, r0, #CR_Z
#endif
#ifdef CONFIG_CPU_ICACHE_DISABLE
	bic	r0, r0, #CR_I
#endif
	mcr	p15, 0, r0, c1, c0, 0		@ write control reg
	instr_sync
#elif defined (CONFIG_CPU_V7M)
#ifdef CONFIG_ARM_MPU
	ldreq	r3, [r12, MPU_CTRL]
	biceq	r3, #MPU_CTRL_PRIVDEFENA
	orreq	r3, #MPU_CTRL_ENABLE
	streq	r3, [r12, MPU_CTRL]
	isb
#endif
	/* For V7M systems we want to modify the CCR similarly to the SCTLR */
#ifdef CONFIG_CPU_DCACHE_DISABLE
	bic	r0, r0, #V7M_SCB_CCR_DC
#endif
#ifdef CONFIG_CPU_BPREDICT_DISABLE
	bic	r0, r0, #V7M_SCB_CCR_BP
#endif
#ifdef CONFIG_CPU_ICACHE_DISABLE
	bic	r0, r0, #V7M_SCB_CCR_IC
#endif
	str	r0, [r12, V7M_SCB_CCR]
	/* Pass exc_ret to __mmap_switched */
	mov	r0, r10
#endif /* CONFIG_CPU_CP15 elif CONFIG_CPU_V7M */
	ret	lr
ENDPROC(__after_proc_init)
	.ltorg

#ifdef CONFIG_ARM_MPU

#ifndef CONFIG_CPU_V7M
/* Set which MPU region should be programmed */
.macro	set_region_nr tmp, rgnr, unused
	mov	\tmp, \rgnr			@ Use static region numbers
	mcr	p15, 0, \tmp, c6, c2, 0		@ Write RGNR
.endm

/* Setup a single MPU region, either D or I side (D-side for unified) */
.macro	setup_region bar, acr, sr, side = PMSAv7_DATA_SIDE, unused
	mcr	p15, 0, \bar, c6, c1, (0 + \side)	@ I/DRBAR
	mcr	p15, 0, \acr, c6, c1, (4 + \side)	@ I/DRACR
	mcr	p15, 0, \sr, c6, c1, (2 + \side)	@ I/DRSR
.endm
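/*
 * Example invocation (see __setup_pmsa_v7 below):
 *	setup_region r0, r5, r6, PMSAv7_DATA_SIDE, r12
 * The final argument is ignored here; callers pass the SCB base (r12) so
 * the same call site also works with the v7-M variant, which needs it.
 */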
#else
.macro	set_region_nr tmp, rgnr, base
	mov	\tmp, \rgnr
	str	\tmp, [\base, #PMSAv7_RNR]
.endm

.macro	setup_region bar, acr, sr, unused, base
	lsl	\acr, \acr, #16
	orr	\acr, \acr, \sr
	str	\bar, [\base, #PMSAv7_RBAR]
	str	\acr, [\base, #PMSAv7_RASR]
.endm
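/*
 * v7-M has no separate RACR/RSR: the attribute bits live in RASR[31:16]
 * and the size/enable bits in RASR[15:0], hence the lsl #16 / orr above
 * to merge both halves before the single RASR store.
 */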

#endif
/*
 * Setup the MPU and initial MPU Regions. We create the following regions:
 * Region 0: Use this for probing the MPU details, so leave disabled.
 * Region 1: Background region - covers the whole of RAM as strongly ordered
 * Region 2: Normal, Shared, cacheable for RAM. From PHYS_OFFSET, size from r6
 * Region 3: Normal, shared, inaccessible from PL0 to protect the vectors page
 *
 * r6: Value to be written to DRSR (and IRSR if required) for PMSAv7_RAM_REGION
 */
	__HEAD

ENTRY(__setup_mpu)

	/* Probe for v7 PMSA compliance */
M_CLASS(movw	r12, #:lower16:BASEADDR_V7M_SCB)
M_CLASS(movt	r12, #:upper16:BASEADDR_V7M_SCB)

AR_CLASS(mrc	p15, 0, r0, c0, c1, 4)		@ Read ID_MMFR0
M_CLASS(ldr	r0, [r12, 0x50])
	and	r0, r0, #(MMFR0_PMSA)		@ PMSA field
	teq	r0, #(MMFR0_PMSAv7)		@ PMSA v7
	beq	__setup_pmsa_v7
	teq	r0, #(MMFR0_PMSAv8)		@ PMSA v8
	beq	__setup_pmsa_v8

	ret	lr
ENDPROC(__setup_mpu)

ENTRY(__setup_pmsa_v7)
	/* Calculate the size of a region covering just the kernel */
	ldr	r5, =PLAT_PHYS_OFFSET		@ Region start: PHYS_OFFSET
	ldr	r6, =(_end)			@ Cover whole kernel
	sub	r6, r6, r5			@ Minimum size of region to map
	clz	r6, r6				@ Region size must be 2^N...
	rsb	r6, r6, #31			@ ...so round up region size
	lsl	r6, r6, #PMSAv7_RSR_SZ		@ Put size in right field
	orr	r6, r6, #(1 << PMSAv7_RSR_EN)	@ Set region enabled bit
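	/*
	 * Worked example: a 5 MiB image gives clz = 9, so r6 = 22 and,
	 * with DRSR sizing regions as 2^(SZ + 1) bytes, the region spans
	 * 8 MiB - the smallest power of two that covers the image (an
	 * exactly power-of-two image still rounds up one step).
	 */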

	/*
	 * Determine whether the D/I-side memory map is unified.  We set the
	 * flags here and continue to use them for the rest of this function.
	 */
AR_CLASS(mrc	p15, 0, r0, c0, c0, 4)		@ MPUIR
M_CLASS(ldr	r0, [r12, #MPU_TYPE])
	ands	r5, r0, #MPUIR_DREGION_SZMASK	@ 0 size d region => No MPU
	bxeq	lr
	tst	r0, #MPUIR_nU			@ MPUIR_nU = 0 for unified

	/* Setup second region first to free up r6 */
	set_region_nr r0, #PMSAv7_RAM_REGION, r12
	isb
	/* Full access from PL0, PL1, shared for CONFIG_SMP, cacheable */
	ldr	r0, =PLAT_PHYS_OFFSET		@ RAM starts at PHYS_OFFSET
	ldr	r5, =(PMSAv7_AP_PL1RW_PL0RW | PMSAv7_RGN_NORMAL)

	setup_region r0, r5, r6, PMSAv7_DATA_SIDE, r12	@ PHYS_OFFSET, shared, enabled
	beq	1f					@ Memory-map not unified
	setup_region r0, r5, r6, PMSAv7_INSTR_SIDE, r12	@ PHYS_OFFSET, shared, enabled
1:	isb

	/* First/background region */
	set_region_nr r0, #PMSAv7_BG_REGION, r12
	isb
	/* Execute Never, strongly ordered, inaccessible to PL0, rw PL1 */
	mov	r0, #0				@ BG region starts at 0x0
	ldr	r5, =(PMSAv7_ACR_XN | PMSAv7_RGN_STRONGLY_ORDERED | PMSAv7_AP_PL1RW_PL0NA)
	mov	r6, #PMSAv7_RSR_ALL_MEM		@ 4GB region, enabled

	setup_region r0, r5, r6, PMSAv7_DATA_SIDE, r12	@ 0x0, BG region, enabled
	beq	2f					@ Memory-map not unified
	setup_region r0, r5, r6, PMSAv7_INSTR_SIDE, r12	@ 0x0, BG region, enabled
2:	isb

#ifdef CONFIG_XIP_KERNEL
	set_region_nr r0, #PMSAv7_ROM_REGION, r12
	isb

	ldr	r5, =(PMSAv7_AP_PL1RO_PL0NA | PMSAv7_RGN_NORMAL)

	ldr	r0, =CONFIG_XIP_PHYS_ADDR	@ ROM start
	ldr	r6, =(_exiprom)			@ ROM end
	sub	r6, r6, r0			@ Minimum size of region to map
	clz	r6, r6				@ Region size must be 2^N...
	rsb	r6, r6, #31			@ ...so round up region size
	lsl	r6, r6, #PMSAv7_RSR_SZ		@ Put size in right field
	orr	r6, r6, #(1 << PMSAv7_RSR_EN)	@ Set region enabled bit

	setup_region r0, r5, r6, PMSAv7_DATA_SIDE, r12	@ XIP_PHYS_ADDR, shared, enabled
	beq	3f					@ Memory-map not unified
	setup_region r0, r5, r6, PMSAv7_INSTR_SIDE, r12	@ XIP_PHYS_ADDR, shared, enabled
3:	isb
#endif
	ret	lr
ENDPROC(__setup_pmsa_v7)

ENTRY(__setup_pmsa_v8)
	mov	r0, #0
AR_CLASS(mcr	p15, 0, r0, c6, c2, 1)		@ PRSEL
M_CLASS(str	r0, [r12, #PMSAv8_RNR])
	isb
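	/*
	 * Each PMSAv8 region below is programmed as a base/limit pair:
	 * PRBAR takes the region start with the SH/AP/XN bits in its low
	 * bits, PRLAR takes the *inclusive* limit (hence the 'sub #1's
	 * below) together with the MAIR attribute index and the enable
	 * bit, with the limit aligned down to PMSAv8_MINALIGN.
	 */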

#ifdef CONFIG_XIP_KERNEL
	ldr	r5, =CONFIG_XIP_PHYS_ADDR	@ ROM start
	ldr	r6, =(_exiprom)			@ ROM end
	sub	r6, r6, #1
	bic	r6, r6, #(PMSAv8_MINALIGN - 1)

	orr	r5, r5, #(PMSAv8_AP_PL1RW_PL0NA | PMSAv8_RGN_SHARED)
	orr	r6, r6, #(PMSAv8_LAR_IDX(PMSAv8_RGN_NORMAL) | PMSAv8_LAR_EN)

AR_CLASS(mcr	p15, 0, r5, c6, c8, 0)		@ PRBAR0
AR_CLASS(mcr	p15, 0, r6, c6, c8, 1)		@ PRLAR0
M_CLASS(str	r5, [r12, #PMSAv8_RBAR_A(0)])
M_CLASS(str	r6, [r12, #PMSAv8_RLAR_A(0)])
#endif

	ldr	r5, =KERNEL_START
	ldr	r6, =KERNEL_END
	sub	r6, r6, #1
	bic	r6, r6, #(PMSAv8_MINALIGN - 1)

	orr	r5, r5, #(PMSAv8_AP_PL1RW_PL0NA | PMSAv8_RGN_SHARED)
	orr	r6, r6, #(PMSAv8_LAR_IDX(PMSAv8_RGN_NORMAL) | PMSAv8_LAR_EN)

AR_CLASS(mcr	p15, 0, r5, c6, c8, 4)		@ PRBAR1
AR_CLASS(mcr	p15, 0, r6, c6, c8, 5)		@ PRLAR1
M_CLASS(str	r5, [r12, #PMSAv8_RBAR_A(1)])
M_CLASS(str	r6, [r12, #PMSAv8_RLAR_A(1)])

	/* Setup Background: 0x0 - min(KERNEL_START, XIP_PHYS_ADDR) */
#ifdef CONFIG_XIP_KERNEL
	ldr	r6, =KERNEL_START
	ldr	r5, =CONFIG_XIP_PHYS_ADDR
	cmp	r6, r5
	movcs	r6, r5
#else
	ldr	r6, =KERNEL_START
#endif
	cmp	r6, #0
	beq	1f

	mov	r5, #0
	sub	r6, r6, #1
	bic	r6, r6, #(PMSAv8_MINALIGN - 1)

	orr	r5, r5, #(PMSAv8_AP_PL1RW_PL0NA | PMSAv8_RGN_SHARED | PMSAv8_BAR_XN)
	orr	r6, r6, #(PMSAv8_LAR_IDX(PMSAv8_RGN_DEVICE_nGnRnE) | PMSAv8_LAR_EN)

AR_CLASS(mcr	p15, 0, r5, c6, c9, 0)		@ PRBAR2
AR_CLASS(mcr	p15, 0, r6, c6, c9, 1)		@ PRLAR2
M_CLASS(str	r5, [r12, #PMSAv8_RBAR_A(2)])
M_CLASS(str	r6, [r12, #PMSAv8_RLAR_A(2)])

1:
	/* Setup Background: max(KERNEL_END, _exiprom) - 0xffffffff */
#ifdef CONFIG_XIP_KERNEL
	ldr	r5, =KERNEL_END
	ldr	r6, =(_exiprom)
	cmp	r5, r6
	movcc	r5, r6
#else
	ldr	r5, =KERNEL_END
#endif
	mov	r6, #0xffffffff
	bic	r6, r6, #(PMSAv8_MINALIGN - 1)
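	@ The limit is inclusive with bits [5:0] implied, so masking
	@ 0xffffffff down still encodes the very top of the address space.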

	orr	r5, r5, #(PMSAv8_AP_PL1RW_PL0NA | PMSAv8_RGN_SHARED | PMSAv8_BAR_XN)
	orr	r6, r6, #(PMSAv8_LAR_IDX(PMSAv8_RGN_DEVICE_nGnRnE) | PMSAv8_LAR_EN)

AR_CLASS(mcr	p15, 0, r5, c6, c9, 4)		@ PRBAR3
AR_CLASS(mcr	p15, 0, r6, c6, c9, 5)		@ PRLAR3
M_CLASS(str	r5, [r12, #PMSAv8_RBAR_A(3)])
M_CLASS(str	r6, [r12, #PMSAv8_RLAR_A(3)])

#ifdef CONFIG_XIP_KERNEL
	/* Setup Background: min(_exiprom, KERNEL_END) - max(KERNEL_START, XIP_PHYS_ADDR) */
	ldr	r5, =(_exiprom)
	ldr	r6, =KERNEL_END
	cmp	r5, r6
	movcs	r5, r6

	ldr	r6, =KERNEL_START
	ldr	r0, =CONFIG_XIP_PHYS_ADDR
	cmp	r6, r0
	movcc	r6, r0

	sub	r6, r6, #1
	bic	r6, r6, #(PMSAv8_MINALIGN - 1)

	orr	r5, r5, #(PMSAv8_AP_PL1RW_PL0NA | PMSAv8_RGN_SHARED | PMSAv8_BAR_XN)
	orr	r6, r6, #(PMSAv8_LAR_IDX(PMSAv8_RGN_DEVICE_nGnRnE) | PMSAv8_LAR_EN)

#ifdef CONFIG_CPU_V7M
	/* There is no alias for n == 4 */
	mov	r0, #4
	str	r0, [r12, #PMSAv8_RNR]		@ PRSEL
	isb

	str	r5, [r12, #PMSAv8_RBAR_A(0)]
	str	r6, [r12, #PMSAv8_RLAR_A(0)]
#else
	mcr	p15, 0, r5, c6, c10, 0		@ PRBAR4
	mcr	p15, 0, r6, c6, c10, 1		@ PRLAR4
#endif
#endif
	ret	lr
ENDPROC(__setup_pmsa_v8)

#ifdef CONFIG_SMP
/*
 * r6: pointer to mpu_rgn_info
 */

	.text
ENTRY(__secondary_setup_mpu)
	/* Use MPU region info supplied by __cpu_up */
	ldr	r6, [r7]			@ get secondary_data.mpu_rgn_info

	/* Probe for v7 PMSA compliance */
	mrc	p15, 0, r0, c0, c1, 4		@ Read ID_MMFR0
	and	r0, r0, #(MMFR0_PMSA)		@ PMSA field
	teq	r0, #(MMFR0_PMSAv7)		@ PMSA v7
	beq	__secondary_setup_pmsa_v7
	teq	r0, #(MMFR0_PMSAv8)		@ PMSA v8
	beq	__secondary_setup_pmsa_v8
	b	__error_p
ENDPROC(__secondary_setup_mpu)

/*
 * r6: pointer to mpu_rgn_info
 */
ENTRY(__secondary_setup_pmsa_v7)
	/*
	 * Determine whether the D/I-side memory map is unified.  We set the
	 * flags here and continue to use them for the rest of this function.
	 */
	mrc	p15, 0, r0, c0, c0, 4		@ MPUIR
	ands	r5, r0, #MPUIR_DREGION_SZMASK	@ 0 size d region => No MPU
	beq	__error_p

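	/*
	 * Point r3 one past the last region saved by the boot CPU
	 * (r3 = rgns + used * MPU_RNG_SIZE); the loop below then restores
	 * the regions highest-numbered first.
	 */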
	ldr	r4, [r6, #MPU_RNG_INFO_USED]
	mov	r5, #MPU_RNG_SIZE
	add	r3, r6, #MPU_RNG_INFO_RNGS
	mla	r3, r4, r5, r3

1:
	tst	r0, #MPUIR_nU			@ MPUIR_nU = 0 for unified
	sub	r3, r3, #MPU_RNG_SIZE
	sub	r4, r4, #1

	set_region_nr r0, r4
	isb

	ldr	r0, [r3, #MPU_RGN_DRBAR]
	ldr	r6, [r3, #MPU_RGN_DRSR]
	ldr	r5, [r3, #MPU_RGN_DRACR]

	setup_region r0, r5, r6, PMSAv7_DATA_SIDE
	beq	2f
	setup_region r0, r5, r6, PMSAv7_INSTR_SIDE
2:	isb

	mrc	p15, 0, r0, c0, c0, 4		@ Reevaluate the MPUIR
	cmp	r4, #0
	bgt	1b

	ret	lr
ENDPROC(__secondary_setup_pmsa_v7)

ENTRY(__secondary_setup_pmsa_v8)
	ldr	r4, [r6, #MPU_RNG_INFO_USED]
#ifndef CONFIG_XIP_KERNEL
	add	r4, r4, #1
#endif
	mov	r5, #MPU_RNG_SIZE
	add	r3, r6, #MPU_RNG_INFO_RNGS
	mla	r3, r4, r5, r3
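	@ As in the v7 path: r3 = one past the last saved region; walk back.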

1:
	sub	r3, r3, #MPU_RNG_SIZE
	sub	r4, r4, #1

	mcr	p15, 0, r4, c6, c2, 1		@ PRSEL
	isb

	ldr	r5, [r3, #MPU_RGN_PRBAR]
	ldr	r6, [r3, #MPU_RGN_PRLAR]

	mcr	p15, 0, r5, c6, c3, 0		@ PRBAR
	mcr	p15, 0, r6, c6, c3, 1		@ PRLAR

	cmp	r4, #0
	bgt	1b

	ret	lr
ENDPROC(__secondary_setup_pmsa_v8)
#endif /* CONFIG_SMP */
#endif /* CONFIG_ARM_MPU */
#include "head-common.S"