/*
 * Copyright (c) 2018, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>
#include <assert_macros.S>
#include <lib/xlat_tables/xlat_tables_v2.h>

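/*
 * Both functions below program the translation registers from
 * mmu_cfg_params and then turn the MMU on. mmu_cfg_params is an array of
 * 64-bit entries populated by the xlat tables library, which is why each
 * parameter index below is scaled by 8 (index << 3) to form a byte offset
 * and why 64-bit values are loaded as two 32-bit words.
 */
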
	.global	enable_mmu_direct_svc_mon
	.global	enable_mmu_direct_hyp

	/* void enable_mmu_direct_svc_mon(unsigned int flags) */
func enable_mmu_direct_svc_mon
	/* Assert that MMU is turned off */
#if ENABLE_ASSERTIONS
	ldcopr	r1, SCTLR
	tst	r1, #SCTLR_M_BIT
	ASM_ASSERT(eq)
#endif

	/* Invalidate TLB entries */
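	/*
	 * TLBIALL ignores the value written to it; r0 only provides the
	 * operand register that the macro needs.
	 */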
	TLB_INVALIDATE(r0, TLBIALL)

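	/*
	 * Stash the flags argument (passed in r0, per the AAPCS) in r3, as r0
	 * is repurposed below to address the parameter array.
	 */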
	mov	r3, r0
	ldr	r0, =mmu_cfg_params

	/* MAIR0. Only the lower 32 bits are used. */
	ldr	r1, [r0, #(MMU_CFG_MAIR << 3)]
	stcopr	r1, MAIR0

	/* TTBCR. Only the lower 32 bits are used. */
	ldr	r2, [r0, #(MMU_CFG_TCR << 3)]
	stcopr	r2, TTBCR

	/* TTBR0 */
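	/*
	 * With the long-descriptor format TTBR0 is 64 bits wide: load the low
	 * and high words separately and write them together (stcopr16 issues
	 * a single 64-bit MCRR write).
	 */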
	ldr	r1, [r0, #(MMU_CFG_TTBR0 << 3)]
	ldr	r2, [r0, #((MMU_CFG_TTBR0 << 3) + 4)]
	stcopr16	r1, r2, TTBR0_64

	/* TTBR1 is unused right now; set it to 0. */
	mov	r1, #0
	mov	r2, #0
	stcopr16	r1, r2, TTBR1_64

	/*
	 * Ensure all translation table writes have drained into memory, the
	 * TLB invalidation is complete, and translation register writes are
	 * committed before enabling the MMU.
	 */
	dsb	ish
	isb

	/* Enable MMU by honoring flags */
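	/*
	 * M enables the MMU, C the data cache; WXN makes writable memory
	 * execute-never.
	 */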
	ldcopr	r1, SCTLR
	ldr	r2, =(SCTLR_WXN_BIT | SCTLR_C_BIT | SCTLR_M_BIT)
	orr	r1, r1, r2

	/* Clear C bit if requested */
	tst	r3, #DISABLE_DCACHE
	bicne	r1, r1, #SCTLR_C_BIT

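	/*
	 * Write SCTLR back; the ISB ensures the MMU is enabled before the
	 * next instruction is fetched and executed.
	 */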
	stcopr	r1, SCTLR
	isb

	bx	lr
endfunc enable_mmu_direct_svc_mon

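/*
 * HYP (PL2) variant of the sequence above, using the HSCTLR, HMAIR0, HTCR
 * and HTTBR registers. The Hyp translation regime has a single translation
 * table base register, so there is no TTBR1 step here.
 */
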
71 /* void enable_mmu_direct_hyp(unsigned int flags) */
72func enable_mmu_direct_hyp
73 /* Assert that MMU is turned off */
74#if ENABLE_ASSERTIONS
75 ldcopr r1, HSCTLR
76 tst r1, #HSCTLR_M_BIT
77 ASM_ASSERT(eq)
78#endif
79
80 /* Invalidate TLB entries */
81 TLB_INVALIDATE(r0, TLBIALL)
82
83 mov r3, r0
84 ldr r0, =mmu_cfg_params
85
	/* HMAIR0. Only the lower 32 bits are used. */
	ldr	r1, [r0, #(MMU_CFG_MAIR << 3)]
	stcopr	r1, HMAIR0

	/* HTCR. Only the lower 32 bits are used. */
	ldr	r2, [r0, #(MMU_CFG_TCR << 3)]
	stcopr	r2, HTCR

	/* HTTBR */
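	/*
	 * HTTBR is 64-bit: write both words with a single MCRR, as for TTBR0
	 * above.
	 */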
	ldr	r1, [r0, #(MMU_CFG_TTBR0 << 3)]
	ldr	r2, [r0, #((MMU_CFG_TTBR0 << 3) + 4)]
	stcopr16	r1, r2, HTTBR_64

	/*
	 * Ensure all translation table writes have drained into memory, the
	 * TLB invalidation is complete, and translation register writes are
	 * committed before enabling the MMU.
	 */
	dsb	ish
	isb

	/* Enable MMU by honoring flags */
	ldcopr	r1, HSCTLR
	ldr	r2, =(HSCTLR_WXN_BIT | HSCTLR_C_BIT | HSCTLR_M_BIT)
	orr	r1, r1, r2

	/* Clear C bit if requested */
	tst	r3, #DISABLE_DCACHE
	bicne	r1, r1, #HSCTLR_C_BIT

	stcopr	r1, HSCTLR
	isb

	bx	lr
endfunc enable_mmu_direct_hyp