/*
 * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <arm_arch_svc.h>
#include <asm_macros.S>
#include <context.h>

	.globl	wa_cve_2017_5715_mmu_vbar

/* ESR_EL3 values for an SMC #0 taken from AArch64 and AArch32 state */
#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

vector_base wa_cve_2017_5715_mmu_vbar

Dimitris Papastamos66946332018-05-31 11:38:33 +010019 .macro apply_cve_2017_5715_wa _is_sync_exception _esr_el3_val
Dimitris Papastamos570c06a2018-04-06 15:29:34 +010020 stp x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
21 mrs x1, sctlr_el3
22 /* Disable MMU */
23 bic x1, x1, #SCTLR_M_BIT
24 msr sctlr_el3, x1
25 isb
26 /* Enable MMU */
27 orr x1, x1, #SCTLR_M_BIT
28 msr sctlr_el3, x1
29 /*
30 * Defer ISB to avoid synchronizing twice in case we hit
31 * the workaround SMC call which will implicitly synchronize
32 * because of the ERET instruction.
33 */
34
35 /*
Dimitris Papastamos66946332018-05-31 11:38:33 +010036 * Ensure SMC is coming from A64/A32 state on #0
Dimitris Papastamos570c06a2018-04-06 15:29:34 +010037 * with W0 = SMCCC_ARCH_WORKAROUND_1
38 *
39 * This sequence evaluates as:
40 * (W0==SMCCC_ARCH_WORKAROUND_1) ? (ESR_EL3==SMC#0) : (NE)
41 * allowing use of a single branch operation
42 */
43 .if \_is_sync_exception
44 orr w1, wzr, #SMCCC_ARCH_WORKAROUND_1
45 cmp w0, w1
46 mrs x0, esr_el3
Dimitris Papastamos66946332018-05-31 11:38:33 +010047 mov_imm w1, \_esr_el3_val
Dimitris Papastamos570c06a2018-04-06 15:29:34 +010048 ccmp w0, w1, #0, eq
49 /* Static predictor will predict a fall through */
50 bne 1f
51 eret
521:
53 .endif
54
55 /*
56 * Synchronize now to enable the MMU. This is required
57 * to ensure the load pair below reads the data stored earlier.
58 */
59 isb
60 ldp x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
61 .endm
62
63 /* ---------------------------------------------------------------------
64 * Current EL with SP_EL0 : 0x0 - 0x200
65 * ---------------------------------------------------------------------
66 */
67vector_entry mmu_sync_exception_sp_el0
68 b sync_exception_sp_el0
69 check_vector_size mmu_sync_exception_sp_el0
70
71vector_entry mmu_irq_sp_el0
72 b irq_sp_el0
73 check_vector_size mmu_irq_sp_el0
74
75vector_entry mmu_fiq_sp_el0
76 b fiq_sp_el0
77 check_vector_size mmu_fiq_sp_el0
78
79vector_entry mmu_serror_sp_el0
80 b serror_sp_el0
81 check_vector_size mmu_serror_sp_el0
82
83 /* ---------------------------------------------------------------------
84 * Current EL with SP_ELx: 0x200 - 0x400
85 * ---------------------------------------------------------------------
86 */
87vector_entry mmu_sync_exception_sp_elx
88 b sync_exception_sp_elx
89 check_vector_size mmu_sync_exception_sp_elx
90
91vector_entry mmu_irq_sp_elx
92 b irq_sp_elx
93 check_vector_size mmu_irq_sp_elx
94
95vector_entry mmu_fiq_sp_elx
96 b fiq_sp_elx
97 check_vector_size mmu_fiq_sp_elx
98
99vector_entry mmu_serror_sp_elx
100 b serror_sp_elx
101 check_vector_size mmu_serror_sp_elx
102
103 /* ---------------------------------------------------------------------
104 * Lower EL using AArch64 : 0x400 - 0x600
105 * ---------------------------------------------------------------------
106 */
107vector_entry mmu_sync_exception_aarch64
Dimitris Papastamos66946332018-05-31 11:38:33 +0100108 apply_cve_2017_5715_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
Dimitris Papastamos570c06a2018-04-06 15:29:34 +0100109 b sync_exception_aarch64
110 check_vector_size mmu_sync_exception_aarch64
111
112vector_entry mmu_irq_aarch64
Dimitris Papastamos66946332018-05-31 11:38:33 +0100113 apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
Dimitris Papastamos570c06a2018-04-06 15:29:34 +0100114 b irq_aarch64
115 check_vector_size mmu_irq_aarch64
116
117vector_entry mmu_fiq_aarch64
Dimitris Papastamos66946332018-05-31 11:38:33 +0100118 apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
Dimitris Papastamos570c06a2018-04-06 15:29:34 +0100119 b fiq_aarch64
120 check_vector_size mmu_fiq_aarch64
121
122vector_entry mmu_serror_aarch64
Dimitris Papastamos66946332018-05-31 11:38:33 +0100123 apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
Dimitris Papastamos570c06a2018-04-06 15:29:34 +0100124 b serror_aarch64
125 check_vector_size mmu_serror_aarch64
126
127 /* ---------------------------------------------------------------------
128 * Lower EL using AArch32 : 0x600 - 0x800
129 * ---------------------------------------------------------------------
130 */
131vector_entry mmu_sync_exception_aarch32
Dimitris Papastamos66946332018-05-31 11:38:33 +0100132 apply_cve_2017_5715_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
Dimitris Papastamos570c06a2018-04-06 15:29:34 +0100133 b sync_exception_aarch32
134 check_vector_size mmu_sync_exception_aarch32
135
136vector_entry mmu_irq_aarch32
Dimitris Papastamos66946332018-05-31 11:38:33 +0100137 apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
Dimitris Papastamos570c06a2018-04-06 15:29:34 +0100138 b irq_aarch32
139 check_vector_size mmu_irq_aarch32
140
141vector_entry mmu_fiq_aarch32
Dimitris Papastamos66946332018-05-31 11:38:33 +0100142 apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
Dimitris Papastamos570c06a2018-04-06 15:29:34 +0100143 b fiq_aarch32
144 check_vector_size mmu_fiq_aarch32
145
146vector_entry mmu_serror_aarch32
Dimitris Papastamos66946332018-05-31 11:38:33 +0100147 apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
Dimitris Papastamos570c06a2018-04-06 15:29:34 +0100148 b serror_aarch32
149 check_vector_size mmu_serror_aarch32