/*
 * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <context.h>
#include <services/arm_arch_svc.h>

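/*
 * Exception vectors that apply the CVE-2017-5715 ("Spectre variant 2",
 * branch target injection) workaround on entry to EL3. On the CPUs that
 * install this vector table, disabling and re-enabling the MMU at EL3
 * has the side effect of invalidating the branch predictor, discarding
 * any predictor state planted by code running at lower ELs.
 */
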
        .globl  wa_cve_2017_5715_mmu_vbar

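/*
 * ESR_EL3 values for an SMC with immediate #0: EC (bits [31:26]) is
 * 0x17 for an SMC from AArch64 state and 0x13 for an SMC from AArch32
 * state, IL (bit 25) is 1, and the ISS field is 0.
 */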
#define ESR_EL3_A64_SMC0        0x5e000000
#define ESR_EL3_A32_SMC0        0x4e000000

vector_base wa_cve_2017_5715_mmu_vbar

        .macro  apply_cve_2017_5715_wa _is_sync_exception _esr_el3_val
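        /*
         * Stash X0 and X1 in the context save area so they can be used
         * as scratch registers here; the fall-through path restores
         * them with the LDP at the end of the macro. Toggling the MMU
         * below is the step that invalidates the branch predictor on
         * the CPUs this workaround targets.
         */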
        stp     x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
        mrs     x1, sctlr_el3
        /* Disable MMU */
        bic     x1, x1, #SCTLR_M_BIT
        msr     sctlr_el3, x1
        isb
        /* Enable MMU */
        orr     x1, x1, #SCTLR_M_BIT
        msr     sctlr_el3, x1
        /*
         * Defer the ISB to avoid synchronizing twice in case we hit
         * the workaround SMC call, which will synchronize implicitly
         * because of its ERET instruction.
         */

        /*
         * Ensure the SMC came from A64/A32 state with immediate #0
         * and with W0 = SMCCC_ARCH_WORKAROUND_1.
         *
         * This sequence evaluates as:
         *    (W0==SMCCC_ARCH_WORKAROUND_1) ? (ESR_EL3==SMC#0) : (NE)
         * allowing use of a single branch operation.
         */
        .if \_is_sync_exception
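        /*
         * SMCCC_ARCH_WORKAROUND_1 (0x8000_8000) is a repeating bit
         * pattern and therefore a valid logical immediate, so it can
         * be materialized with a single ORR against WZR rather than a
         * MOVZ/MOVK sequence.
         */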
        orr     w1, wzr, #SMCCC_ARCH_WORKAROUND_1
        cmp     w0, w1
        mrs     x0, esr_el3
        mov_imm w1, \_esr_el3_val
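        /*
         * If the W0 comparison above matched (EQ), compare ESR_EL3
         * against the expected SMC #0 encoding; otherwise CCMP sets
         * NZCV to #0, leaving Z clear so the branch below is taken.
         */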
        ccmp    w0, w1, #0, eq
        /* The static predictor will predict a fall through */
        bne     1f
        eret
1:
        .endif

        /*
         * Synchronize now so that the MMU is re-enabled before the
         * load pair below reads back the data stored earlier.
         */
        isb
        ldp     x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
        .endm

        /* ---------------------------------------------------------------------
         * Current EL with SP_EL0 : 0x0 - 0x200
         * ---------------------------------------------------------------------
         */
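        /*
         * Exceptions taken from EL3 itself originate from firmware code
         * rather than from a lower, potentially hostile EL, so these
         * entries (and the SP_ELx ones below) branch straight to the
         * default handlers without applying the workaround.
         */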
vector_entry mmu_sync_exception_sp_el0
        b       sync_exception_sp_el0
end_vector_entry mmu_sync_exception_sp_el0

vector_entry mmu_irq_sp_el0
        b       irq_sp_el0
end_vector_entry mmu_irq_sp_el0

vector_entry mmu_fiq_sp_el0
        b       fiq_sp_el0
end_vector_entry mmu_fiq_sp_el0

vector_entry mmu_serror_sp_el0
        b       serror_sp_el0
end_vector_entry mmu_serror_sp_el0

        /* ---------------------------------------------------------------------
         * Current EL with SP_ELx: 0x200 - 0x400
         * ---------------------------------------------------------------------
         */
vector_entry mmu_sync_exception_sp_elx
        b       sync_exception_sp_elx
end_vector_entry mmu_sync_exception_sp_elx

vector_entry mmu_irq_sp_elx
        b       irq_sp_elx
end_vector_entry mmu_irq_sp_elx

vector_entry mmu_fiq_sp_elx
        b       fiq_sp_elx
end_vector_entry mmu_fiq_sp_elx

vector_entry mmu_serror_sp_elx
        b       serror_sp_elx
end_vector_entry mmu_serror_sp_elx

        /* ---------------------------------------------------------------------
         * Lower EL using AArch64 : 0x400 - 0x600
         * ---------------------------------------------------------------------
         */
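        /*
         * Each lower-EL entry applies the workaround first and then
         * falls through to the corresponding default runtime vector.
         * For the synchronous entries, an SMCCC_ARCH_WORKAROUND_1 SMC
         * returns directly from the macro via ERET, since the
         * mitigation has already run by that point.
         */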
vector_entry mmu_sync_exception_aarch64
        apply_cve_2017_5715_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
        b       sync_exception_aarch64
end_vector_entry mmu_sync_exception_aarch64

vector_entry mmu_irq_aarch64
        apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
        b       irq_aarch64
end_vector_entry mmu_irq_aarch64

vector_entry mmu_fiq_aarch64
        apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
        b       fiq_aarch64
end_vector_entry mmu_fiq_aarch64

vector_entry mmu_serror_aarch64
        apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
        b       serror_aarch64
end_vector_entry mmu_serror_aarch64

        /* ---------------------------------------------------------------------
         * Lower EL using AArch32 : 0x600 - 0x800
         * ---------------------------------------------------------------------
         */
vector_entry mmu_sync_exception_aarch32
        apply_cve_2017_5715_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
        b       sync_exception_aarch32
end_vector_entry mmu_sync_exception_aarch32

vector_entry mmu_irq_aarch32
        apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
        b       irq_aarch32
end_vector_entry mmu_irq_aarch32

vector_entry mmu_fiq_aarch32
        apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
        b       fiq_aarch32
end_vector_entry mmu_fiq_aarch32

vector_entry mmu_serror_aarch32
        apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
        b       serror_aarch32
end_vector_entry mmu_serror_aarch32