/*
 * Copyright (c) 2017-2022, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <context.h>
#include <services/arm_arch_svc.h>
	.globl	wa_cve_2017_5715_mmu_vbar

/*
 * ESR_EL3 syndrome values for an SMC #0 trapped from AArch64 (EC=0x17)
 * and AArch32 (EC=0x13) state, with a zero immediate/ISS. Used below to
 * recognise the SMCCC_ARCH_WORKAROUND_* fast path.
 */
#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

/* Replacement EL3 vector table that applies the CVE-2017-5715 mitigation */
vector_base wa_cve_2017_5715_mmu_vbar
	/*
	 * CVE-2017-5715 (Spectre variant 2) mitigation.
	 *
	 * Toggles SCTLR_EL3.M (MMU off, then back on). NOTE(review): on the
	 * CPUs this vector table is installed for, this toggle is the
	 * branch-predictor invalidation sequence — confirm against the
	 * CPU-specific errata documentation.
	 *
	 * x0/x1 are preserved via the context save area on SP_EL3; the
	 * condition flags are clobbered.
	 *
	 * \_is_sync_exception: non-zero for the sync vector, enabling the
	 *                      early-return fast path for the workaround SMC.
	 * \_esr_el3_val:       expected ESR_EL3 value for SMC #0 from the
	 *                      originating execution state (A64 or A32).
	 */
	.macro apply_cve_2017_5715_wa _is_sync_exception _esr_el3_val
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	mrs	x1, sctlr_el3
	/* Disable MMU */
	bic	x1, x1, #SCTLR_M_BIT
	msr	sctlr_el3, x1
	isb
	/* Enable MMU */
	orr	x1, x1, #SCTLR_M_BIT
	msr	sctlr_el3, x1
	/*
	 * Defer ISB to avoid synchronizing twice in case we hit
	 * the workaround SMC call which will implicitly synchronize
	 * because of the ERET instruction.
	 */

	/*
	 * Ensure SMC is coming from A64/A32 state on #0
	 * with W0 = SMCCC_ARCH_WORKAROUND_1 or W0 = SMCCC_ARCH_WORKAROUND_3
	 *
	 * This sequence evaluates as:
	 *	(W0==SMCCC_ARCH_WORKAROUND_1) || (W0==SMCCC_ARCH_WORKAROUND_3) ?
	 *	(ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation
	 */
	.if \_is_sync_exception
	orr	w1, wzr, #SMCCC_ARCH_WORKAROUND_1
	cmp	w0, w1
	orr	w1, wzr, #SMCCC_ARCH_WORKAROUND_3
	/*
	 * If the first cmp was EQ, flags := #4 (nzcv Z=1, i.e. stay EQ);
	 * otherwise compare W0 against SMCCC_ARCH_WORKAROUND_3.
	 */
	ccmp	w0, w1, #4, ne
	mrs	x0, esr_el3
	mov_imm	w1, \_esr_el3_val
	/*
	 * If either workaround ID matched (EQ), compare ESR_EL3 against the
	 * expected SMC #0 syndrome; otherwise flags := #0 (nzcv clear, NE).
	 */
	ccmp	w0, w1, #0, eq
	/* Static predictor will predict a fall through */
	bne	1f
	/* Workaround SMC itself: return immediately (ERET synchronizes) */
	exception_return
1:
	.endif

	/*
	 * Synchronize now to enable the MMU. This is required
	 * to ensure the load pair below reads the data stored earlier.
	 */
	isb
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	.endm
65
	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 *
	 * No workaround is applied for exceptions taken from EL3 itself;
	 * these entries forward straight to the default runtime handlers.
	 * ---------------------------------------------------------------------
	 */
vector_entry mmu_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry mmu_sync_exception_sp_el0

vector_entry mmu_irq_sp_el0
	b	irq_sp_el0
end_vector_entry mmu_irq_sp_el0

vector_entry mmu_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry mmu_fiq_sp_el0

vector_entry mmu_serror_sp_el0
	b	serror_sp_el0
end_vector_entry mmu_serror_sp_el0
Dimitris Papastamos570c06a2018-04-06 15:29:34 +010085
	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 *
	 * Exceptions taken from EL3 with SP_EL3: no workaround needed,
	 * forward directly to the default runtime handlers.
	 * ---------------------------------------------------------------------
	 */
vector_entry mmu_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry mmu_sync_exception_sp_elx

vector_entry mmu_irq_sp_elx
	b	irq_sp_elx
end_vector_entry mmu_irq_sp_elx

vector_entry mmu_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry mmu_fiq_sp_elx

vector_entry mmu_serror_sp_elx
	b	serror_sp_elx
end_vector_entry mmu_serror_sp_elx
Dimitris Papastamos570c06a2018-04-06 15:29:34 +0100105
	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 *
	 * Entries from a lower EL run the CVE-2017-5715 workaround before
	 * forwarding to the default handlers. Only the sync entry enables
	 * the SMCCC_ARCH_WORKAROUND_{1,3} early-return path, matched
	 * against the AArch64 SMC #0 syndrome.
	 * ---------------------------------------------------------------------
	 */
vector_entry mmu_sync_exception_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
	b	sync_exception_aarch64
end_vector_entry mmu_sync_exception_aarch64

vector_entry mmu_irq_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	irq_aarch64
end_vector_entry mmu_irq_aarch64

vector_entry mmu_fiq_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	fiq_aarch64
end_vector_entry mmu_fiq_aarch64

vector_entry mmu_serror_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	serror_aarch64
end_vector_entry mmu_serror_aarch64
Dimitris Papastamos570c06a2018-04-06 15:29:34 +0100129
	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 *
	 * Same as the AArch64 lower-EL entries, but the sync entry matches
	 * the AArch32 SMC #0 syndrome for the workaround fast path.
	 * ---------------------------------------------------------------------
	 */
vector_entry mmu_sync_exception_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
	b	sync_exception_aarch32
end_vector_entry mmu_sync_exception_aarch32

vector_entry mmu_irq_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	irq_aarch32
end_vector_entry mmu_irq_aarch32

vector_entry mmu_fiq_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	fiq_aarch32
end_vector_entry mmu_fiq_aarch32

vector_entry mmu_serror_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	serror_aarch32
end_vector_entry mmu_serror_aarch32