Etienne Carriere | 4ece755 | 2017-11-05 22:56:10 +0100 | [diff] [blame] | 1 | /* |
John Powell | 7f7c6fa | 2022-04-14 19:10:17 -0500 | [diff] [blame] | 2 | * Copyright (c) 2016-2022, Arm Limited and Contributors. All rights reserved. |
Etienne Carriere | 4ece755 | 2017-11-05 22:56:10 +0100 | [diff] [blame] | 3 | * |
| 4 | * SPDX-License-Identifier: BSD-3-Clause |
| 5 | */ |
| 6 | |
| 7 | #include <arch.h> |
| 8 | #include <asm_macros.S> |
| 9 | #include <assert_macros.S> |
| 10 | #include <cortex_a15.h> |
| 11 | #include <cpu_macros.S> |
| 12 | |
/*
 * Cortex-A15 supports LPAE and the Virtualization Extensions.
 * We do not care whether the configuration uses LPAE and/or VE,
 * so we do not check ARCH_IS_ARMV7_WITH_LPAE/VE here.
 */
| 18 | |
	/*
	 * Assertion on the state of SCTLR.C (data cache enable bit).
	 * Compiled in only when ENABLE_ASSERTIONS is set.
	 * Clobbers: r0, flags.
	 *
	 * NOTE(review): 'tst r0, #SCTLR_C_BIT' sets Z (condition eq) when
	 * the C bit is 0, so ASM_ASSERT(eq) passes only when the data cache
	 * is DISABLED — the opposite of what the macro name suggests. The
	 * power-down sequences below invoke this before a set/way flush,
	 * where a disabled cache is the usual requirement; confirm the
	 * intended polarity against ASM_ASSERT in assert_macros.S.
	 */
	.macro assert_cache_enabled
#if ENABLE_ASSERTIONS
		ldcopr	r0, SCTLR
		tst	r0, #SCTLR_C_BIT
		ASM_ASSERT(eq)
#endif
	.endm
| 26 | |
/*
 * void cortex_a15_disable_smp(void)
 *
 * Clear ACTLR.SMP to take this core out of intra-cluster coherency.
 * Clobbers: r0.
 */
func cortex_a15_disable_smp
	ldcopr	r0, ACTLR
	bic	r0, #CORTEX_A15_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb			/* ensure the SMP bit change is visible before continuing */
#if ERRATA_A15_816470
	/*
	 * Invalidate any TLB address: erratum 816470 workaround, issued
	 * between the ACTLR write and the final DSB (applies from r3p0,
	 * see check_errata_816470).
	 */
	mov	r0, #0
	stcopr	r0, TLBIMVA
#endif
	dsb	sy		/* complete all outstanding memory/TLB operations */
	bx	lr
endfunc cortex_a15_disable_smp
| 42 | |
/*
 * void cortex_a15_enable_smp(void)
 *
 * Set ACTLR.SMP so this core participates in intra-cluster coherency.
 * Clobbers: r0.
 */
func cortex_a15_enable_smp
	ldcopr	r0, ACTLR
	orr	r0, #CORTEX_A15_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb			/* synchronize the ACTLR update */
	bx	lr
endfunc cortex_a15_enable_smp
| 50 | |
Ambroise Vincent | d4a51eb | 2019-03-04 16:56:26 +0000 | [diff] [blame] | 51 | /* ---------------------------------------------------- |
| 52 | * Errata Workaround for Cortex A15 Errata #816470. |
| 53 | * This applies only to revision >= r3p0 of Cortex A15. |
| 54 | * ---------------------------------------------------- |
| 55 | */ |
/*
 * Report whether erratum 816470 applies.
 * Returns: r0 = ERRATA_APPLIES (unconditionally).
 */
func check_errata_816470
	/*
	 * Even though this is only needed for revision >= r3p0, it is always
	 * applied because of the low cost of the workaround.
	 */
	mov	r0, #ERRATA_APPLIES
	bx	lr
endfunc check_errata_816470
| 64 | |
Ambroise Vincent | 68b3812 | 2019-03-05 09:54:21 +0000 | [diff] [blame] | 65 | /* ---------------------------------------------------- |
| 66 | * Errata Workaround for Cortex A15 Errata #827671. |
| 67 | * This applies only to revision >= r3p0 of Cortex A15. |
| 68 | * Inputs: |
| 69 | * r0: variant[4:7] and revision[0:3] of current cpu. |
| 70 | * Shall clobber: r0-r3 |
| 71 | * ---------------------------------------------------- |
| 72 | */ |
func errata_a15_827671_wa
	/*
	 * Compare r0 against revision r3p0 (via check_errata_827671) and,
	 * if the erratum applies, set CORTEX_A15_ACTLR2_INV_DCC_BIT in
	 * ACTLR2. lr is preserved in r2 across the bl; per the header
	 * comment above, r0-r3 may be clobbered.
	 */
	mov	r2, lr
	bl	check_errata_827671
	cmp	r0, #ERRATA_NOT_APPLIES
	beq	1f			/* skip the workaround on unaffected revisions */
	ldcopr	r0, CORTEX_A15_ACTLR2
	orr	r0, #CORTEX_A15_ACTLR2_INV_DCC_BIT
	stcopr	r0, CORTEX_A15_ACTLR2
	isb				/* synchronize the ACTLR2 update */
1:
	bx	r2			/* return via the saved link register */
endfunc errata_a15_827671_wa
| 88 | |
/*
 * Check whether erratum 827671 applies to the revision in r0.
 * Tail-calls cpu_rev_var_hs with r1 = 0x30 (r3p0), i.e. "revision
 * higher or same than r3p0"; the result is returned in r0.
 */
func check_errata_827671
	mov	r1, #0x30
	b	cpu_rev_var_hs
endfunc check_errata_827671
| 93 | |
/*
 * Report the CVE-2017-5715 (Spectre v2) mitigation status.
 * Returns: r0 = ERRATA_APPLIES when the workaround is built in,
 * ERRATA_MISSING otherwise.
 */
func check_errata_cve_2017_5715
#if WORKAROUND_CVE_2017_5715
	mov	r0, #ERRATA_APPLIES
#else
	mov	r0, #ERRATA_MISSING
#endif
	bx	lr
endfunc check_errata_cve_2017_5715
| 102 | |
/*
 * Report the CVE-2022-23960 (Spectre-BHB) mitigation status.
 * Returns: r0 = ERRATA_APPLIES when the workaround is built in,
 * ERRATA_MISSING otherwise.
 */
func check_errata_cve_2022_23960
#if WORKAROUND_CVE_2022_23960
	mov	r0, #ERRATA_APPLIES
#else
	mov	r0, #ERRATA_MISSING
#endif
	bx	lr
endfunc check_errata_cve_2022_23960
| 111 | |
#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A15. Must follow AAPCS.
 *
 * NOTE(review): r4 is written below without being saved, while only
 * r12/lr are pushed; under AAPCS r4 is callee-saved. Presumably r4 is
 * the input convention of the report_errata macro — confirm against
 * cpu_macros.S and that callers tolerate a clobbered r4.
 */
func cortex_a15_errata_report
	push	{r12, lr}

	/* Fetch the cpu revision/variant once and keep it in r4 for all
	 * the report_errata invocations below. */
	bl	cpu_get_rev_var
	mov	r4, r0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * checking functions of each errata.
	 */
	report_errata ERRATA_A15_816470, cortex_a15, 816470
	report_errata ERRATA_A15_827671, cortex_a15, 827671
	report_errata WORKAROUND_CVE_2017_5715, cortex_a15, cve_2017_5715
	report_errata WORKAROUND_CVE_2022_23960, cortex_a15, cve_2022_23960

	pop	{r12, lr}
	bx	lr
endfunc cortex_a15_errata_report
#endif
| 135 | |
/*
 * Cortex-A15 reset handler: apply build-time-selected errata/CVE
 * workarounds, then enable SMP coherency.
 * Clobbers: r0-r3 (via the erratum workaround), r5.
 */
func cortex_a15_reset_func
	mov	r5, lr			/* preserve lr across the bl calls below */
	bl	cpu_get_rev_var		/* r0 = variant/revision of this cpu */

#if ERRATA_A15_827671
	/* r0 still holds the revision info expected by the workaround */
	bl	errata_a15_827671_wa
#endif

#if IMAGE_BL32 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960)
	/* Set CORTEX_A15_ACTLR_INV_BTB_BIT in ACTLR (BTB invalidation,
	 * per the bit name) as part of the Spectre mitigations. */
	ldcopr	r0, ACTLR
	orr	r0, #CORTEX_A15_ACTLR_INV_BTB_BIT
	stcopr	r0, ACTLR
	/* Point both the monitor (MVBAR) and non-monitor (VBAR) vector
	 * bases at the icache-invalidating workaround vectors. */
	ldr	r0, =wa_cve_2017_5715_icache_inv_vbar
	stcopr	r0, VBAR
	stcopr	r0, MVBAR
	/* isb will be applied in the course of the reset func */
#endif

	mov	lr, r5
	/* Tail-call: enable_smp's 'bx lr' returns to this function's caller */
	b	cortex_a15_enable_smp
endfunc cortex_a15_reset_func
| 157 | |
/*
 * Core power-down handler: check SCTLR.C state, flush the L1 data
 * cache by set/way, then exit SMP coherency.
 */
func cortex_a15_core_pwr_dwn
	push	{r12, lr}

	/* Build-time assertion on the SCTLR.C state (see the
	 * assert_cache_enabled macro above); clobbers r0. */
	assert_cache_enabled

	/* Flush L1 cache */
	mov	r0, #DC_OP_CISW		/* clean+invalidate by set/way */
	bl	dcsw_op_level1

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a15_disable_smp	/* tail-call; returns to our caller */
endfunc cortex_a15_core_pwr_dwn
| 171 | |
/*
 * Cluster power-down handler: like the core handler, but additionally
 * lets the platform disable the ACP (Accelerator Coherency Port)
 * before coherency is exited.
 */
func cortex_a15_cluster_pwr_dwn
	push	{r12, lr}

	/* Build-time assertion on the SCTLR.C state (see the
	 * assert_cache_enabled macro above); clobbers r0. */
	assert_cache_enabled

	/* Flush L1 caches */
	mov	r0, #DC_OP_CISW		/* clean+invalidate by set/way */
	bl	dcsw_op_level1

	bl	plat_disable_acp	/* platform hook to disable the ACP */

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a15_disable_smp	/* tail-call; returns to our caller */
endfunc cortex_a15_cluster_pwr_dwn
| 187 | |
/*
 * Register the Cortex-A15 cpu_ops entry: MIDR to match against, the
 * reset handler, and the core-/cluster-level power-down handlers.
 */
declare_cpu_ops cortex_a15, CORTEX_A15_MIDR, \
	cortex_a15_reset_func, \
	cortex_a15_core_pwr_dwn, \
	cortex_a15_cluster_pwr_dwn