Merge pull request #1228 from dp-arm/dp/cve_2017_5715
Workarounds for CVE-2017-5715 on Cortex-A9/A15/A17, plus serial console reporting
diff --git a/bl32/sp_min/aarch32/entrypoint.S b/bl32/sp_min/aarch32/entrypoint.S
index b2b7953..e7528d3 100644
--- a/bl32/sp_min/aarch32/entrypoint.S
+++ b/bl32/sp_min/aarch32/entrypoint.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -17,6 +17,8 @@
.globl sp_min_vector_table
.globl sp_min_entrypoint
.globl sp_min_warm_entrypoint
+ .globl sp_min_handle_smc
+ .globl sp_min_handle_fiq
.macro route_fiq_to_sp_min reg
/* -----------------------------------------------------
@@ -43,12 +45,12 @@
vector_base sp_min_vector_table
b sp_min_entrypoint
b plat_panic_handler /* Undef */
- b handle_smc /* Syscall */
+ b sp_min_handle_smc /* Syscall */
b plat_panic_handler /* Prefetch abort */
b plat_panic_handler /* Data abort */
b plat_panic_handler /* Reserved */
b plat_panic_handler /* IRQ */
- b handle_fiq /* FIQ */
+ b sp_min_handle_fiq /* FIQ */
/*
@@ -151,7 +153,7 @@
/*
* SMC handling function for SP_MIN.
*/
-func handle_smc
+func sp_min_handle_smc
/* On SMC entry, `sp` points to `smc_ctx_t`. Save `lr`. */
str lr, [sp, #SMC_CTX_LR_MON]
@@ -199,12 +201,12 @@
/* `r0` points to `smc_ctx_t` */
b sp_min_exit
-endfunc handle_smc
+endfunc sp_min_handle_smc
/*
* Secure Interrupts handling function for SP_MIN.
*/
-func handle_fiq
+func sp_min_handle_fiq
#if !SP_MIN_WITH_SECURE_FIQ
b plat_panic_handler
#else
@@ -242,7 +244,7 @@
b sp_min_exit
#endif
-endfunc handle_fiq
+endfunc sp_min_handle_fiq
/*
* The Warm boot entrypoint for SP_MIN.
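Note: the `handle_smc`/`handle_fiq` handlers gain an `sp_min_` prefix and become global because the new workaround vector files added below live in separate objects and must branch to SP_MIN's handlers by name once the exception entry has been decoded.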
diff --git a/bl32/sp_min/sp_min.mk b/bl32/sp_min/sp_min.mk
index 56489a3..67a1981 100644
--- a/bl32/sp_min/sp_min.mk
+++ b/bl32/sp_min/sp_min.mk
@@ -1,5 +1,5 @@
#
-# Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+# Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
@@ -26,6 +26,11 @@
BL32_SOURCES += lib/extensions/amu/aarch32/amu.c
endif
+ifeq (${WORKAROUND_CVE_2017_5715},1)
+BL32_SOURCES += bl32/sp_min/workaround_cve_2017_5715_bpiall.S \
+ bl32/sp_min/workaround_cve_2017_5715_icache_inv.S
+endif
+
BL32_LINKERFILE := bl32/sp_min/sp_min.ld.S
# Include the platform-specific SP_MIN Makefile
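Both workaround vector implementations are compiled only when the WORKAROUND_CVE_2017_5715 build option is enabled. In upstream TF-A this option defaults to 1, so an ordinary SP_MIN build such as `make PLAT=<platform> ARCH=aarch32 AARCH32_SP=sp_min all` picks them up unless the flag is explicitly set to 0.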
diff --git a/bl32/sp_min/workaround_cve_2017_5715_bpiall.S b/bl32/sp_min/workaround_cve_2017_5715_bpiall.S
new file mode 100644
index 0000000..5387cef
--- /dev/null
+++ b/bl32/sp_min/workaround_cve_2017_5715_bpiall.S
@@ -0,0 +1,74 @@
+/*
+ * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <asm_macros.S>
+
+ .globl workaround_bpiall_runtime_exceptions
+
+vector_base workaround_bpiall_runtime_exceptions
+ /* We encode the exception entry in the bottom 3 bits of SP */
+ add sp, sp, #1 /* Reset: 0b111 */
+ add sp, sp, #1 /* Undef: 0b110 */
+ add sp, sp, #1 /* Syscall: 0b101 */
+ add sp, sp, #1 /* Prefetch abort: 0b100 */
+ add sp, sp, #1 /* Data abort: 0b011 */
+ add sp, sp, #1 /* Reserved: 0b010 */
+ add sp, sp, #1 /* IRQ: 0b001 */
+ nop /* FIQ: 0b000 */
+
+ /*
+ * Invalidate the branch predictor. `r0` passes a dummy
+ * value that the BPIALL operation ignores.
+ */
+ stcopr r0, BPIALL
+ isb
+
+ /*
+ * As we cannot use any temporary registers and cannot
+ * clobber SP, we decode the exception entry using
+ * an unrolled binary search.
+ *
+ * Note, if this code is re-used by other secure payloads,
+ * the below exception entry vectors must be changed to
+ * the vectors specific to that secure payload.
+ */
+
+ tst sp, #4
+ bne 1f
+
+ tst sp, #2
+ bne 3f
+
+ /* Expected encoding: 0x1 and 0x0 */
+ tst sp, #1
+ /* Restore original value of SP by clearing the bottom 3 bits */
+ bic sp, sp, #0x7
+ bne plat_panic_handler /* IRQ */
+ b sp_min_handle_fiq /* FIQ */
+
+1:
+ tst sp, #2
+ bne 2f
+
+ /* Expected encoding: 0x4 and 0x5 */
+ tst sp, #1
+ bic sp, sp, #0x7
+ bne sp_min_handle_smc /* Syscall */
+ b plat_panic_handler /* Prefetch abort */
+
+2:
+ /* Expected encoding: 0x7 and 0x6 */
+ tst sp, #1
+ bic sp, sp, #0x7
+ bne sp_min_entrypoint /* Reset */
+ b plat_panic_handler /* Undef */
+
+3:
+ /* Expected encoding: 0x2 and 0x3 */
+ tst sp, #1
+ bic sp, sp, #0x7
+ bne plat_panic_handler /* Data abort */
+ b plat_panic_handler /* Reserved */
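The vectors above have no scratch registers: on exception entry every general-purpose register still belongs to the interrupted context, so the only free storage is SP bits [2:0], which are guaranteed zero because smc_ctx_t is 8-byte aligned and padded (see the smcc_helpers.h hunk below). Each vector slot is a single `add sp, sp, #1` that falls through its successors, so the Reset slot accumulates 7 while the FIQ slot, which executes only the nop, leaves 0. A stand-alone C model of the encode/decode round trip (illustrative only, not part of the patch; the base SP value is made up):

    #include <stdint.h>
    #include <stdio.h>

    /* Exception names indexed by the value left in SP bits [2:0]. */
    static const char *const names[8] = {
            "FIQ", "IRQ", "Reserved", "Data abort",
            "Prefetch abort", "Syscall", "Undef", "Reset",
    };

    int main(void)
    {
            for (unsigned int slot = 0; slot < 8; slot++) {
                    uintptr_t sp = 0x04001000u; /* made-up 8-byte-aligned monitor SP */

                    /* Slots 0-6 are adds and slot 7 is a nop, so entering
                     * at slot n executes 7 - n adds before falling out. */
                    sp += 7u - slot;

                    /* The tst/bne tree reads these bits one at a time;
                     * bic sp, sp, #0x7 then restores the original SP. */
                    unsigned int entry = (unsigned int)(sp & 0x7u);
                    sp &= ~(uintptr_t)0x7u;

                    printf("slot %u -> %-14s sp restored to %#lx\n",
                           slot, names[entry], (unsigned long)sp);
            }
            return 0;
    }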
diff --git a/bl32/sp_min/workaround_cve_2017_5715_icache_inv.S b/bl32/sp_min/workaround_cve_2017_5715_icache_inv.S
new file mode 100644
index 0000000..9102b02
--- /dev/null
+++ b/bl32/sp_min/workaround_cve_2017_5715_icache_inv.S
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <asm_macros.S>
+
+ .globl workaround_icache_inv_runtime_exceptions
+
+vector_base workaround_icache_inv_runtime_exceptions
+ /* We encode the exception entry in the bottom 3 bits of SP */
+ add sp, sp, #1 /* Reset: 0b111 */
+ add sp, sp, #1 /* Undef: 0b110 */
+ add sp, sp, #1 /* Syscall: 0b101 */
+ add sp, sp, #1 /* Prefetch abort: 0b100 */
+ add sp, sp, #1 /* Data abort: 0b011 */
+ add sp, sp, #1 /* Reserved: 0b010 */
+ add sp, sp, #1 /* IRQ: 0b001 */
+ nop /* FIQ: 0b000 */
+
+ /*
+ * Invalidate the instruction cache, which we assume also
+ * invalidates the branch predictor. This may depend on
+ * other CPU-specific changes (e.g. an ACTLR setting).
+ */
+ stcopr r0, ICIALLU
+ isb
+
+ /*
+ * As we cannot use any temporary registers and cannot
+ * clobber SP, we decode the exception entry using
+ * an unrolled binary search.
+ *
+ * Note, if this code is re-used by other secure payloads,
+ * the below exception entry vectors must be changed to
+ * the vectors specific to that secure payload.
+ */
+
+ tst sp, #4
+ bne 1f
+
+ tst sp, #2
+ bne 3f
+
+ /* Expected encoding: 0x1 and 0x0 */
+ tst sp, #1
+ /* Restore original value of SP by clearing the bottom 3 bits */
+ bic sp, sp, #0x7
+ bne plat_panic_handler /* IRQ */
+ b sp_min_handle_fiq /* FIQ */
+
+1:
+ tst sp, #2
+ bne 2f
+
+ /* Expected encoding: 0x4 and 0x5 */
+ tst sp, #1
+ bic sp, sp, #0x7
+ bne sp_min_handle_smc /* Syscall */
+ b plat_panic_handler /* Prefetch abort */
+
+2:
+ /* Expected encoding: 0x7 and 0x6 */
+ tst sp, #1
+ bic sp, sp, #0x7
+ bne sp_min_entrypoint /* Reset */
+ b plat_panic_handler /* Undef */
+
+3:
+ /* Expected encoding: 0x2 and 0x3 */
+ tst sp, #1
+ bic sp, sp, #0x7
+ bne plat_panic_handler /* Data abort */
+ b plat_panic_handler /* Reserved */
diff --git a/include/common/aarch32/el3_common_macros.S b/include/common/aarch32/el3_common_macros.S
index d654b65..5db8854 100644
--- a/include/common/aarch32/el3_common_macros.S
+++ b/include/common/aarch32/el3_common_macros.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -14,7 +14,7 @@
/*
* Helper macro to initialise EL3 registers we care about.
*/
- .macro el3_arch_init_common _exception_vectors
+ .macro el3_arch_init_common
/* ---------------------------------------------------------------------
* SCTLR has already been initialised - read current value before
* modifying.
@@ -34,15 +34,6 @@
isb
/* ---------------------------------------------------------------------
- * Set the exception vectors (VBAR/MVBAR).
- * ---------------------------------------------------------------------
- */
- ldr r0, =\_exception_vectors
- stcopr r0, VBAR
- stcopr r0, MVBAR
- isb
-
- /* ---------------------------------------------------------------------
* Initialise SCR, setting all fields rather than relying on the hw.
*
* SCR.SIF: Enabled so that Secure state instruction fetches from
@@ -211,6 +202,15 @@
.endif /* _warm_boot_mailbox */
/* ---------------------------------------------------------------------
+ * Set the exception vectors (VBAR/MVBAR).
+ * ---------------------------------------------------------------------
+ */
+ ldr r0, =\_exception_vectors
+ stcopr r0, VBAR
+ stcopr r0, MVBAR
+ isb
+
+ /* ---------------------------------------------------------------------
* It is a cold boot.
* Perform any processor specific actions upon reset e.g. cache, TLB
* invalidations etc.
@@ -218,7 +218,7 @@
*/
bl reset_handler
- el3_arch_init_common \_exception_vectors
+ el3_arch_init_common
.if \_secondary_cold_boot
/* -------------------------------------------------------------
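The VBAR/MVBAR setup is moved out of el3_arch_init_common and ahead of the `bl reset_handler` call so that the default vectors are installed first; a CPU reset function (such as the Cortex-A9/A15/A17 ones below) can then replace MVBAR/VBAR with its workaround vectors without el3_arch_init_common overwriting them afterwards.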
diff --git a/include/lib/aarch32/arch.h b/include/lib/aarch32/arch.h
index 4d2a5fc..134d534 100644
--- a/include/lib/aarch32/arch.h
+++ b/include/lib/aarch32/arch.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -426,6 +426,8 @@
#define TLBIMVAA p15, 0, c8, c7, 3
#define TLBIMVAAIS p15, 0, c8, c3, 3
#define BPIALLIS p15, 0, c7, c1, 6
+#define BPIALL p15, 0, c7, c5, 6
+#define ICIALLU p15, 0, c7, c5, 0
#define HSCTLR p15, 4, c1, c0, 0
#define HCR p15, 4, c1, c1, 0
#define HCPTR p15, 4, c1, c1, 2
diff --git a/include/lib/aarch32/smcc_helpers.h b/include/lib/aarch32/smcc_helpers.h
index 53f1aa4..ed3b722 100644
--- a/include/lib/aarch32/smcc_helpers.h
+++ b/include/lib/aarch32/smcc_helpers.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -22,7 +22,7 @@
#define SMC_CTX_LR_MON 0x80
#define SMC_CTX_SCR 0x84
#define SMC_CTX_PMCR 0x88
-#define SMC_CTX_SIZE 0x8C
+#define SMC_CTX_SIZE 0x90
#ifndef __ASSEMBLY__
#include <cassert.h>
@@ -75,7 +75,13 @@
u_register_t lr_mon;
u_register_t scr;
u_register_t pmcr;
-} smc_ctx_t;
+ /*
+ * The workaround for CVE-2017-5715 requires storing information in
+ * the bottom 3 bits of the stack pointer. Add a padding field to
+ * force the size of the struct to be a multiple of 8.
+ */
+ u_register_t pad;
+} smc_ctx_t __aligned(8);
/*
* Compile time assertions related to the 'smc_context' structure to
@@ -99,6 +105,7 @@
CASSERT(SMC_CTX_SPSR_MON == __builtin_offsetof(smc_ctx_t, spsr_mon), \
assert_smc_ctx_spsr_mon_offset_mismatch);
+CASSERT((sizeof(smc_ctx_t) & 0x7) == 0, assert_smc_ctx_not_aligned);
CASSERT(SMC_CTX_SIZE == sizeof(smc_ctx_t), assert_smc_ctx_size_mismatch);
/* Convenience macros to return from SMC handler */
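On SMC entry the monitor SP points directly at the per-CPU smc_ctx_t, so the structure's size and alignment decide which low SP bits are guaranteed zero. Padding the struct to 0x90 bytes and aligning it to 8 keeps bits [2:0] clear for every context in an array, which is exactly what the vector-entry encoding borrows. A compilable sketch of the invariant (the real fields are collapsed into one array; names here are illustrative):

    #include <assert.h>
    #include <stdint.h>

    typedef struct smc_ctx {
            uint32_t word[35];  /* r0 .. pmcr: offsets 0x00 - 0x88, as above */
            uint32_t pad;       /* the new padding word: 0x8C -> 0x90 */
    } __attribute__((aligned(8))) smc_ctx_t;

    _Static_assert((sizeof(smc_ctx_t) & 0x7) == 0,
                   "size must be a multiple of 8");
    _Static_assert(sizeof(smc_ctx_t) == 0x90, "matches SMC_CTX_SIZE");

    static smc_ctx_t ctx[4];    /* e.g. one monitor context per CPU */

    int main(void)
    {
            for (int i = 0; i < 4; i++)
                    assert(((uintptr_t)&ctx[i] & 0x7) == 0); /* bits [2:0] free */
            return 0;
    }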
diff --git a/include/lib/cpus/aarch32/cortex_a15.h b/include/lib/cpus/aarch32/cortex_a15.h
index 905c139..0f01a43 100644
--- a/include/lib/cpus/aarch32/cortex_a15.h
+++ b/include/lib/cpus/aarch32/cortex_a15.h
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -15,6 +15,7 @@
/*******************************************************************************
* CPU Auxiliary Control register specific definitions.
******************************************************************************/
+#define CORTEX_A15_ACTLR_INV_BTB_BIT (1 << 0)
#define CORTEX_A15_ACTLR_SMP_BIT (1 << 6)
#endif /* __CORTEX_A15_H__ */
diff --git a/lib/cpus/aarch32/cortex_a15.S b/lib/cpus/aarch32/cortex_a15.S
index 0d5a116..b6c61ab 100644
--- a/lib/cpus/aarch32/cortex_a15.S
+++ b/lib/cpus/aarch32/cortex_a15.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -41,7 +41,46 @@
bx lr
endfunc cortex_a15_enable_smp
+func check_errata_cve_2017_5715
+#if WORKAROUND_CVE_2017_5715
+ mov r0, #ERRATA_APPLIES
+#else
+ mov r0, #ERRATA_MISSING
+#endif
+ bx lr
+endfunc check_errata_cve_2017_5715
+
+#if REPORT_ERRATA
+/*
+ * Errata printing function for Cortex A15. Must follow AAPCS.
+ */
+func cortex_a15_errata_report
+ push {r12, lr}
+
+ bl cpu_get_rev_var
+ mov r4, r0
+
+ /*
+ * Report all errata. The revision-variant information is passed to
+ * the checking function of each erratum.
+ */
+ report_errata WORKAROUND_CVE_2017_5715, cortex_a15, cve_2017_5715
+
+ pop {r12, lr}
+ bx lr
+endfunc cortex_a15_errata_report
+#endif
+
func cortex_a15_reset_func
+#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715
+ ldcopr r0, ACTLR
+ orr r0, #CORTEX_A15_ACTLR_INV_BTB_BIT
+ stcopr r0, ACTLR
+ ldr r0, =workaround_icache_inv_runtime_exceptions
+ stcopr r0, VBAR
+ stcopr r0, MVBAR
+ /* isb will be applied in the course of the reset func */
+#endif
b cortex_a15_enable_smp
endfunc cortex_a15_reset_func
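Cortex-A15 takes the instruction-cache-invalidate flavour of the workaround rather than BPIALL: per Arm's published mitigation guidance for this core, the reset function first sets the ACTLR invalidate-BTB bit so that the ICIALLU issued by the workaround vectors also invalidates the branch predictor.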
diff --git a/lib/cpus/aarch32/cortex_a17.S b/lib/cpus/aarch32/cortex_a17.S
index 316d4f0..b84c126 100644
--- a/lib/cpus/aarch32/cortex_a17.S
+++ b/lib/cpus/aarch32/cortex_a17.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -35,7 +35,43 @@
bx lr
endfunc cortex_a17_enable_smp
+func check_errata_cve_2017_5715
+#if WORKAROUND_CVE_2017_5715
+ mov r0, #ERRATA_APPLIES
+#else
+ mov r0, #ERRATA_MISSING
+#endif
+ bx lr
+endfunc check_errata_cve_2017_5715
+
+#if REPORT_ERRATA
+/*
+ * Errata printing function for Cortex A17. Must follow AAPCS.
+ */
+func cortex_a17_errata_report
+ push {r12, lr}
+
+ bl cpu_get_rev_var
+ mov r4, r0
+
+ /*
+ * Report all errata. The revision-variant information is passed to
+ * the checking function of each erratum.
+ */
+ report_errata WORKAROUND_CVE_2017_5715, cortex_a17, cve_2017_5715
+
+ pop {r12, lr}
+ bx lr
+endfunc cortex_a17_errata_report
+#endif
+
func cortex_a17_reset_func
+#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715
+ ldr r0, =workaround_bpiall_runtime_exceptions
+ stcopr r0, VBAR
+ stcopr r0, MVBAR
+ /* isb will be applied in the course of the reset func */
+#endif
b cortex_a17_enable_smp
endfunc cortex_a17_reset_func
diff --git a/lib/cpus/aarch32/cortex_a57.S b/lib/cpus/aarch32/cortex_a57.S
index 64a6d67..f446bff 100644
--- a/lib/cpus/aarch32/cortex_a57.S
+++ b/lib/cpus/aarch32/cortex_a57.S
@@ -332,6 +332,11 @@
b cpu_rev_var_ls
endfunc check_errata_859972
+func check_errata_cve_2017_5715
+ mov r0, #ERRATA_MISSING
+ bx lr
+endfunc check_errata_cve_2017_5715
+
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A57.
* Shall clobber: r0-r6
@@ -519,6 +524,7 @@
report_errata ERRATA_A57_829520, cortex_a57, 829520
report_errata ERRATA_A57_833471, cortex_a57, 833471
report_errata ERRATA_A57_859972, cortex_a57, 859972
+ report_errata WORKAROUND_CVE_2017_5715, cortex_a57, cve_2017_5715
pop {r12, lr}
bx lr
diff --git a/lib/cpus/aarch32/cortex_a72.S b/lib/cpus/aarch32/cortex_a72.S
index 35b9bc2..56e91f5 100644
--- a/lib/cpus/aarch32/cortex_a72.S
+++ b/lib/cpus/aarch32/cortex_a72.S
@@ -87,6 +87,10 @@
b cpu_rev_var_ls
endfunc check_errata_859971
+func check_errata_cve_2017_5715
+ mov r0, #ERRATA_MISSING
+ bx lr
+endfunc check_errata_cve_2017_5715
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A72.
@@ -236,6 +240,7 @@
* checking functions of each errata.
*/
report_errata ERRATA_A72_859971, cortex_a72, 859971
+ report_errata WORKAROUND_CVE_2017_5715, cortex_a72, cve_2017_5715
pop {r12, lr}
bx lr
diff --git a/lib/cpus/aarch32/cortex_a9.S b/lib/cpus/aarch32/cortex_a9.S
index 4f30f84..1fb10b2 100644
--- a/lib/cpus/aarch32/cortex_a9.S
+++ b/lib/cpus/aarch32/cortex_a9.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -35,7 +35,43 @@
bx lr
endfunc cortex_a9_enable_smp
+func check_errata_cve_2017_5715
+#if WORKAROUND_CVE_2017_5715
+ mov r0, #ERRATA_APPLIES
+#else
+ mov r0, #ERRATA_MISSING
+#endif
+ bx lr
+endfunc check_errata_cve_2017_5715
+
+#if REPORT_ERRATA
+/*
+ * Errata printing function for Cortex A9. Must follow AAPCS.
+ */
+func cortex_a9_errata_report
+ push {r12, lr}
+
+ bl cpu_get_rev_var
+ mov r4, r0
+
+ /*
+ * Report all errata. The revision-variant information is passed to
+ * the checking function of each erratum.
+ */
+ report_errata WORKAROUND_CVE_2017_5715, cortex_a9, cve_2017_5715
+
+ pop {r12, lr}
+ bx lr
+endfunc cortex_a9_errata_report
+#endif
+
func cortex_a9_reset_func
+#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715
+ ldr r0, =workaround_bpiall_runtime_exceptions
+ stcopr r0, VBAR
+ stcopr r0, MVBAR
+ /* isb will be applied in the course of the reset func */
+#endif
b cortex_a9_enable_smp
endfunc cortex_a9_reset_func
diff --git a/lib/cpus/aarch64/cortex_a57.S b/lib/cpus/aarch64/cortex_a57.S
index 683be47..c82ebfc 100644
--- a/lib/cpus/aarch64/cortex_a57.S
+++ b/lib/cpus/aarch64/cortex_a57.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2014-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2014-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -328,6 +328,15 @@
b cpu_rev_var_ls
endfunc check_errata_859972
+func check_errata_cve_2017_5715
+#if WORKAROUND_CVE_2017_5715
+ mov x0, #ERRATA_APPLIES
+#else
+ mov x0, #ERRATA_MISSING
+#endif
+ ret
+endfunc check_errata_cve_2017_5715
+
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A57.
* Shall clobber: x0-x19
@@ -518,7 +527,7 @@
report_errata ERRATA_A57_829520, cortex_a57, 829520
report_errata ERRATA_A57_833471, cortex_a57, 833471
report_errata ERRATA_A57_859972, cortex_a57, 859972
-
+ report_errata WORKAROUND_CVE_2017_5715, cortex_a57, cve_2017_5715
ldp x8, x30, [sp], #16
ret
diff --git a/lib/cpus/aarch64/cortex_a72.S b/lib/cpus/aarch64/cortex_a72.S
index 93821b7..9633aa8 100644
--- a/lib/cpus/aarch64/cortex_a72.S
+++ b/lib/cpus/aarch64/cortex_a72.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2015-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2015-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -97,6 +97,15 @@
b cpu_rev_var_ls
endfunc check_errata_859971
+func check_errata_cve_2017_5715
+#if WORKAROUND_CVE_2017_5715
+ mov x0, #ERRATA_APPLIES
+#else
+ mov x0, #ERRATA_MISSING
+#endif
+ ret
+endfunc check_errata_cve_2017_5715
+
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A72.
* -------------------------------------------------
@@ -249,6 +258,7 @@
* checking functions of each errata.
*/
report_errata ERRATA_A72_859971, cortex_a72, 859971
+ report_errata WORKAROUND_CVE_2017_5715, cortex_a72, cve_2017_5715
ldp x8, x30, [sp], #16
ret
diff --git a/lib/cpus/aarch64/cortex_a73.S b/lib/cpus/aarch64/cortex_a73.S
index c43f07e..11680a0 100644
--- a/lib/cpus/aarch64/cortex_a73.S
+++ b/lib/cpus/aarch64/cortex_a73.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -114,6 +114,36 @@
b cortex_a73_disable_smp
endfunc cortex_a73_cluster_pwr_dwn
+func check_errata_cve_2017_5715
+#if WORKAROUND_CVE_2017_5715
+ mov x0, #ERRATA_APPLIES
+#else
+ mov x0, #ERRATA_MISSING
+#endif
+ ret
+endfunc check_errata_cve_2017_5715
+
+#if REPORT_ERRATA
+/*
+ * Errata printing function for Cortex A73. Must follow AAPCS.
+ */
+func cortex_a73_errata_report
+ stp x8, x30, [sp, #-16]!
+
+ bl cpu_get_rev_var
+ mov x8, x0
+
+ /*
+ * Report all errata. The revision-variant information is passed to
+ * the checking function of each erratum.
+ */
+ report_errata WORKAROUND_CVE_2017_5715, cortex_a73, cve_2017_5715
+
+ ldp x8, x30, [sp], #16
+ ret
+endfunc cortex_a73_errata_report
+#endif
+
/* ---------------------------------------------
* This function provides cortex_a73 specific
* register information for crash reporting.
diff --git a/lib/cpus/aarch64/cortex_a75.S b/lib/cpus/aarch64/cortex_a75.S
index e66ad06..946f988 100644
--- a/lib/cpus/aarch64/cortex_a75.S
+++ b/lib/cpus/aarch64/cortex_a75.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -151,6 +151,27 @@
ret
endfunc cortex_a75_reset_func
+func check_errata_cve_2017_5715
+ mrs x0, id_aa64pfr0_el1
+ ubfx x0, x0, #ID_AA64PFR0_CSV2_SHIFT, #ID_AA64PFR0_CSV2_LENGTH
+ /*
+ * If the field equals 1, branch targets trained in one
+ * context cannot affect speculative execution in a different context.
+ */
+ cmp x0, #1
+ beq 1f
+
+#if WORKAROUND_CVE_2017_5715
+ mov x0, #ERRATA_APPLIES
+#else
+ mov x0, #ERRATA_MISSING
+#endif
+ ret
+1:
+ mov x0, #ERRATA_NOT_APPLIES
+ ret
+endfunc check_errata_cve_2017_5715
+
/* ---------------------------------------------
* HW will do the cache maintenance while powering down
* ---------------------------------------------
@@ -167,6 +188,27 @@
ret
endfunc cortex_a75_core_pwr_dwn
+#if REPORT_ERRATA
+/*
+ * Errata printing function for Cortex A75. Must follow AAPCS.
+ */
+func cortex_a75_errata_report
+ stp x8, x30, [sp, #-16]!
+
+ bl cpu_get_rev_var
+ mov x8, x0
+
+ /*
+ * Report all errata. The revision-variant information is passed to
+ * the checking function of each erratum.
+ */
+ report_errata WORKAROUND_CVE_2017_5715, cortex_a75, cve_2017_5715
+
+ ldp x8, x30, [sp], #16
+ ret
+endfunc cortex_a75_errata_report
+#endif
+
/* ---------------------------------------------
* This function provides cortex_a75 specific
* register information for crash reporting.
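Cortex-A75 is the only core here that can report immunity at run time: if ID_AA64PFR0_EL1.CSV2 reads as 1, branch targets trained in one context cannot affect speculation in another, and the check returns ERRATA_NOT_APPLIES. A small AArch64 sketch of the same field read (the helper name is ours, not TF-A's; CSV2 sits at bits [59:56] per the Arm ARM, and on Linux/arm64 the kernel emulates this MRS so the program also runs from user space):

    #include <stdint.h>
    #include <stdio.h>

    static inline int branch_predictor_is_context_safe(void)
    {
            uint64_t pfr0;

            /* ID_AA64PFR0_EL1: the register check_errata_cve_2017_5715 reads */
            __asm__ volatile("mrs %0, id_aa64pfr0_el1" : "=r"(pfr0));

            /* CSV2 == 1: no cross-context branch predictor training. */
            return ((pfr0 >> 56) & 0xf) == 1;
    }

    int main(void)
    {
            printf("CVE-2017-5715 mitigated in hardware: %s\n",
                   branch_predictor_is_context_safe() ? "yes" : "no");
            return 0;
    }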
diff --git a/lib/cpus/errata_report.c b/lib/cpus/errata_report.c
index 182679d..c679336 100644
--- a/lib/cpus/errata_report.c
+++ b/lib/cpus/errata_report.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -27,7 +27,7 @@
#endif
/* Errata format: BL stage, CPU, errata ID, message */
-#define ERRATA_FORMAT "%s: %s: errata workaround for %s was %s\n"
+#define ERRATA_FORMAT "%s: %s: CPU workaround for %s was %s\n"
/*
* Returns whether errata needs to be reported. Passed arguments are private to
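With the format-string change, CVE entries read as generic CPU workarounds rather than numbered errata on the serial console. A reported line then has roughly this shape (illustrative, formed from ERRATA_FORMAT above with the "applied" status string):

    INFO:    BL32: cortex_a15: CPU workaround for cve_2017_5715 was applied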