/*
 * Copyright (c) 2018-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
6
Antonio Nino Diaze0f90632018-12-14 00:18:21 +00007#include <common/debug.h>
8#include <common/runtime_svc.h>
9#include <lib/cpus/errata_report.h>
10#include <lib/cpus/wa_cve_2017_5715.h>
11#include <lib/cpus/wa_cve_2018_3639.h>
12#include <lib/smccc.h>
13#include <services/arm_arch_svc.h>
Antonio Nino Diaz3c817f42018-03-21 10:49:27 +000014#include <smccc_helpers.h>
Manish V Badarkhef809c6e2020-02-22 08:43:00 +000015#include <plat/common/platform.h>
Dimitris Papastamos0dcdb1a2018-01-19 16:58:29 +000016
17static int32_t smccc_version(void)
18{
19 return MAKE_SMCCC_VERSION(SMCCC_MAJOR_VERSION, SMCCC_MINOR_VERSION);
20}
21
Manish V Badarkhef809c6e2020-02-22 08:43:00 +000022static int32_t smccc_arch_features(u_register_t arg1, u_register_t arg2)
Dimitris Papastamos0dcdb1a2018-01-19 16:58:29 +000023{
Manish V Badarkhef809c6e2020-02-22 08:43:00 +000024 switch (arg1) {
Dimitris Papastamos0dcdb1a2018-01-19 16:58:29 +000025 case SMCCC_VERSION:
26 case SMCCC_ARCH_FEATURES:
27 return SMC_OK;
Manish V Badarkhef809c6e2020-02-22 08:43:00 +000028 case SMCCC_ARCH_SOC_ID:
29 if (arg2 == SMCCC_GET_SOC_REVISION) {
30 return plat_get_soc_revision();
31 }
32 if (arg2 == SMCCC_GET_SOC_VERSION) {
33 return plat_get_soc_version();
34 }
35 return SMC_ARCH_CALL_INVAL_PARAM;
Dimitris Papastamos6d1f4992018-03-28 12:06:40 +010036#if WORKAROUND_CVE_2017_5715
Dimitris Papastamos0dcdb1a2018-01-19 16:58:29 +000037 case SMCCC_ARCH_WORKAROUND_1:
Dimitris Papastamos570c06a2018-04-06 15:29:34 +010038 if (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)
Dimitris Papastamos914757c2018-03-12 14:47:09 +000039 return 1;
Dimitris Papastamos6d1f4992018-03-28 12:06:40 +010040 return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
41#endif
Jeenu Viswambharanaa00aff2018-11-15 11:38:03 +000042
Dimitris Papastamose6625ec2018-04-05 14:38:26 +010043#if WORKAROUND_CVE_2018_3639
Jeenu Viswambharanaa00aff2018-11-15 11:38:03 +000044 case SMCCC_ARCH_WORKAROUND_2: {
Dimitris Papastamosba51d9e2018-05-16 11:36:14 +010045#if DYNAMIC_WORKAROUND_CVE_2018_3639
Jeenu Viswambharanaa00aff2018-11-15 11:38:03 +000046 unsigned long long ssbs;
47
48 /*
49 * Firmware doesn't have to carry out dynamic workaround if the
50 * PE implements architectural Speculation Store Bypass Safe
51 * (SSBS) feature.
52 */
Dimitris Papastamosb091eb92019-02-27 11:46:48 +000053 ssbs = (read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_SSBS_SHIFT) &
Jeenu Viswambharanaa00aff2018-11-15 11:38:03 +000054 ID_AA64PFR1_EL1_SSBS_MASK;
55
56 /*
57 * If architectural SSBS is available on this PE, no firmware
58 * mitigation via SMCCC_ARCH_WORKAROUND_2 is required.
59 */
60 if (ssbs != SSBS_UNAVAILABLE)
61 return 1;
62
Dimitris Papastamosba51d9e2018-05-16 11:36:14 +010063 /*
64 * On a platform where at least one CPU requires
65 * dynamic mitigation but others are either unaffected
66 * or permanently mitigated, report the latter as not
67 * needing dynamic mitigation.
68 */
69 if (wa_cve_2018_3639_get_disable_ptr() == NULL)
70 return 1;
71 /*
72 * If we get here, this CPU requires dynamic mitigation
73 * so report it as such.
74 */
75 return 0;
76#else
77 /* Either the CPUs are unaffected or permanently mitigated */
Manish V Badarkhe13335172020-02-19 13:36:50 +000078 return SMC_ARCH_CALL_NOT_REQUIRED;
Dimitris Papastamose6625ec2018-04-05 14:38:26 +010079#endif
Jeenu Viswambharanaa00aff2018-11-15 11:38:03 +000080 }
Dimitris Papastamosba51d9e2018-05-16 11:36:14 +010081#endif
Jeenu Viswambharanaa00aff2018-11-15 11:38:03 +000082
83 /* Fallthrough */
84
Dimitris Papastamos0dcdb1a2018-01-19 16:58:29 +000085 default:
86 return SMC_UNK;
87 }
88}
89
90/*
91 * Top-level Arm Architectural Service SMC handler.
92 */
Roberto Vargas05712702018-02-12 12:36:17 +000093static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
Dimitris Papastamos0dcdb1a2018-01-19 16:58:29 +000094 u_register_t x1,
95 u_register_t x2,
96 u_register_t x3,
97 u_register_t x4,
98 void *cookie,
99 void *handle,
100 u_register_t flags)
101{
102 switch (smc_fid) {
103 case SMCCC_VERSION:
104 SMC_RET1(handle, smccc_version());
105 case SMCCC_ARCH_FEATURES:
Manish V Badarkhef809c6e2020-02-22 08:43:00 +0000106 SMC_RET1(handle, smccc_arch_features(x1, x2));
Dimitris Papastamos0dcdb1a2018-01-19 16:58:29 +0000107#if WORKAROUND_CVE_2017_5715
108 case SMCCC_ARCH_WORKAROUND_1:
109 /*
110 * The workaround has already been applied on affected PEs
111 * during entry to EL3. On unaffected PEs, this function
112 * has no effect.
113 */
114 SMC_RET0(handle);
115#endif
Dimitris Papastamose6625ec2018-04-05 14:38:26 +0100116#if WORKAROUND_CVE_2018_3639
117 case SMCCC_ARCH_WORKAROUND_2:
118 /*
119 * The workaround has already been applied on affected PEs
120 * requiring dynamic mitigation during entry to EL3.
121 * On unaffected or statically mitigated PEs, this function
122 * has no effect.
123 */
124 SMC_RET0(handle);
125#endif
Dimitris Papastamos0dcdb1a2018-01-19 16:58:29 +0000126 default:
127 WARN("Unimplemented Arm Architecture Service Call: 0x%x \n",
128 smc_fid);
129 SMC_RET1(handle, SMC_UNK);
130 }
131}
132
133/* Register Standard Service Calls as runtime service */
/*
 * Register the Arm Architectural Service calls (OEN 0) as a fast
 * runtime service; no init hook is required.
 */
DECLARE_RT_SVC(
	arm_arch_svc,
	OEN_ARM_START,
	OEN_ARM_END,
	SMC_TYPE_FAST,
	NULL,
	arm_arch_svc_smc_handler
);