/*
 * Copyright (c) 2018-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <common/debug.h>
#include <common/runtime_svc.h>
#include <lib/cpus/errata.h>
#include <lib/cpus/wa_cve_2017_5715.h>
#include <lib/cpus/wa_cve_2018_3639.h>
#include <lib/cpus/wa_cve_2022_23960.h>
#include <lib/smccc.h>
#include <services/arm_arch_svc.h>
#include <smccc_helpers.h>
#include <plat/common/platform.h>

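/*
 * Report the SMCCC version implemented by this firmware. The value packs
 * the major version into bits [30:16] and the minor version into bits
 * [15:0] of the return, as encoded by MAKE_SMCCC_VERSION().
 */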
static int32_t smccc_version(void)
{
	return MAKE_SMCCC_VERSION(SMCCC_MAJOR_VERSION, SMCCC_MINOR_VERSION);
}

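/*
 * SMCCC_ARCH_FEATURES discovery handler. A negative return (SMC_UNK)
 * means the queried function ID is not implemented. For the workaround
 * function IDs handled below, 0 means the workaround is implemented and
 * needed on this PE, while 1 means it is not required.
 */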
static int32_t smccc_arch_features(u_register_t arg1)
{
	switch (arg1) {
	case SMCCC_VERSION:
	case SMCCC_ARCH_FEATURES:
		return SMC_ARCH_CALL_SUCCESS;
	case SMCCC_ARCH_SOC_ID:
		return plat_is_smccc_feature_available(arg1);
#ifdef __aarch64__
	/* Workaround checks are currently only implemented for aarch64 */
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		if (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)
			return 1;
		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
#endif

#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2: {
#if DYNAMIC_WORKAROUND_CVE_2018_3639
		unsigned long long ssbs;

		/*
		 * Firmware doesn't have to carry out dynamic workaround if the
		 * PE implements architectural Speculation Store Bypass Safe
		 * (SSBS) feature.
		 */
		ssbs = (read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_SSBS_SHIFT) &
			ID_AA64PFR1_EL1_SSBS_MASK;

		/*
		 * If architectural SSBS is available on this PE, no firmware
		 * mitigation via SMCCC_ARCH_WORKAROUND_2 is required.
		 */
		if (ssbs != SSBS_UNAVAILABLE)
			return 1;

		/*
		 * On a platform where at least one CPU requires
		 * dynamic mitigation but others are either unaffected
		 * or permanently mitigated, report the latter as not
		 * needing dynamic mitigation.
		 */
		if (wa_cve_2018_3639_get_disable_ptr() == NULL)
			return 1;
		/*
		 * If we get here, this CPU requires dynamic mitigation
		 * so report it as such.
		 */
		return 0;
#else
		/* Either the CPUs are unaffected or permanently mitigated */
		return SMC_ARCH_CALL_NOT_REQUIRED;
#endif
	}
#endif

#if (WORKAROUND_CVE_2022_23960 || WORKAROUND_CVE_2017_5715)
	case SMCCC_ARCH_WORKAROUND_3:
		/*
		 * SMCCC_ARCH_WORKAROUND_3 should also take into account
		 * CVE-2017-5715 since this SMC can be used instead of
		 * SMCCC_ARCH_WORKAROUND_1.
		 */
		if ((check_smccc_arch_wa3_applies() == ERRATA_NOT_APPLIES) &&
		    (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)) {
			return 1;
		}
		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
#endif
#endif /* __aarch64__ */

	/* Fallthrough */

	default:
		return SMC_UNK;
	}
}

/*
 * Return the SoC revision or SoC version on success, otherwise
 * return SMC_ARCH_CALL_INVAL_PARAM.
 */
static int32_t smccc_arch_id(u_register_t arg1)
{
	if (arg1 == SMCCC_GET_SOC_REVISION) {
		return plat_get_soc_revision();
	}
	if (arg1 == SMCCC_GET_SOC_VERSION) {
		return plat_get_soc_version();
	}
	return SMC_ARCH_CALL_INVAL_PARAM;
}
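
/*
 * Note: per SMCCC, callers are expected to discover SMCCC_ARCH_SOC_ID via
 * SMCCC_ARCH_FEATURES first (deferred to the platform above) and then pass
 * SMCCC_GET_SOC_VERSION or SMCCC_GET_SOC_REVISION as the argument; the
 * returned values come from the platform hooks plat_get_soc_version() and
 * plat_get_soc_revision().
 */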

/*
 * Top-level Arm Architectural Service SMC handler.
 */
static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
	u_register_t x1,
	u_register_t x2,
	u_register_t x3,
	u_register_t x4,
	void *cookie,
	void *handle,
	u_register_t flags)
{
	switch (smc_fid) {
	case SMCCC_VERSION:
		SMC_RET1(handle, smccc_version());
	case SMCCC_ARCH_FEATURES:
		SMC_RET1(handle, smccc_arch_features(x1));
	case SMCCC_ARCH_SOC_ID:
		SMC_RET1(handle, smccc_arch_id(x1));
#ifdef __aarch64__
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		/*
		 * The workaround has already been applied on affected PEs
		 * during entry to EL3. On unaffected PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2:
		/*
		 * The workaround has already been applied on affected PEs
		 * requiring dynamic mitigation during entry to EL3.
		 * On unaffected or statically mitigated PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#if (WORKAROUND_CVE_2022_23960 || WORKAROUND_CVE_2017_5715)
	case SMCCC_ARCH_WORKAROUND_3:
		/*
		 * The workaround has already been applied on affected PEs
		 * during entry to EL3. On unaffected PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#endif /* __aarch64__ */
	default:
		WARN("Unimplemented Arm Architecture Service Call: 0x%x\n",
			smc_fid);
		SMC_RET1(handle, SMC_UNK);
	}
}

/* Register Arm Architectural Service Calls as runtime service */
DECLARE_RT_SVC(
	arm_arch_svc,
	OEN_ARM_START,
	OEN_ARM_END,
	SMC_TYPE_FAST,
	NULL,
	arm_arch_svc_smc_handler
);
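
/*
 * Illustrative usage (not part of this file's build): a minimal sketch of
 * how a lower-EL caller might use the services above, assuming a
 * hypothetical smc_call(fid, arg) helper that issues an SMC and returns
 * its x0 result. After confirming SMCCC v1.1 or later via SMCCC_VERSION,
 * the caller would probe and invoke the Spectre-v2 workaround as:
 *
 *	if (smc_call(SMCCC_ARCH_FEATURES, SMCCC_ARCH_WORKAROUND_1) == 0)
 *		smc_call(SMCCC_ARCH_WORKAROUND_1, 0);
 *
 * where 0 from SMCCC_ARCH_FEATURES means the workaround is implemented and
 * needed on this PE, 1 means it is not required, and a negative value
 * means the function ID is not implemented.
 */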