blob: 1d4423cb33bbd42247cb507267c301d9269408e0 [file] [log] [blame]
Dimitris Papastamos0dcdb1a2018-01-19 16:58:29 +00001/*
Zelalem Aweke13dc8f12021-07-09 14:20:03 -05002 * Copyright (c) 2018-2021, ARM Limited and Contributors. All rights reserved.
Dimitris Papastamos0dcdb1a2018-01-19 16:58:29 +00003 *
4 * SPDX-License-Identifier: BSD-3-Clause
5 */
6
Antonio Nino Diaze0f90632018-12-14 00:18:21 +00007#include <common/debug.h>
8#include <common/runtime_svc.h>
9#include <lib/cpus/errata_report.h>
10#include <lib/cpus/wa_cve_2017_5715.h>
11#include <lib/cpus/wa_cve_2018_3639.h>
12#include <lib/smccc.h>
13#include <services/arm_arch_svc.h>
Zelalem Aweke13dc8f12021-07-09 14:20:03 -050014#include <services/rmi_svc.h>
15#include <services/rmmd_svc.h>
Antonio Nino Diaz3c817f42018-03-21 10:49:27 +000016#include <smccc_helpers.h>
Manish V Badarkhef809c6e2020-02-22 08:43:00 +000017#include <plat/common/platform.h>
Dimitris Papastamos0dcdb1a2018-01-19 16:58:29 +000018
#if ENABLE_RME
/*
 * Setup Arm architecture Services.
 *
 * With RME enabled, the RMM dispatcher must be initialised before RMI
 * calls (routed through this service's handler below) can be serviced.
 * Return value is rmmd_setup()'s — presumably 0 on success, non-zero on
 * failure per the runtime-service init convention; confirm in rmmd_svc.
 */
static int32_t arm_arch_svc_setup(void)
{
	return rmmd_setup();
}
#endif
26
/*
 * SMCCC_VERSION handler: return the implemented SMC Calling Convention
 * version, packed from the major/minor components by MAKE_SMCCC_VERSION.
 */
static int32_t smccc_version(void)
{
	return MAKE_SMCCC_VERSION(SMCCC_MAJOR_VERSION, SMCCC_MINOR_VERSION);
}
31
/*
 * SMCCC_ARCH_FEATURES handler: report whether the Arm Architecture
 * Service function identified by 'arg1' is implemented on this PE.
 *
 * Returns SMC_ARCH_CALL_SUCCESS when unconditionally supported, a
 * platform- or workaround-specific status for the conditional cases
 * below, and SMC_UNK for unrecognised function IDs.
 */
static int32_t smccc_arch_features(u_register_t arg1)
{
	switch (arg1) {
	case SMCCC_VERSION:
	case SMCCC_ARCH_FEATURES:
		return SMC_ARCH_CALL_SUCCESS;
	case SMCCC_ARCH_SOC_ID:
		/* SOC_ID support is platform-dependent; defer to the platform. */
		return plat_is_smccc_feature_available(arg1);
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		/*
		 * Report 1 ("mitigation not required") only when this PE is
		 * known not to be affected; 0 covers both "workaround applies"
		 * and "status unknown" (treated conservatively as required).
		 */
		if (check_wa_cve_2017_5715() == ERRATA_NOT_APPLIES)
			return 1;
		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
#endif

#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2: {
#if DYNAMIC_WORKAROUND_CVE_2018_3639
		unsigned long long ssbs;

		/*
		 * Firmware doesn't have to carry out dynamic workaround if the
		 * PE implements architectural Speculation Store Bypass Safe
		 * (SSBS) feature.
		 */
		ssbs = (read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_SSBS_SHIFT) &
			ID_AA64PFR1_EL1_SSBS_MASK;

		/*
		 * If architectural SSBS is available on this PE, no firmware
		 * mitigation via SMCCC_ARCH_WORKAROUND_2 is required.
		 */
		if (ssbs != SSBS_UNAVAILABLE)
			return 1;

		/*
		 * On a platform where at least one CPU requires
		 * dynamic mitigation but others are either unaffected
		 * or permanently mitigated, report the latter as not
		 * needing dynamic mitigation.
		 */
		if (wa_cve_2018_3639_get_disable_ptr() == NULL)
			return 1;
		/*
		 * If we get here, this CPU requires dynamic mitigation
		 * so report it as such.
		 */
		return 0;
#else
		/* Either the CPUs are unaffected or permanently mitigated */
		return SMC_ARCH_CALL_NOT_REQUIRED;
#endif
	}
#endif

	/* Fallthrough */

	default:
		return SMC_UNK;
	}
}
93
94/* return soc revision or soc version on success otherwise
95 * return invalid parameter */
96static int32_t smccc_arch_id(u_register_t arg1)
97{
98 if (arg1 == SMCCC_GET_SOC_REVISION) {
99 return plat_get_soc_revision();
100 }
101 if (arg1 == SMCCC_GET_SOC_VERSION) {
102 return plat_get_soc_version();
103 }
104 return SMC_ARCH_CALL_INVAL_PARAM;
Dimitris Papastamos0dcdb1a2018-01-19 16:58:29 +0000105}
106
/*
 * Top-level Arm Architectural Service SMC handler.
 *
 * Dispatches on the function ID in 'smc_fid' and returns the result to
 * the caller's context via the SMC_RETx macros (which restore the saved
 * context and do not return to this function). Unknown IDs get SMC_UNK.
 */
static uintptr_t arm_arch_svc_smc_handler(uint32_t smc_fid,
	u_register_t x1,
	u_register_t x2,
	u_register_t x3,
	u_register_t x4,
	void *cookie,
	void *handle,
	u_register_t flags)
{
	switch (smc_fid) {
	case SMCCC_VERSION:
		SMC_RET1(handle, smccc_version());
	case SMCCC_ARCH_FEATURES:
		SMC_RET1(handle, smccc_arch_features(x1));
	case SMCCC_ARCH_SOC_ID:
		SMC_RET1(handle, smccc_arch_id(x1));
#if WORKAROUND_CVE_2017_5715
	case SMCCC_ARCH_WORKAROUND_1:
		/*
		 * The workaround has already been applied on affected PEs
		 * during entry to EL3. On unaffected PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
#if WORKAROUND_CVE_2018_3639
	case SMCCC_ARCH_WORKAROUND_2:
		/*
		 * The workaround has already been applied on affected PEs
		 * requiring dynamic mitigation during entry to EL3.
		 * On unaffected or statically mitigated PEs, this function
		 * has no effect.
		 */
		SMC_RET0(handle);
#endif
	default:
#if ENABLE_RME
		/*
		 * RMI functions are allocated from the Arch service range. Call
		 * the RMM dispatcher to handle RMI calls.
		 */
		if (is_rmi_fid(smc_fid)) {
			return rmmd_rmi_handler(smc_fid, x1, x2, x3, x4, cookie,
						handle, flags);
		}
#endif
		WARN("Unimplemented Arm Architecture Service Call: 0x%x \n",
			smc_fid);
		SMC_RET1(handle, SMC_UNK);
	}
}
161
/*
 * Register the Arm Architecture Service (OEN range OEN_ARM_START to
 * OEN_ARM_END) as a fast-call runtime service.
 */
DECLARE_RT_SVC(
	arm_arch_svc,
	OEN_ARM_START,
	OEN_ARM_END,
	SMC_TYPE_FAST,
#if ENABLE_RME
	/* Init hook: RMM dispatcher must be set up before RMI calls arrive */
	arm_arch_svc_setup,
#else
	NULL, /* no initialisation needed when RME is disabled */
#endif
	arm_arch_svc_smc_handler
);