blob: ed9bc9518f0eefc753c0d55a4f10bb522a899cbd [file] [log] [blame]
/*
 * Copyright (c) 2018-2025, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef ARM_ARCH_SVC_H
#define ARM_ARCH_SVC_H

/* SMCCC Arm Architecture Service Call function identifiers. */
#define SMCCC_VERSION				U(0x80000000)
#define SMCCC_ARCH_FEATURES			U(0x80000001)
#define SMCCC_ARCH_SOC_ID			U(0x80000002)
#define SMCCC_ARCH_WORKAROUND_1			U(0x80008000)
#define SMCCC_ARCH_WORKAROUND_2			U(0x80007FFF)
#define SMCCC_ARCH_WORKAROUND_3			U(0x80003FFF)
#define SMCCC_ARCH_FEATURE_AVAILABILITY		U(0x80000003)
#define SMCCC_ARCH_WORKAROUND_4			U(0x80000004)

/* Argument values for the SMCCC_ARCH_SOC_ID call. */
#define SMCCC_GET_SOC_VERSION			U(0)
#define SMCCC_GET_SOC_REVISION			U(1)

#ifndef __ASSEMBLER__
#if ARCH_FEATURE_AVAILABILITY
#include <lib/cassert.h>

/*
 * Per-feature SCR_EL3 bit aliases for SMCCC_ARCH_FEATURE_AVAILABILITY.
 * Each SCR_FEAT_xxx expands to the feature's SCR_EL3 enable bit(s) when the
 * corresponding build option is set, and to (0) otherwise, so the aggregate
 * masks below stay valid expressions in every configuration.
 */
#if ENABLE_FEAT_FGT2
#define SCR_FEAT_FGT2 SCR_FGTEN2_BIT
#else
#define SCR_FEAT_FGT2 (0)
#endif

#if ENABLE_FEAT_FPMR
#define SCR_FEAT_FPMR SCR_EnFPM_BIT
#else
/*
 * Must expand to (0), not to nothing: an empty expansion would leave a
 * dangling '|' operand inside SCR_EL3_FEATS below and break the build
 * whenever ENABLE_FEAT_FPMR is 0.
 */
#define SCR_FEAT_FPMR (0)
#endif

#if ENABLE_FEAT_D128
#define SCR_FEAT_D128 SCR_D128En_BIT
#else
#define SCR_FEAT_D128 (0)
#endif

#if ENABLE_FEAT_S1PIE
#define SCR_FEAT_S1PIE SCR_PIEN_BIT
#else
#define SCR_FEAT_S1PIE (0)
#endif

#if ENABLE_FEAT_SCTLR2
#define SCR_FEAT_SCTLR2 SCR_SCTLR2En_BIT
#else
#define SCR_FEAT_SCTLR2 (0)
#endif

#if ENABLE_FEAT_TCR2
#define SCR_FEAT_TCR2 SCR_TCR2EN_BIT
#else
#define SCR_FEAT_TCR2 (0)
#endif

#if ENABLE_FEAT_THE
#define SCR_FEAT_THE SCR_RCWMASKEn_BIT
#else
#define SCR_FEAT_THE (0)
#endif

#if ENABLE_SME_FOR_NS
#define SCR_FEAT_SME SCR_ENTP2_BIT
#else
#define SCR_FEAT_SME (0)
#endif

#if ENABLE_FEAT_GCS
#define SCR_FEAT_GCS SCR_GCSEn_BIT
#else
#define SCR_FEAT_GCS (0)
#endif

#if ENABLE_FEAT_HCX
#define SCR_FEAT_HCX SCR_HXEn_BIT
#else
#define SCR_FEAT_HCX (0)
#endif

#if ENABLE_FEAT_LS64_ACCDATA
#define SCR_FEAT_LS64_ACCDATA (SCR_ADEn_BIT | SCR_EnAS0_BIT)
#else
#define SCR_FEAT_LS64_ACCDATA (0)
#endif

#if ENABLE_FEAT_AMUv1p1
#define SCR_FEAT_AMUv1p1 SCR_AMVOFFEN_BIT
#else
#define SCR_FEAT_AMUv1p1 (0)
#endif

#if ENABLE_FEAT_ECV
#define SCR_FEAT_ECV SCR_ECVEN_BIT
#else
#define SCR_FEAT_ECV (0)
#endif

#if ENABLE_FEAT_FGT
#define SCR_FEAT_FGT SCR_FGTEN_BIT
#else
#define SCR_FEAT_FGT (0)
#endif

#if ENABLE_FEAT_MTE2
#define SCR_FEAT_MTE2 SCR_ATA_BIT
#else
#define SCR_FEAT_MTE2 (0)
#endif

#if ENABLE_FEAT_CSV2_2
#define SCR_FEAT_CSV2_2 SCR_EnSCXT_BIT
#else
#define SCR_FEAT_CSV2_2 (0)
#endif

#if ENABLE_FEAT_RAS
#define SCR_FEAT_RAS SCR_TERR_BIT
#else
#define SCR_FEAT_RAS (0)
#endif

/* Platform hooks: default every optional platform contribution to (0). */
#ifndef SCR_PLAT_FEATS
#define SCR_PLAT_FEATS (0)
#endif
#ifndef SCR_PLAT_FLIPPED
#define SCR_PLAT_FLIPPED (0)
#endif
#ifndef SCR_PLAT_IGNORED
#define SCR_PLAT_IGNORED (0)
#endif

#ifndef CPTR_PLAT_FEATS
#define CPTR_PLAT_FEATS (0)
#endif
#ifndef CPTR_PLAT_FLIPPED
#define CPTR_PLAT_FLIPPED (0)
#endif

#ifndef MDCR_PLAT_FEATS
#define MDCR_PLAT_FEATS (0)
#endif
#ifndef MDCR_PLAT_FLIPPED
#define MDCR_PLAT_FLIPPED (0)
#endif
#ifndef MDCR_PLAT_IGNORED
#define MDCR_PLAT_IGNORED (0)
#endif

/*
 * XYZ_EL3_FEATS - list all bits that are relevant for feature enablement. It's
 * a constant list based on what features are expected. This relies on the fact
 * that if the feature is in any way disabled, then the relevant bit will not be
 * written by context management.
 *
 * XYZ_EL3_FLIPPED - bits with an active 0, rather than the usual active 1. The
 * spec always uses active 1 to mean that the feature will not trap.
 *
 * XYZ_EL3_IGNORED - list of all bits that are not relevant for feature
 * enablement and should not be reported to lower ELs
 */
#define SCR_EL3_FEATS ( \
	SCR_FEAT_FGT2 | \
	SCR_FEAT_FPMR | \
	SCR_FEAT_D128 | \
	SCR_FEAT_S1PIE | \
	SCR_FEAT_SCTLR2 | \
	SCR_FEAT_TCR2 | \
	SCR_FEAT_THE | \
	SCR_FEAT_SME | \
	SCR_FEAT_GCS | \
	SCR_FEAT_HCX | \
	SCR_FEAT_LS64_ACCDATA | \
	SCR_FEAT_AMUv1p1 | \
	SCR_FEAT_ECV | \
	SCR_FEAT_FGT | \
	SCR_FEAT_MTE2 | \
	SCR_FEAT_CSV2_2 | \
	SCR_APK_BIT | /* FEAT_Pauth */ \
	SCR_FEAT_RAS | \
	SCR_PLAT_FEATS)
#define SCR_EL3_FLIPPED ( \
	SCR_FEAT_RAS | \
	SCR_PLAT_FLIPPED)
#define SCR_EL3_IGNORED ( \
	SCR_API_BIT | \
	SCR_RW_BIT | \
	SCR_SIF_BIT | \
	SCR_HCE_BIT | \
	SCR_FIQ_BIT | \
	SCR_IRQ_BIT | \
	SCR_NS_BIT | \
	SCR_RES1_BITS | \
	SCR_PLAT_IGNORED)
/* The three categories must be mutually consistent. */
CASSERT((SCR_EL3_FEATS & SCR_EL3_IGNORED) == 0, scr_feat_is_ignored);
CASSERT((SCR_EL3_FLIPPED & SCR_EL3_FEATS) == SCR_EL3_FLIPPED, scr_flipped_not_a_feat);

/* Per-feature CPTR_EL3 bit aliases, same convention as the SCR_FEAT_xxx set. */
#if ENABLE_SYS_REG_TRACE_FOR_NS
#define CPTR_SYS_REG_TRACE (TCPAC_BIT | TTA_BIT)
#else
#define CPTR_SYS_REG_TRACE (0)
#endif

#if ENABLE_FEAT_AMU
#define CPTR_FEAT_AMU TAM_BIT
#else
#define CPTR_FEAT_AMU (0)
#endif

#if ENABLE_SME_FOR_NS
#define CPTR_FEAT_SME ESM_BIT
#else
#define CPTR_FEAT_SME (0)
#endif

#if ENABLE_SVE_FOR_NS
#define CPTR_FEAT_SVE CPTR_EZ_BIT
#else
#define CPTR_FEAT_SVE (0)
#endif

#define CPTR_EL3_FEATS ( \
	CPTR_SYS_REG_TRACE | \
	CPTR_FEAT_AMU | \
	CPTR_FEAT_SME | \
	TFP_BIT | \
	CPTR_FEAT_SVE | \
	CPTR_PLAT_FEATS)
#define CPTR_EL3_FLIPPED ( \
	CPTR_SYS_REG_TRACE | \
	CPTR_FEAT_AMU | \
	TFP_BIT | \
	CPTR_PLAT_FLIPPED)
CASSERT((CPTR_EL3_FLIPPED & CPTR_EL3_FEATS) == CPTR_EL3_FLIPPED, cptr_flipped_not_a_feat);

/*
 * Some features enables are expressed with more than 1 bit in order to cater
 * for multi world enablement. In those cases (BRB, TRB, SPE) only the last bit
 * is used and reported. This (ab)uses the convenient fact that the last bit
 * always means "enabled for this world" when context switched correctly.
 * The per-world values have been adjusted such that this is always true.
 */
#if ENABLE_BRBE_FOR_NS
#define MDCR_FEAT_BRBE MDCR_SBRBE(1UL)
#else
#define MDCR_FEAT_BRBE (0)
#endif

#if ENABLE_FEAT_FGT
#define MDCR_FEAT_FGT MDCR_TDCC_BIT
#else
#define MDCR_FEAT_FGT (0)
#endif

#if ENABLE_TRBE_FOR_NS
#define MDCR_FEAT_TRBE MDCR_NSTB(1UL)
#else
#define MDCR_FEAT_TRBE (0)
#endif

#if ENABLE_TRF_FOR_NS
#define MDCR_FEAT_TRF MDCR_TTRF_BIT
#else
#define MDCR_FEAT_TRF (0)
#endif

#if ENABLE_SPE_FOR_NS
#define MDCR_FEAT_SPE MDCR_NSPB(1UL)
#else
#define MDCR_FEAT_SPE (0)
#endif

#define MDCR_EL3_FEATS ( \
	MDCR_FEAT_BRBE | \
	MDCR_FEAT_FGT | \
	MDCR_FEAT_TRBE | \
	MDCR_FEAT_TRF | \
	MDCR_FEAT_SPE | \
	MDCR_TDOSA_BIT | \
	MDCR_TDA_BIT | \
	MDCR_TPM_BIT | /* FEAT_PMUv3 */ \
	MDCR_PLAT_FEATS)
#define MDCR_EL3_FLIPPED ( \
	MDCR_FEAT_FGT | \
	MDCR_FEAT_TRF | \
	MDCR_TDOSA_BIT | \
	MDCR_TDA_BIT | \
	MDCR_TPM_BIT | \
	MDCR_PLAT_FLIPPED)
/* Upper bits of the multi-bit fields (see comment above) are ignored. */
#define MDCR_EL3_IGNORED ( \
	MDCR_EBWE_BIT | \
	MDCR_EnPMSN_BIT | \
	MDCR_SBRBE(2UL) | \
	MDCR_MTPME_BIT | \
	MDCR_NSTBE_BIT | \
	MDCR_NSTB(2UL) | \
	MDCR_SDD_BIT | \
	MDCR_SPD32(3UL) | \
	MDCR_NSPB(2UL) | \
	MDCR_NSPBE_BIT | \
	MDCR_PLAT_IGNORED)
CASSERT((MDCR_EL3_FEATS & MDCR_EL3_IGNORED) == 0, mdcr_feat_is_ignored);
CASSERT((MDCR_EL3_FLIPPED & MDCR_EL3_FEATS) == MDCR_EL3_FLIPPED, mdcr_flipped_not_a_feat);

#define MPAM3_EL3_FEATS (MPAM3_EL3_TRAPLOWER_BIT)
#define MPAM3_EL3_FLIPPED (MPAM3_EL3_TRAPLOWER_BIT)
#define MPAM3_EL3_IGNORED (MPAM3_EL3_MPAMEN_BIT)
CASSERT((MPAM3_EL3_FEATS & MPAM3_EL3_IGNORED) == 0, mpam3_feat_is_ignored);
CASSERT((MPAM3_EL3_FLIPPED & MPAM3_EL3_FEATS) == MPAM3_EL3_FLIPPED, mpam3_flipped_not_a_feat);

/* The hex representations of these registers' S3 encoding */
#define SCR_EL3_OPCODE			U(0x1E1100)
#define CPTR_EL3_OPCODE			U(0x1E1140)
#define MDCR_EL3_OPCODE			U(0x1E1320)
#define MPAM3_EL3_OPCODE		U(0x1EA500)

#endif /* ARCH_FEATURE_AVAILABILITY */
#endif /* __ASSEMBLER__ */
#endif /* ARM_ARCH_SVC_H */