/*
 * Copyright (c) 2018-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef ARM_ARCH_SVC_H
#define ARM_ARCH_SVC_H

#define SMCCC_VERSION U(0x80000000)
#define SMCCC_ARCH_FEATURES U(0x80000001)
#define SMCCC_ARCH_SOC_ID U(0x80000002)
#define SMCCC_ARCH_WORKAROUND_1 U(0x80008000)
#define SMCCC_ARCH_WORKAROUND_2 U(0x80007FFF)
#define SMCCC_ARCH_WORKAROUND_3 U(0x80003FFF)
#define SMCCC_ARCH_FEATURE_AVAILABILITY U(0x80000003)

#define SMCCC_GET_SOC_VERSION U(0)
#define SMCCC_GET_SOC_REVISION U(1)
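
/*
 * Illustrative only: SMCCC_GET_SOC_VERSION/SMCCC_GET_SOC_REVISION are the
 * argument values that select the SMCCC_ARCH_SOC_ID sub-function. A
 * register-level call sketch, following the SMC Calling Convention (not an
 * interface defined by this header):
 *
 *   w0 = SMCCC_ARCH_SOC_ID
 *   w1 = SMCCC_GET_SOC_VERSION   (or SMCCC_GET_SOC_REVISION)
 *   smc #0
 *   w0 = requested value, or a negative error code
 */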

#ifndef __ASSEMBLER__
#if ARCH_FEATURE_AVAILABILITY
#include <lib/cassert.h>

#if ENABLE_FEAT_FGT2
#define SCR_FEAT_FGT2 SCR_FGTEN2_BIT
#else
#define SCR_FEAT_FGT2 (0)
#endif

#if ENABLE_FEAT_FPMR
#define SCR_FEAT_FPMR SCR_EnFPM_BIT
#else
#define SCR_FEAT_FPMR (0)
#endif

#if ENABLE_FEAT_D128
#define SCR_FEAT_D128 SCR_D128En_BIT
#else
#define SCR_FEAT_D128 (0)
#endif

#if ENABLE_FEAT_S1PIE
#define SCR_FEAT_S1PIE SCR_PIEN_BIT
#else
#define SCR_FEAT_S1PIE (0)
#endif

#if ENABLE_FEAT_SCTLR2
#define SCR_FEAT_SCTLR2 SCR_SCTLR2En_BIT
#else
#define SCR_FEAT_SCTLR2 (0)
#endif

#if ENABLE_FEAT_TCR2
#define SCR_FEAT_TCR2 SCR_TCR2EN_BIT
#else
#define SCR_FEAT_TCR2 (0)
#endif

#if ENABLE_FEAT_THE
#define SCR_FEAT_THE SCR_RCWMASKEn_BIT
#else
#define SCR_FEAT_THE (0)
#endif

#if ENABLE_SME_FOR_NS
#define SCR_FEAT_SME SCR_ENTP2_BIT
#else
#define SCR_FEAT_SME (0)
#endif

#if ENABLE_FEAT_GCS
#define SCR_FEAT_GCS SCR_GCSEn_BIT
#else
#define SCR_FEAT_GCS (0)
#endif

#if ENABLE_FEAT_HCX
#define SCR_FEAT_HCX SCR_HXEn_BIT
#else
#define SCR_FEAT_HCX (0)
#endif

#if ENABLE_FEAT_LS64_ACCDATA
#define SCR_FEAT_LS64_ACCDATA (SCR_ADEn_BIT | SCR_EnAS0_BIT)
#else
#define SCR_FEAT_LS64_ACCDATA (0)
#endif

#if ENABLE_FEAT_AMUv1p1
#define SCR_FEAT_AMUv1p1 SCR_AMVOFFEN_BIT
#else
#define SCR_FEAT_AMUv1p1 (0)
#endif

#if ENABLE_FEAT_ECV
#define SCR_FEAT_ECV SCR_ECVEN_BIT
#else
#define SCR_FEAT_ECV (0)
#endif

#if ENABLE_FEAT_FGT
#define SCR_FEAT_FGT SCR_FGTEN_BIT
#else
#define SCR_FEAT_FGT (0)
#endif

#if ENABLE_FEAT_MTE2
#define SCR_FEAT_MTE2 SCR_ATA_BIT
#else
#define SCR_FEAT_MTE2 (0)
#endif

#if ENABLE_FEAT_CSV2_2
#define SCR_FEAT_CSV2_2 SCR_EnSCXT_BIT
#else
#define SCR_FEAT_CSV2_2 (0)
#endif

#if ENABLE_FEAT_RAS
#define SCR_FEAT_RAS SCR_TERR_BIT
#else
#define SCR_FEAT_RAS (0)
#endif

#ifndef SCR_PLAT_FEATS
#define SCR_PLAT_FEATS (0)
#endif
#ifndef SCR_PLAT_FLIPPED
#define SCR_PLAT_FLIPPED (0)
#endif
#ifndef SCR_PLAT_IGNORED
#define SCR_PLAT_IGNORED (0)
#endif

#ifndef CPTR_PLAT_FEATS
#define CPTR_PLAT_FEATS (0)
#endif
#ifndef CPTR_PLAT_FLIPPED
#define CPTR_PLAT_FLIPPED (0)
#endif

#ifndef MDCR_PLAT_FEATS
#define MDCR_PLAT_FEATS (0)
#endif
#ifndef MDCR_PLAT_FLIPPED
#define MDCR_PLAT_FLIPPED (0)
#endif
#ifndef MDCR_PLAT_IGNORED
#define MDCR_PLAT_IGNORED (0)
#endif
/*
 * XYZ_EL3_FEATS - lists all bits that are relevant for feature enablement.
 * It is a constant list based on what features are expected. This relies on
 * the fact that if the feature is in any way disabled, then the relevant bit
 * will not be written by context management.
 *
 * XYZ_EL3_FLIPPED - bits that are active-low (0 means "enabled"), rather than
 * the usual active-high. The spec always uses active 1 to mean that the
 * feature will not trap.
 *
 * XYZ_EL3_IGNORED - lists all bits that are not relevant for feature
 * enablement and should not be reported to lower ELs.
 */
#define SCR_EL3_FEATS ( \
	SCR_FEAT_FGT2 | \
	SCR_FEAT_FPMR | \
	SCR_FEAT_D128 | \
	SCR_FEAT_S1PIE | \
	SCR_FEAT_SCTLR2 | \
	SCR_FEAT_TCR2 | \
	SCR_FEAT_THE | \
	SCR_FEAT_SME | \
	SCR_FEAT_GCS | \
	SCR_FEAT_HCX | \
	SCR_FEAT_LS64_ACCDATA | \
	SCR_FEAT_AMUv1p1 | \
	SCR_FEAT_ECV | \
	SCR_FEAT_FGT | \
	SCR_FEAT_MTE2 | \
	SCR_FEAT_CSV2_2 | \
	SCR_APK_BIT | /* FEAT_Pauth */ \
	SCR_FEAT_RAS | \
	SCR_PLAT_FEATS)
#define SCR_EL3_FLIPPED ( \
	SCR_FEAT_RAS | \
	SCR_PLAT_FLIPPED)
#define SCR_EL3_IGNORED ( \
	SCR_API_BIT | \
	SCR_RW_BIT | \
	SCR_SIF_BIT | \
	SCR_HCE_BIT | \
	SCR_FIQ_BIT | \
	SCR_IRQ_BIT | \
	SCR_NS_BIT | \
	SCR_RES1_BITS | \
	SCR_PLAT_IGNORED)
CASSERT((SCR_EL3_FEATS & SCR_EL3_IGNORED) == 0, scr_feat_is_ignored);
CASSERT((SCR_EL3_FLIPPED & SCR_EL3_FEATS) == SCR_EL3_FLIPPED, scr_flipped_not_a_feat);
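
/*
 * A minimal sketch (illustrative only, not a definition used elsewhere) of
 * how a FEATURE_AVAILABILITY handler might combine these masks, assuming
 * read_scr_el3() returns the live SCR_EL3 value:
 *
 *   u_register_t avail = (read_scr_el3() ^ SCR_EL3_FLIPPED) & SCR_EL3_FEATS;
 *
 * i.e. normalise the active-low bits and drop everything that is not a
 * feature enable, so that a set bit in the result always means "enabled for
 * this world".
 */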

#if ENABLE_SYS_REG_TRACE_FOR_NS
#define CPTR_SYS_REG_TRACE (TCPAC_BIT | TTA_BIT)
#else
#define CPTR_SYS_REG_TRACE (0)
#endif

#if ENABLE_FEAT_AMU
#define CPTR_FEAT_AMU TAM_BIT
#else
#define CPTR_FEAT_AMU (0)
#endif

#if ENABLE_SME_FOR_NS
#define CPTR_FEAT_SME ESM_BIT
#else
#define CPTR_FEAT_SME (0)
#endif

#if ENABLE_SVE_FOR_NS
#define CPTR_FEAT_SVE CPTR_EZ_BIT
#else
#define CPTR_FEAT_SVE (0)
#endif

#define CPTR_EL3_FEATS ( \
	CPTR_SYS_REG_TRACE | \
	CPTR_FEAT_AMU | \
	CPTR_FEAT_SME | \
	TFP_BIT | \
	CPTR_FEAT_SVE | \
	CPTR_PLAT_FEATS)
#define CPTR_EL3_FLIPPED ( \
	CPTR_SYS_REG_TRACE | \
	CPTR_FEAT_AMU | \
	TFP_BIT | \
	CPTR_PLAT_FLIPPED)
CASSERT((CPTR_EL3_FLIPPED & CPTR_EL3_FEATS) == CPTR_EL3_FLIPPED, cptr_flipped_not_a_feat);

/*
 * Some feature enables are expressed with more than one bit in order to cater
 * for multi-world enablement. In those cases (BRB, TRB, SPE) only the last bit
 * is used and reported. This (ab)uses the convenient fact that the last bit
 * always means "enabled for this world" when context switched correctly.
 * The per-world values have been adjusted such that this is always true.
 */
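/*
 * For example, MDCR_EL3.NSTB is a two-bit field: its low bit (MDCR_NSTB(1UL),
 * contributed via MDCR_FEAT_TRBE below) appears in MDCR_EL3_FEATS, while its
 * high bit (MDCR_NSTB(2UL)) is listed in MDCR_EL3_IGNORED.
 */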
#if ENABLE_BRBE_FOR_NS
#define MDCR_FEAT_BRBE MDCR_SBRBE(1UL)
#else
#define MDCR_FEAT_BRBE (0)
#endif

#if ENABLE_FEAT_FGT
#define MDCR_FEAT_FGT MDCR_TDCC_BIT
#else
#define MDCR_FEAT_FGT (0)
#endif

#if ENABLE_TRBE_FOR_NS
#define MDCR_FEAT_TRBE MDCR_NSTB(1UL)
#else
#define MDCR_FEAT_TRBE (0)
#endif

#if ENABLE_TRF_FOR_NS
#define MDCR_FEAT_TRF MDCR_TTRF_BIT
#else
#define MDCR_FEAT_TRF (0)
#endif

#if ENABLE_SPE_FOR_NS
#define MDCR_FEAT_SPE MDCR_NSPB(1UL)
#else
#define MDCR_FEAT_SPE (0)
#endif

#define MDCR_EL3_FEATS ( \
	MDCR_FEAT_BRBE | \
	MDCR_FEAT_FGT | \
	MDCR_FEAT_TRBE | \
	MDCR_FEAT_TRF | \
	MDCR_FEAT_SPE | \
	MDCR_TDOSA_BIT | \
	MDCR_TDA_BIT | \
	MDCR_TPM_BIT | /* FEAT_PMUv3 */ \
	MDCR_PLAT_FEATS)
#define MDCR_EL3_FLIPPED ( \
	MDCR_FEAT_FGT | \
	MDCR_FEAT_TRF | \
	MDCR_TDOSA_BIT | \
	MDCR_TDA_BIT | \
	MDCR_TPM_BIT | \
	MDCR_PLAT_FLIPPED)
#define MDCR_EL3_IGNORED ( \
	MDCR_EBWE_BIT | \
	MDCR_EnPMSN_BIT | \
	MDCR_SBRBE(2UL) | \
	MDCR_MTPME_BIT | \
	MDCR_NSTBE_BIT | \
	MDCR_NSTB(2UL) | \
	MDCR_SDD_BIT | \
	MDCR_SPD32(3UL) | \
	MDCR_NSPB(2UL) | \
	MDCR_NSPBE_BIT | \
	MDCR_PLAT_IGNORED)
CASSERT((MDCR_EL3_FEATS & MDCR_EL3_IGNORED) == 0, mdcr_feat_is_ignored);
CASSERT((MDCR_EL3_FLIPPED & MDCR_EL3_FEATS) == MDCR_EL3_FLIPPED, mdcr_flipped_not_a_feat);

#define MPAM3_EL3_FEATS (MPAM3_EL3_TRAPLOWER_BIT)
#define MPAM3_EL3_FLIPPED (MPAM3_EL3_TRAPLOWER_BIT)
#define MPAM3_EL3_IGNORED (MPAM3_EL3_MPAMEN_BIT)
CASSERT((MPAM3_EL3_FEATS & MPAM3_EL3_IGNORED) == 0, mpam3_feat_is_ignored);
CASSERT((MPAM3_EL3_FLIPPED & MPAM3_EL3_FEATS) == MPAM3_EL3_FLIPPED, mpam3_flipped_not_a_feat);

/* The hex representations of these registers' S3 encodings */
#define SCR_EL3_OPCODE U(0x1E1100)
#define CPTR_EL3_OPCODE U(0x1E1140)
#define MDCR_EL3_OPCODE U(0x1E1320)
#define MPAM3_EL3_OPCODE U(0x1EA500)
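
/*
 * These values follow the MRS/MSR system register operand layout:
 * (op0 << 19) | (op1 << 16) | (CRn << 12) | (CRm << 8) | (op2 << 5).
 * Worked example for SCR_EL3 (S3_6_C1_C1_0):
 *   (3 << 19) | (6 << 16) | (1 << 12) | (1 << 8) | (0 << 5) = 0x1E1100
 */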

#endif /* ARCH_FEATURE_AVAILABILITY */
#endif /* __ASSEMBLER__ */
#endif /* ARM_ARCH_SVC_H */