/*
 * Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <assert_macros.S>
#include <asm_macros.S>

	.globl	amu_group0_cnt_read_internal
	.globl	amu_group0_cnt_write_internal
	.globl	amu_group1_cnt_read_internal
	.globl	amu_group1_cnt_write_internal
	.globl	amu_group1_set_evtype_internal

/*
 * uint64_t amu_group0_cnt_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU counter
 * and return it in `x0`.
 */
func amu_group0_cnt_read_internal
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out-of-bounds index.  Ensure `idx` is valid.
	 */
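	/*
	 * Group 0 implements the four architected counters
	 * AMEVCNTR00_EL0 to AMEVCNTR03_EL0, so `idx` must lie in
	 * [0, 3]; the check below verifies that `idx >> 2` is zero.
	 */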
	mov	x1, x0
	lsr	x1, x1, #2
	cmp	x1, #0
	ASM_ASSERT(eq)
#endif

	/*
	 * Given `idx`, calculate the address of the mrs/ret instruction pair
	 * in the table below.
	 */
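	/*
	 * A computed branch is used because mrs/msr encode the system
	 * register name in the instruction itself, so the counter cannot
	 * be selected by a runtime index.  Each table entry is an
	 * mrs/ret pair, i.e. two 4-byte instructions, hence the scaling
	 * of `idx` by 8.
	 */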
	adr	x1, 1f
	lsl	x0, x0, #3	/* each mrs/ret sequence is 8 bytes */
	add	x1, x1, x0
	br	x1

1:
	mrs	x0, AMEVCNTR00_EL0	/* index 0 */
	ret
	mrs	x0, AMEVCNTR01_EL0	/* index 1 */
	ret
	mrs	x0, AMEVCNTR02_EL0	/* index 2 */
	ret
	mrs	x0, AMEVCNTR03_EL0	/* index 3 */
	ret
endfunc amu_group0_cnt_read_internal

/*
 * void amu_group0_cnt_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU counter.
 */
func amu_group0_cnt_write_internal
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out-of-bounds index.  Ensure `idx` is valid.
	 */
	mov	x2, x0
	lsr	x2, x2, #2
	cmp	x2, #0
	ASM_ASSERT(eq)
#endif

	/*
	 * Given `idx`, calculate the address of the msr/ret instruction pair
	 * in the table below.
	 */
	adr	x2, 1f
	lsl	x0, x0, #3	/* each msr/ret sequence is 8 bytes */
	add	x2, x2, x0
	br	x2

1:
	msr	AMEVCNTR00_EL0, x1	/* index 0 */
	ret
	msr	AMEVCNTR01_EL0, x1	/* index 1 */
	ret
	msr	AMEVCNTR02_EL0, x1	/* index 2 */
	ret
	msr	AMEVCNTR03_EL0, x1	/* index 3 */
	ret
endfunc amu_group0_cnt_write_internal

/*
 * uint64_t amu_group1_cnt_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU counter
 * and return it in `x0`.
 */
func amu_group1_cnt_read_internal
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out-of-bounds index.  Ensure `idx` is valid.
	 */
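	/*
	 * Group 1 provides up to sixteen auxiliary counters
	 * AMEVCNTR10_EL0 to AMEVCNTR1F_EL0, so `idx` must lie in
	 * [0, 15]; the check below verifies that `idx >> 4` is zero.
	 */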
	mov	x1, x0
	lsr	x1, x1, #4
	cmp	x1, #0
	ASM_ASSERT(eq)
#endif

	/*
	 * Given `idx`, calculate the address of the mrs/ret instruction pair
	 * in the table below.
	 */
	adr	x1, 1f
	lsl	x0, x0, #3	/* each mrs/ret sequence is 8 bytes */
	add	x1, x1, x0
	br	x1

1:
	mrs	x0, AMEVCNTR10_EL0	/* index 0 */
	ret
	mrs	x0, AMEVCNTR11_EL0	/* index 1 */
	ret
	mrs	x0, AMEVCNTR12_EL0	/* index 2 */
	ret
	mrs	x0, AMEVCNTR13_EL0	/* index 3 */
	ret
	mrs	x0, AMEVCNTR14_EL0	/* index 4 */
	ret
	mrs	x0, AMEVCNTR15_EL0	/* index 5 */
	ret
	mrs	x0, AMEVCNTR16_EL0	/* index 6 */
	ret
	mrs	x0, AMEVCNTR17_EL0	/* index 7 */
	ret
	mrs	x0, AMEVCNTR18_EL0	/* index 8 */
	ret
	mrs	x0, AMEVCNTR19_EL0	/* index 9 */
	ret
	mrs	x0, AMEVCNTR1A_EL0	/* index 10 */
	ret
	mrs	x0, AMEVCNTR1B_EL0	/* index 11 */
	ret
	mrs	x0, AMEVCNTR1C_EL0	/* index 12 */
	ret
	mrs	x0, AMEVCNTR1D_EL0	/* index 13 */
	ret
	mrs	x0, AMEVCNTR1E_EL0	/* index 14 */
	ret
	mrs	x0, AMEVCNTR1F_EL0	/* index 15 */
	ret
endfunc amu_group1_cnt_read_internal

/*
 * void amu_group1_cnt_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU counter.
 */
func amu_group1_cnt_write_internal
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out-of-bounds index.  Ensure `idx` is valid.
	 */
	mov	x2, x0
	lsr	x2, x2, #4
	cmp	x2, #0
	ASM_ASSERT(eq)
#endif

	/*
	 * Given `idx`, calculate the address of the msr/ret instruction pair
	 * in the table below.
	 */
	adr	x2, 1f
	lsl	x0, x0, #3	/* each msr/ret sequence is 8 bytes */
	add	x2, x2, x0
	br	x2

1:
	msr	AMEVCNTR10_EL0, x1	/* index 0 */
	ret
	msr	AMEVCNTR11_EL0, x1	/* index 1 */
	ret
	msr	AMEVCNTR12_EL0, x1	/* index 2 */
	ret
	msr	AMEVCNTR13_EL0, x1	/* index 3 */
	ret
	msr	AMEVCNTR14_EL0, x1	/* index 4 */
	ret
	msr	AMEVCNTR15_EL0, x1	/* index 5 */
	ret
	msr	AMEVCNTR16_EL0, x1	/* index 6 */
	ret
	msr	AMEVCNTR17_EL0, x1	/* index 7 */
	ret
	msr	AMEVCNTR18_EL0, x1	/* index 8 */
	ret
	msr	AMEVCNTR19_EL0, x1	/* index 9 */
	ret
	msr	AMEVCNTR1A_EL0, x1	/* index 10 */
	ret
	msr	AMEVCNTR1B_EL0, x1	/* index 11 */
	ret
	msr	AMEVCNTR1C_EL0, x1	/* index 12 */
	ret
	msr	AMEVCNTR1D_EL0, x1	/* index 13 */
	ret
	msr	AMEVCNTR1E_EL0, x1	/* index 14 */
	ret
	msr	AMEVCNTR1F_EL0, x1	/* index 15 */
	ret
endfunc amu_group1_cnt_write_internal

/*
 * void amu_group1_set_evtype_internal(int idx, unsigned int val);
 *
 * Program the AMU event type register indexed by `idx`
 * with the value `val`.
 */
func amu_group1_set_evtype_internal
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out-of-bounds index.  Ensure `idx` is valid.
	 */
	mov	x2, x0
	lsr	x2, x2, #4
	cmp	x2, #0
	ASM_ASSERT(eq)

	/* `val` must be in the range [0, 65535]. */
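	/*
	 * The event type occupies a 16-bit field in AMEVTYPER1<n>_EL0,
	 * so any value wider than 16 bits is invalid; the check below
	 * verifies that `val >> 16` is zero.
	 */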
	mov	x2, x1
	lsr	x2, x2, #16
	cmp	x2, #0
	ASM_ASSERT(eq)
#endif

	/*
	 * Given `idx`, calculate the address of the msr/ret instruction pair
	 * in the table below.
	 */
	adr	x2, 1f
	lsl	x0, x0, #3	/* each msr/ret sequence is 8 bytes */
	add	x2, x2, x0
	br	x2

1:
	msr	AMEVTYPER10_EL0, x1	/* index 0 */
	ret
	msr	AMEVTYPER11_EL0, x1	/* index 1 */
	ret
	msr	AMEVTYPER12_EL0, x1	/* index 2 */
	ret
	msr	AMEVTYPER13_EL0, x1	/* index 3 */
	ret
	msr	AMEVTYPER14_EL0, x1	/* index 4 */
	ret
	msr	AMEVTYPER15_EL0, x1	/* index 5 */
	ret
	msr	AMEVTYPER16_EL0, x1	/* index 6 */
	ret
	msr	AMEVTYPER17_EL0, x1	/* index 7 */
	ret
	msr	AMEVTYPER18_EL0, x1	/* index 8 */
	ret
	msr	AMEVTYPER19_EL0, x1	/* index 9 */
	ret
	msr	AMEVTYPER1A_EL0, x1	/* index 10 */
	ret
	msr	AMEVTYPER1B_EL0, x1	/* index 11 */
	ret
	msr	AMEVTYPER1C_EL0, x1	/* index 12 */
	ret
	msr	AMEVTYPER1D_EL0, x1	/* index 13 */
	ret
	msr	AMEVTYPER1E_EL0, x1	/* index 14 */
	ret
	msr	AMEVTYPER1F_EL0, x1	/* index 15 */
	ret
endfunc amu_group1_set_evtype_internal