/*
 * Copyright (c) 2013-2014, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <arch.h>
#include <asm_macros.S>
#include <context.h>

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x17 (temporary caller-saved registers)
 * to save the essential EL3 system register context.
 * It assumes that 'x0' points to an 'el1_sys_regs'
 * structure where the register context will be saved.
 * -----------------------------------------------------
 */
	.global	el3_sysregs_context_save
func el3_sysregs_context_save

	mrs	x10, sctlr_el3
	str	x10, [x0, #CTX_SCTLR_EL3]

	mrs	x11, cptr_el3
	stp	x11, xzr, [x0, #CTX_CPTR_EL3]

	mrs	x13, cntfrq_el0
	mrs	x14, mair_el3
	stp	x13, x14, [x0, #CTX_CNTFRQ_EL0]

	mrs	x15, tcr_el3
	mrs	x16, ttbr0_el3
	stp	x15, x16, [x0, #CTX_TCR_EL3]

	mrs	x17, daif
	and	x17, x17, #(DAIF_ABT_BIT | DAIF_DBG_BIT)
	stp	x17, xzr, [x0, #CTX_DAIF_EL3]

	ret

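/* -----------------------------------------------------
 * Illustrative sketch only (not part of this file): a
 * caller is expected to place the base address of the
 * EL3 system register area in x0 before the call, and
 * to reuse the same pointer for the matching restore.
 * The register choice and the offset name below are
 * hypothetical and stand in for whatever the context
 * layout in context.h actually defines.
 *
 *	add	x0, x20, #CTX_EL3_SYSREGS_OFFSET // x20 = context base (assumed)
 *	bl	el3_sysregs_context_save
 *	...
 *	add	x0, x20, #CTX_EL3_SYSREGS_OFFSET
 *	bl	el3_sysregs_context_restore
 * -----------------------------------------------------
 */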
/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x17 (temporary caller-saved registers)
 * to restore the essential EL3 system register context.
 * It assumes that 'x0' points to an 'el1_sys_regs'
 * structure from which the register context will be
 * restored.
 *
 * Note that the sequence differs from that of the save
 * function, as we want the MMU to be enabled last.
 * -----------------------------------------------------
 */
	.global	el3_sysregs_context_restore
func el3_sysregs_context_restore

	ldp	x11, xzr, [x0, #CTX_CPTR_EL3]
	msr	cptr_el3, x11

	ldp	x13, x14, [x0, #CTX_CNTFRQ_EL0]
	msr	cntfrq_el0, x13
	msr	mair_el3, x14

	ldp	x15, x16, [x0, #CTX_TCR_EL3]
	msr	tcr_el3, x15
	msr	ttbr0_el3, x16

	ldp	x17, xzr, [x0, #CTX_DAIF_EL3]
	mrs	x11, daif
	orr	x17, x17, x11
	msr	daif, x17

	/* Make sure all the above changes are observed */
	isb

	ldr	x10, [x0, #CTX_SCTLR_EL3]
	msr	sctlr_el3, x10
	isb

	ret

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x17 (temporary caller-saved registers)
 * to save the EL1 system register context. It assumes
 * that 'x0' points to an 'el1_sys_regs' structure where
 * the register context will be saved.
 * -----------------------------------------------------
 */
	.global	el1_sysregs_context_save
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, sctlr_el1
	mrs	x16, actlr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, tcr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_TCR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x11, dacr32_el2
	mrs	x12, ifsr32_el2
	stp	x11, x12, [x0, #CTX_DACR32_EL2]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save the NS timer registers if the build is configured to do so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	mrs	x15, fpexc32_el2
	str	x15, [x0, #CTX_FP_FPEXC32_EL2]

	ret

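/* -----------------------------------------------------
 * Illustrative sketch only (not part of this file): on
 * a world switch the save above is typically paired
 * with a restore of the incoming world's context before
 * the exception return. The register choices and the
 * offset name below are hypothetical.
 *
 *	add	x0, x20, #CTX_SYSREGS_OFFSET	// outgoing context (assumed)
 *	bl	el1_sysregs_context_save
 *	...					// change security state
 *	add	x0, x21, #CTX_SYSREGS_OFFSET	// incoming context (assumed)
 *	bl	el1_sysregs_context_restore
 * -----------------------------------------------------
 */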
/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS, using x9-x17 (temporary caller-saved registers)
 * to restore the EL1 system register context. It
 * assumes that 'x0' points to an 'el1_sys_regs'
 * structure from which the register context will be
 * restored.
 * -----------------------------------------------------
 */
	.global	el1_sysregs_context_restore
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	actlr_el1, x16

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_TCR_EL1]
	msr	tcr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x11, x12, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x11
	msr	ifsr32_el2, x12

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore the NS timer registers if the build is configured to do so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif

	ldr	x15, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x15

	/* No explicit ISB is required here as the ERET covers it */

	ret

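/* -----------------------------------------------------
 * Illustrative sketch only: the restore above only
 * reinstates the lower EL state. The caller still has
 * to program the EL3 return state (from wherever it
 * keeps the saved values; x9/x10 below are placeholders)
 * before the final exception return, e.g.:
 *
 *	msr	spsr_el3, x9	// saved SPSR for the target world
 *	msr	elr_el3, x10	// saved return address
 *	eret
 * -----------------------------------------------------
 */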
/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS (AAPCS64), using x9-x17 (temporary caller-saved
 * registers) to save the floating point register
 * context. It assumes that 'x0' points to a 'fp_regs'
 * structure where the register context will be saved.
 *
 * Accesses to the VFP registers will trap if
 * CPTR_EL3.TFP is set. However, Trusted Firmware
 * currently neither uses the VFP registers nor sets
 * this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * -----------------------------------------------------
 */
	.global	fpregs_context_save
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

	ret

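/* -----------------------------------------------------
 * Illustrative sketch only: if a future image does set
 * the CPTR_EL3.TFP trap (bit 10), the trap would have
 * to be cleared around the FP context helpers, roughly
 * as follows:
 *
 *	mrs	x9, cptr_el3
 *	bic	x9, x9, #(1 << 10)	// clear TFP
 *	msr	cptr_el3, x9
 *	isb
 *	bl	fpregs_context_save
 * -----------------------------------------------------
 */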
/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS (AAPCS64), using x9-x17 (temporary caller-saved
 * registers) to restore the floating point register
 * context. It assumes that 'x0' points to a 'fp_regs'
 * structure from which the register context will be
 * restored.
 *
 * Accesses to the VFP registers will trap if
 * CPTR_EL3.TFP is set. However, Trusted Firmware
 * currently neither uses the VFP registers nor sets
 * this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * -----------------------------------------------------
 */
	.global	fpregs_context_restore
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

	/*
	 * No explicit ISB is required here as the ERET
	 * used to switch to secure EL1 or the non-secure
	 * world covers it
	 */

	ret