/*
 * Copyright (c) 2013-2014, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
/* ---------------------------------------------
 * Zero out the callee-saved registers to prevent
 * leakage of secure state into the normal world
 * during the first ERET after a cold/warm boot.
 * ---------------------------------------------
 */
.macro zero_callee_saved_regs
        mov x19, xzr
        mov x20, xzr
        mov x21, xzr
        mov x22, xzr
        mov x23, xzr
        mov x24, xzr
        mov x25, xzr
        mov x26, xzr
        mov x27, xzr
        mov x28, xzr
        mov x29, xzr
.endm

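/* -----------------------------------------------------
 * Switch SP to the exception stack stored in the
 * context. On exit, \reg1 holds the old SP (i.e. the
 * context pointer) and \reg2 has been used as scratch.
 * -----------------------------------------------------
 */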
.macro switch_to_exception_stack reg1 reg2
        mov \reg1, sp
        ldr \reg2, [\reg1, #CTX_EL3STATE_OFFSET + CTX_EXCEPTION_SP]
        mov sp, \reg2
.endm

/* -----------------------------------------------------
 * Handle SMC exceptions separately from other
 * synchronous exceptions.
 * -----------------------------------------------------
 */
.macro handle_sync_exception
        stp x30, xzr, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
        mrs x30, esr_el3
        ubfx x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH

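        /* x30 now holds the exception class (EC) field of ESR_EL3 */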
        cmp x30, #EC_AARCH32_SMC
        b.eq smc_handler32

        cmp x30, #EC_AARCH64_SMC
        b.eq smc_handler64

        /* -----------------------------------------------------
         * The following code handles any synchronous exception
         * that is not an SMC. SP_EL3 points to a context
         * structure where all the scratch registers are saved.
         * An exception stack is also retrieved from the
         * context. Currently, a register dump is printed since
         * BL31 does not expect any such exceptions.
         * -----------------------------------------------------
         */
        bl save_scratch_registers
        switch_to_exception_stack x0 x1

        /* Save the core_context pointer for handled faults */
        stp x0, xzr, [sp, #-0x10]!
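        /* (xzr is stored with x0 only to keep SP 16-byte aligned) */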
        bl fault_handler
        ldp x0, xzr, [sp], #0x10

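        /* x0 holds the context pointer again; move SP back onto it */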
        mov sp, x0
        bl restore_scratch_registers
        ldp x30, xzr, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
        eret
.endm

/* -----------------------------------------------------
 * Use a platform-defined mechanism to report an
 * asynchronous exception.
 * -----------------------------------------------------
 */
.macro handle_async_exception type
        stp x30, xzr, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
        bl save_scratch_registers
        switch_to_exception_stack x0 x1

        /* Save the core_context pointer */
        stp x0, xzr, [sp, #-0x10]!
        mov x0, \type
        bl plat_report_exception
        ldp x0, xzr, [sp], #0x10

        mov sp, x0
        bl restore_scratch_registers
        ldp x30, xzr, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
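        /*
         * Note: unlike handle_sync_exception, this macro does not
         * end with an eret; the vector entry that invokes it is
         * expected to perform the return itself.
         */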
.endm