/*
 * Copyright (c) 2013-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
#ifndef __ASM_MACROS_S__
#define __ASM_MACROS_S__

#include <arch.h>
#include <asm_macros_common.S>
#include <spinlock.h>

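	/*
	 * Save/restore the frame pointer (x29) and link register (x30) on
	 * the stack, mirroring a C function's prologue and epilogue. A
	 * routine that calls other functions can bracket its body with
	 * func_prologue/func_epilogue and then return with 'ret'.
	 */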
	.macro	func_prologue
	stp	x29, x30, [sp, #-0x10]!
	mov	x29, sp
	.endm

	.macro	func_epilogue
	ldp	x29, x30, [sp], #0x10
	.endm


	.macro	dcache_line_size  reg, tmp
	mrs	\tmp, ctr_el0
	ubfx	\tmp, \tmp, #16, #4
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm
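	/*
	 * CTR_EL0.DminLine (bits [19:16]) holds log2 of the number of
	 * 4-byte words in the smallest data cache line, so the sequence
	 * above computes the line size in bytes as 4 << DminLine. The
	 * icache_line_size macro below does the same using IminLine
	 * (bits [3:0]). Illustrative use, with x2/x3 chosen arbitrarily:
	 *	dcache_line_size x2, x3	// x2 = D-cache line size in bytes
	 */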


	.macro	icache_line_size  reg, tmp
	mrs	\tmp, ctr_el0
	and	\tmp, \tmp, #0xf
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm


	.macro	smc_check label
	mrs	x0, esr_el3
	ubfx	x0, x0, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x0, #EC_AARCH64_SMC
	b.ne	\label
	.endm
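	/*
	 * Illustrative use of smc_check (hypothetical label), e.g. at the
	 * top of a lower-EL synchronous exception handler. Note that it
	 * clobbers x0:
	 *	smc_check unexpected_trap	// falls through if EC is SMC64
	 */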

	/*
	 * Declare the exception vector table, enforcing it is aligned on a
	 * 2KB boundary, as required by the ARMv8 architecture.
	 * Pad with zero bytes, which decode as illegal AArch64
	 * instructions. This increases security and robustness and can
	 * facilitate debugging.
	 */
	.macro	vector_base  label, section_name=.vectors
	.section \section_name, "ax"
	.align 11, 0
	\label:
	.endm

	/*
	 * Create an entry in the exception vector table, enforcing it is
	 * aligned on a 128-byte boundary, as required by the ARMv8
	 * architecture. Pad with zero bytes, which decode as illegal
	 * AArch64 instructions. This increases security and robustness
	 * and can facilitate debugging.
	 */
	.macro	vector_entry  label, section_name=.vectors
	.cfi_sections .debug_frame
	.section \section_name, "ax"
	.align 7, 0
	.type \label, %function
	.func \label
	.cfi_startproc
	\label:
	.endm

	/*
	 * This macro verifies that the given vector doesn't exceed the
	 * architectural limit of 32 instructions. It is meant to be placed
	 * immediately after the last instruction in the vector and takes
	 * the vector entry label as its parameter.
	 */
	.macro	check_vector_size since
	.endfunc
	.cfi_endproc
	.if (. - \since) > (32 * 4)
	.error "Vector exceeds 32 instructions"
	.endif
	.endm
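	/*
	 * Illustrative use of the three macros above (names hypothetical):
	 *
	 *	vector_base my_exceptions
	 *
	 *	vector_entry my_sync_sp0
	 *	b	handle_sync		// at most 32 instructions
	 *	check_vector_size my_sync_sp0
	 *
	 * Each vector_entry starts 128 bytes after the previous one; the
	 * .align directives enforce this with zero-byte padding.
	 */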

#if ENABLE_PLAT_COMPAT
	/*
	 * This macro calculates the base address of an MP stack using the
	 * platform_get_core_pos() index, the name of the stack storage
	 * and the size of each stack.
	 * In: X0 = MPIDR of CPU whose stack is wanted
	 * Out: X0 = physical address of stack base
	 * Clobber: X30, X1, X2
	 */
	.macro	get_mp_stack _name, _size
	bl	platform_get_core_pos
	ldr	x2, =(\_name + \_size)
	mov	x1, #\_size
	madd	x0, x0, x1, x2
	.endm
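	/*
	 * Illustrative use, assuming a hypothetical 'platform_stacks'
	 * array holding one PLATFORM_STACK_SIZE-byte stack per core:
	 *	mrs	x0, mpidr_el1
	 *	get_mp_stack platform_stacks, PLATFORM_STACK_SIZE
	 *	mov	sp, x0
	 * Stacks grow downwards, so '=(\_name + \_size)' yields the
	 * (exclusive) top of the first stack.
	 */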
#endif

	/*
	 * This macro calculates the base address of the current CPU's MP
	 * stack using the plat_my_core_pos() index, the name of the stack
	 * storage and the size of each stack.
	 * Out: X0 = physical address of stack base
	 * Clobber: X30, X1, X2
	 */
	.macro	get_my_mp_stack _name, _size
	bl	plat_my_core_pos
	ldr	x2, =(\_name + \_size)
	mov	x1, #\_size
	madd	x0, x0, x1, x2
	.endm
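	/*
	 * Illustrative use, with the same hypothetical 'platform_stacks'
	 * array as above; unlike get_mp_stack, no MPIDR input is needed:
	 *	get_my_mp_stack platform_stacks, PLATFORM_STACK_SIZE
	 *	mov	sp, x0
	 */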

	/*
	 * This macro calculates the base address of a UP stack using the
	 * name of the stack storage and the size of the stack.
	 * Out: X0 = physical address of stack base
	 */
	.macro	get_up_stack _name, _size
	ldr	x0, =(\_name + \_size)
	.endm
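	/*
	 * Illustrative use on a uniprocessor image, assuming a
	 * hypothetical PLATFORM_STACK_SIZE-byte 'platform_stack':
	 *	get_up_stack platform_stack, PLATFORM_STACK_SIZE
	 *	mov	sp, x0
	 */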

	/*
	 * Helper macro to generate the best mov/movk combinations
	 * according to the value to be moved. The 16-bit field of '_val'
	 * at bit offset '_shift' is tested and, if not zero, moved into
	 * '_reg' without affecting the other bits.
	 */
	.macro	_mov_imm16 _reg, _val, _shift
	.if (\_val >> \_shift) & 0xffff
	.if (\_val & ((1 << \_shift) - 1))
	movk	\_reg, (\_val >> \_shift) & 0xffff, LSL \_shift
	.else
	mov	\_reg, \_val & (0xffff << \_shift)
	.endif
	.endif
	.endm

	/*
	 * Helper macro to load arbitrary values into 32 or 64-bit
	 * registers, generating the best mov/movk combinations. Many base
	 * addresses are 64KB aligned, in which case the macro avoids
	 * emitting an instruction for bits 15:0.
	 */
	.macro	mov_imm _reg, _val
	.if (\_val) == 0
	mov	\_reg, #0
	.else
	_mov_imm16	\_reg, (\_val), 0
	_mov_imm16	\_reg, (\_val), 16
	_mov_imm16	\_reg, (\_val), 32
	_mov_imm16	\_reg, (\_val), 48
	.endif
	.endm
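	/*
	 * Illustrative expansions (values hypothetical):
	 *	mov_imm	x0, 0xffff0000	// one 'mov': bits 15:0 are zero
	 *	mov_imm	x1, 0x12345678	// 'mov' then a single 'movk'
	 */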

	/*
	 * Macro to mark instances where we're jumping to a function and
	 * don't expect a return. To provide the function being jumped to
	 * with additional information, we use a 'bl' instruction to jump
	 * rather than 'b'.
	 *
	 * Debuggers infer the location of a call from where LR points to,
	 * which is usually the instruction after 'bl'. If this macro
	 * expansion happens to be the last location in a function, that
	 * will leave LR pointing to a location beyond the function,
	 * thereby misleading the debugger's backtrace. We therefore insert
	 * a 'nop' after the function call for debug builds, unless the
	 * 'skip_nop' parameter is non-zero.
	 */
	.macro	no_ret _func:req, skip_nop=0
	bl	\_func
#if DEBUG
	.ifeq \skip_nop
	nop
	.endif
#endif
	.endm
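	/*
	 * Illustrative use: jump out to a handler that never returns,
	 * keeping the backtrace intact in debug builds:
	 *	no_ret	plat_panic_handler
	 */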

	/*
	 * Reserve space for a spin lock in an assembly file.
	 */
	.macro	define_asm_spinlock _name:req
	.align	SPINLOCK_ASM_ALIGN
	\_name:
	.space	SPINLOCK_ASM_SIZE
	.endm
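	/*
	 * Illustrative use, pairing the storage with the spin_lock() and
	 * spin_unlock() functions (assumed to take the lock address in
	 * x0, as in this project's spinlock implementation):
	 *	define_asm_spinlock my_lock
	 *	...
	 *	adrp	x0, my_lock
	 *	add	x0, x0, :lo12:my_lock
	 *	bl	spin_lock
	 */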

#if RAS_EXTENSION
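	/*
	 * Error Synchronization Barrier from the RAS extension.
	 * 0xd503221f is the ESB instruction's encoding (a hint-space
	 * instruction); emitting it via .inst keeps the file buildable
	 * with assemblers that do not understand the 'esb' mnemonic.
	 */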
	.macro	esb
	.inst	0xd503221f
	.endm
#endif

#endif /* __ASM_MACROS_S__ */