/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
#ifndef ASM_MACROS_S
#define ASM_MACROS_S

#include <arch.h>
#include <common/asm_macros_common.S>
#include <lib/spinlock.h>

#if ENABLE_BTI && !ARM_ARCH_AT_LEAST(8, 5)
#error Branch Target Identification requires ARM_ARCH_MINOR >= 5
#endif

/*
 * TLBI instruction with type specifier that implements the workaround for
 * erratum 813419 of Cortex-A57 or erratum 1286807 of Cortex-A76.
 */
#if ERRATA_A57_813419 || ERRATA_A76_1286807
#define TLB_INVALIDATE(_type) \
	tlbi	_type; \
	dsb	ish; \
	tlbi	_type
#else
#define TLB_INVALIDATE(_type) \
	tlbi	_type
#endif
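
/*
 * Illustrative use (not part of the original file): invalidate all EL3 TLB
 * entries inside a handler. On affected cores the macro expands to
 * tlbi; dsb ish; tlbi, so the repeated invalidation lands after the
 * barrier, as the workaround requires. The caller still completes the
 * sequence with its own barriers:
 *
 *	TLB_INVALIDATE(alle3)
 *	dsb	ish
 *	isb
 */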


	.macro	func_prologue
	stp	x29, x30, [sp, #-0x10]!
	mov	x29, sp
	.endm

	.macro	func_epilogue
	ldp	x29, x30, [sp], #0x10
	.endm
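
	/*
	 * Sketch of intended use ('my_helper' is a made-up name), with the
	 * func/endfunc helpers from the included asm_macros_common.S:
	 * bracket an assembly function so x29/x30 are saved and the frame
	 * chain stays walkable across nested calls.
	 *
	 * func my_helper
	 *	func_prologue
	 *	bl	some_other_func
	 *	func_epilogue
	 *	ret
	 * endfunc my_helper
	 */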


	/*
	 * Write the smallest data cache line size, in bytes, to \reg.
	 * Clobbers \tmp.
	 */
	.macro	dcache_line_size  reg, tmp
	mrs	\tmp, ctr_el0
	ubfx	\tmp, \tmp, #16, #4
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm


	/*
	 * Write the smallest instruction cache line size, in bytes, to
	 * \reg. Clobbers \tmp.
	 */
	.macro	icache_line_size  reg, tmp
	mrs	\tmp, ctr_el0
	and	\tmp, \tmp, #0xf
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm
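
	/*
	 * Both macros above read CTR_EL0: DminLine (bits [19:16]) and
	 * IminLine (bits [3:0]) hold log2 of the line size in words.
	 * Worked example: DminLine = 4 gives 4 << 4 = 64, i.e. a 64-byte
	 * data cache line, the common case on Cortex-A cores.
	 */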


	/*
	 * Branch to \label unless the exception that brought us here was
	 * an SMC executed in AArch64 state. Clobbers x0.
	 */
	.macro	smc_check label
	mrs	x0, esr_el3
	ubfx	x0, x0, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x0, #EC_AARCH64_SMC
	b.ne	\label
	.endm
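
	/*
	 * Illustrative use (the label name is made up): at the top of a
	 * synchronous exception handler, route anything that is not an
	 * AArch64 SMC to a separate path.
	 *
	 *	smc_check unexpected_sync_exception
	 */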

	/*
	 * Declare the exception vector table, enforcing it is aligned on a
	 * 2KB boundary, as required by the ARMv8 architecture.
	 * Use zero bytes as the fill value in the padding so that it
	 * decodes as illegal AArch64 instructions. This increases
	 * security, robustness and potentially facilitates debugging.
	 */
	.macro	vector_base  label, section_name=.vectors
	.section \section_name, "ax"
	.align 11, 0
	\label:
	.endm

	/*
	 * Create an entry in the exception vector table, enforcing it is
	 * aligned on a 128-byte boundary, as required by the ARMv8
	 * architecture. Use zero bytes as the fill value in the padding so
	 * that it decodes as illegal AArch64 instructions. This increases
	 * security, robustness and potentially facilitates debugging.
	 */
	.macro	vector_entry  label, section_name=.vectors
	.cfi_sections .debug_frame
	.section \section_name, "ax"
	.align 7, 0
	.type \label, %function
	.cfi_startproc
	\label:
	.endm

	/*
	 * Pad the current exception vector entry out to its full size of
	 * 32 instructions. If the entry contains more than 32
	 * instructions, the .fill directive below gets a negative count
	 * and the assembler emits an error.
	 */
	.macro end_vector_entry label
	.cfi_endproc
	.fill	\label + (32 * 4) - .
	.endm
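
	/*
	 * Sketch of the intended pattern (names are illustrative): declare
	 * the table once, then bracket each of the 16 entries between
	 * vector_entry and end_vector_entry, so an over-long handler fails
	 * to assemble instead of spilling into the next entry.
	 *
	 * vector_base my_exceptions
	 * vector_entry my_sync_sp_el0
	 *	b	handle_sync
	 * end_vector_entry my_sync_sp_el0
	 * ...
	 */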

	/*
	 * This macro calculates the base address of the current CPU's MP
	 * stack using the plat_my_core_pos() index, the name of the stack
	 * storage and the size of each stack.
	 * Out: X0 = physical address of stack base
	 * Clobber: X30, X1, X2
	 */
	.macro get_my_mp_stack _name, _size
	bl	plat_my_core_pos
	adrp	x2, (\_name + \_size)
	add	x2, x2, :lo12:(\_name + \_size)
	mov	x1, #\_size
	madd	x0, x0, x1, x2
	.endm
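
	/*
	 * Worked example (values illustrative): with _size = 0x1000 and a
	 * core index 'pos' from plat_my_core_pos(), the madd above yields
	 * x0 = _name + (pos + 1) * 0x1000, i.e. the highest address of
	 * that core's own stack region, suitable for a descending stack
	 * pointer.
	 */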

	/*
	 * This macro calculates the base address of a UP stack using the
	 * name of the stack storage and the size of the stack.
	 * Out: X0 = physical address of stack base
	 */
	.macro get_up_stack _name, _size
	adrp	x0, (\_name + \_size)
	add	x0, x0, :lo12:(\_name + \_size)
	.endm
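
	/*
	 * Illustrative use, assuming a 'platform_normal_stacks' region of
	 * PLATFORM_STACK_SIZE bytes (treat both names as hypothetical
	 * here):
	 *
	 *	get_up_stack platform_normal_stacks, PLATFORM_STACK_SIZE
	 *	mov	sp, x0
	 */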

	/*
	 * Helper macro to generate the best mov/movk combinations
	 * according to the value to be moved. The 16 bits starting at bit
	 * position '_shift' are tested and, if not all zero, they are
	 * moved into '_reg' without affecting the other bits.
	 */
	.macro _mov_imm16 _reg, _val, _shift
	.if (\_val >> \_shift) & 0xffff
	.if (\_val & (1 << \_shift - 1))
	movk	\_reg, (\_val >> \_shift) & 0xffff, LSL \_shift
	.else
	mov	\_reg, \_val & (0xffff << \_shift)
	.endif
	.endif
	.endm

	/*
	 * Helper macro to load arbitrary values into 32 or 64-bit
	 * registers, generating the best mov/movk combinations. Many base
	 * addresses are 64KB aligned; in that case the macro avoids
	 * emitting an instruction to update bits 15:0.
	 */
	.macro mov_imm _reg, _val
	.if (\_val) == 0
	mov	\_reg, #0
	.else
	_mov_imm16	\_reg, (\_val), 0
	_mov_imm16	\_reg, (\_val), 16
	_mov_imm16	\_reg, (\_val), 32
	_mov_imm16	\_reg, (\_val), 48
	.endif
	.endm
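
	/*
	 * Worked expansion (illustrative):
	 *
	 *	mov_imm	x0, 0x12345678
	 * becomes
	 *	mov	x0, #0x5678
	 *	movk	x0, #0x1234, lsl #16
	 *
	 * while the 64KB-aligned 0x80010000 collapses to the single
	 * instruction "mov x0, #0x80010000" (a movz with shift), since
	 * the low 16-bit chunk is zero and no earlier bits need
	 * preserving.
	 */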

	/*
	 * Macro to mark instances where we're jumping to a function and
	 * don't expect a return. To provide the function being jumped to
	 * with additional information, we use the 'bl' instruction to
	 * jump rather than 'b'.
	 *
	 * Debuggers infer the location of a call from where LR points to,
	 * which is usually the instruction after 'bl'. If this macro
	 * expansion happens to be the last location in a function, that
	 * will cause the LR to point to a location beyond the end of the
	 * function, thereby misleading the debugger's back trace. We
	 * therefore insert a 'nop' after the function call for debug
	 * builds, unless the 'skip_nop' parameter is non-zero.
	 */
	.macro no_ret _func:req, skip_nop=0
	bl	\_func
#if DEBUG
	.ifeq \skip_nop
	nop
	.endif
#endif
	.endm
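
	/*
	 * Illustrative use: tail out to a handler that never returns,
	 * e.g.
	 *
	 *	no_ret	plat_panic_handler
	 *
	 * where plat_panic_handler is TF-A's conventional never-returning
	 * platform hook; any noreturn function works the same way.
	 */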

	/*
	 * Reserve space for a spin lock in an assembly file.
	 */
	.macro define_asm_spinlock _name:req
	.align	SPINLOCK_ASM_ALIGN
	\_name:
	.space	SPINLOCK_ASM_SIZE
	.endm
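
	/*
	 * Sketch of use, assuming the spin_lock/spin_unlock helpers that
	 * take the lock address in x0 ('my_lock' is a made-up name):
	 *
	 *	define_asm_spinlock my_lock
	 *	...
	 *	adrp	x0, my_lock
	 *	add	x0, x0, :lo12:my_lock
	 *	bl	spin_lock
	 */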

#if RAS_EXTENSION
	.macro esb
	/*
	 * Error Synchronization Barrier (HINT #16), encoded directly
	 * since not all assemblers know the 'esb' mnemonic.
	 */
	.inst	0xd503221f
	.endm
#endif

	/*
	 * Helper macro to read a system register's value into x0.
	 */
	.macro read reg:req
#if ENABLE_BTI
	bti	j
#endif
	mrs	x0, \reg
	ret
	.endm

	/*
	 * Helper macro to write the value in x1 to a system register.
	 */
	.macro write reg:req
#if ENABLE_BTI
	bti	j
#endif
	msr	\reg, x1
	ret
	.endm
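
	/*
	 * Illustrative use (the label is made up): each expansion is a
	 * tiny accessor intended to be reachable via an indirect branch,
	 * which is why a 'bti j' landing pad is emitted when BTI is
	 * enabled.
	 *
	 * read_ttbr0_el1:
	 *	read	ttbr0_el1
	 */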

	/*
	 * Macro for mitigating against speculative execution beyond ERET.
	 * Uses the speculation barrier instruction introduced by FEAT_SB,
	 * if it's enabled.
	 */
	.macro exception_return
	eret
#if ENABLE_FEAT_SB
	sb
#else
	dsb	nsh
	isb
#endif
	.endm
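
	/*
	 * Illustrative use: close an exception handler with
	 *
	 *	exception_return
	 *
	 * instead of a bare 'eret', so the instructions following the
	 * handler are fenced off from speculation whether or not FEAT_SB
	 * is available.
	 */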

#endif /* ASM_MACROS_S */