Soby Mathew | d29f67b | 2016-05-05 12:31:57 +0100 | [diff] [blame] | 1 | /* |
Boyan Karatotev | e7d7c27 | 2023-01-25 16:55:18 +0000 | [diff] [blame] | 2 | * Copyright (c) 2016-2023, ARM Limited and Contributors. All rights reserved. |
Soby Mathew | d29f67b | 2016-05-05 12:31:57 +0100 | [diff] [blame] | 3 | * |
dp-arm | fa3cf0b | 2017-05-03 09:38:09 +0100 | [diff] [blame] | 4 | * SPDX-License-Identifier: BSD-3-Clause |
Soby Mathew | d29f67b | 2016-05-05 12:31:57 +0100 | [diff] [blame] | 5 | */ |
Antonio Nino Diaz | 5eb8837 | 2018-11-08 10:20:19 +0000 | [diff] [blame] | 6 | #ifndef ASM_MACROS_S |
| 7 | #define ASM_MACROS_S |
Soby Mathew | d29f67b | 2016-05-05 12:31:57 +0100 | [diff] [blame] | 8 | |
| 9 | #include <arch.h> |
Antonio Nino Diaz | e0f9063 | 2018-12-14 00:18:21 +0000 | [diff] [blame] | 10 | #include <common/asm_macros_common.S> |
Boyan Karatotev | e7d7c27 | 2023-01-25 16:55:18 +0000 | [diff] [blame] | 11 | #include <lib/cpus/cpu_ops.h> |
Antonio Nino Diaz | e0f9063 | 2018-12-14 00:18:21 +0000 | [diff] [blame] | 12 | #include <lib/spinlock.h> |
Soby Mathew | d29f67b | 2016-05-05 12:31:57 +0100 | [diff] [blame] | 13 | |
	/*
	 * TLBI instruction with type specifier that implements the workaround for
	 * errata 813419 of Cortex-A57.
	 * The workaround issues the TLB maintenance write twice with a DSB ISH
	 * in between; without the erratum a single write to the CP15 TLBI
	 * register is sufficient.
	 * _reg:    register holding the TLBI argument
	 * _coproc: CP15 register encoding of the TLBI operation (from arch.h)
	 */
#if ERRATA_A57_813419
#define TLB_INVALIDATE(_reg, _coproc) \
	stcopr	_reg, _coproc; \
	dsb	ish; \
	stcopr	_reg, _coproc
#else
#define TLB_INVALIDATE(_reg, _coproc) \
	stcopr	_reg, _coproc
#endif
| 27 | |
	/*
	 * Coprocessor register accessors.
	 * Thin wrappers around the raw MRC/MCR (32-bit) and MRRC/MCRR
	 * (64-bit) instructions; callers pass the coproc/opc/CRn/CRm
	 * encoding macros defined in arch.h.
	 */

	/* Read a 32-bit coprocessor register into \reg. */
	.macro ldcopr reg, coproc, opc1, CRn, CRm, opc2
	mrc	\coproc, \opc1, \reg, \CRn, \CRm, \opc2
	.endm

	/* Read a 64-bit coprocessor register into the \reg1, \reg2 pair. */
	.macro ldcopr16 reg1, reg2, coproc, opc1, CRm
	mrrc	\coproc, \opc1, \reg1, \reg2, \CRm
	.endm

	/* Write \reg to a 32-bit coprocessor register. */
	.macro stcopr reg, coproc, opc1, CRn, CRm, opc2
	mcr	\coproc, \opc1, \reg, \CRn, \CRm, \opc2
	.endm

	/* Write the \reg1, \reg2 pair to a 64-bit coprocessor register. */
	.macro stcopr16 reg1, reg2, coproc, opc1, CRm
	mcrr	\coproc, \opc1, \reg1, \reg2, \CRm
	.endm
| 46 | |
	/* Cache line size helpers */

	/*
	 * Compute the data cache line size in bytes.
	 * CTR.DminLine holds the log2 of the smallest data cache line
	 * length in words, so the size is CPU_WORD_SIZE << DminLine.
	 * Out:     \reg = data cache line size in bytes
	 * Clobber: \tmp
	 */
	.macro dcache_line_size reg, tmp
	ldcopr	\tmp, CTR
	ubfx	\tmp, \tmp, #CTR_DMINLINE_SHIFT, #CTR_DMINLINE_WIDTH
	mov	\reg, #CPU_WORD_SIZE
	lsl	\reg, \reg, \tmp
	.endm
| 54 | |
	/*
	 * Compute the instruction cache line size in bytes.
	 * CTR.IminLine occupies the least significant bits of CTR, so a
	 * plain AND with the mask extracts it without a shift; the size
	 * is CPU_WORD_SIZE << IminLine.
	 * Out:     \reg = instruction cache line size in bytes
	 * Clobber: \tmp
	 */
	.macro icache_line_size reg, tmp
	ldcopr	\tmp, CTR
	and	\tmp, \tmp, #CTR_IMINLINE_MASK
	mov	\reg, #CPU_WORD_SIZE
	lsl	\reg, \reg, \tmp
	.endm
| 61 | |
	/*
	 * Declare the exception vector table, enforcing it is aligned on a
	 * 32 byte boundary (.align 5 == 2^5 bytes).  The table is emitted
	 * into the dedicated ".vectors" section so the linker script can
	 * place it.
	 */
	.macro vector_base label
	.section .vectors, "ax"
	.align 5
	\label:
	.endm
| 71 | |
	/*
	 * This macro calculates the base address of the current CPU's multi
	 * processor(MP) stack using the plat_my_core_pos() index, the name of
	 * the stack storage and the size of each stack.
	 * The result is the address just past this CPU's stack slot, i.e.
	 * the initial stack pointer (stacks grow downwards).
	 * Out: r0 = physical address of stack base
	 * Clobber: r14, r1, r2
	 */
	.macro get_my_mp_stack _name, _size
	bl	plat_my_core_pos		/* r0 = this CPU's linear index */
	ldr	r2, =(\_name + \_size)		/* r2 = end of CPU0's stack slot */
	mov	r1, #\_size
	mla	r0, r0, r1, r2			/* r0 = index * size + r2 */
	.endm
| 85 | |
	/*
	 * This macro calculates the base address of a uniprocessor(UP) stack
	 * using the name of the stack storage and the size of the stack.
	 * The result is the address just past the stack storage, i.e. the
	 * initial stack pointer (the stack grows downwards).
	 * Out: r0 = physical address of stack base
	 */
	.macro get_up_stack _name, _size
	ldr	r0, =(\_name + \_size)
	.endm
| 94 | |
#if ARM_ARCH_MAJOR == 7 && !defined(ARMV7_SUPPORTS_VIRTUALIZATION)
	/*
	 * Macro for mitigating against speculative execution.
	 * ARMv7 cores without Virtualization extension do not support the
	 * eret instruction, so "movs pc, lr" performs the exception return
	 * instead.  The barriers that follow are never architecturally
	 * executed; they exist to stop the CPU speculating past the return.
	 */
	.macro exception_return
	movs	pc, lr
	dsb	nsh
	isb
	.endm

#else
	/*
	 * Macro for mitigating against speculative execution beyond ERET. Uses the
	 * speculation barrier instruction introduced by FEAT_SB, if it's enabled.
	 * Otherwise fall back on a DSB NSH + ISB pair after the eret; those
	 * instructions are never architecturally executed but fence off
	 * speculation past the exception return.
	 */
	.macro exception_return
	eret
#if ENABLE_FEAT_SB
	sb
#else
	dsb	nsh
	isb
#endif
	.endm
#endif
| 122 | |
#if (ARM_ARCH_MAJOR == 7)
	/*
	 * ARMv7 does not support the stl (store-release) instruction, so
	 * emulate release semantics with an explicit barrier before the
	 * store, followed by a dsb to ensure the store has completed.
	 * _reg:        register holding the value to store
	 * _write_lock: memory operand to store to
	 */
	.macro stl _reg, _write_lock
	dmb
	str	\_reg, \_write_lock
	dsb
	.endm
#endif
| 131 | |
	/*
	 * Helper macro to generate the best mov/movw/movt combinations
	 * according to the value to be moved.  When the top 16 bits of
	 * \_val are zero a single instruction is emitted; otherwise a
	 * movw/movt pair builds the full 32-bit value.
	 * \_val must be an assembly-time constant (the .if is evaluated
	 * at assembly time, not at run time).
	 */
	.macro mov_imm _reg, _val
		.if ((\_val) & 0xffff0000) == 0
			mov	\_reg, #(\_val)
		.else
			movw	\_reg, #((\_val) & 0xffff)
			movt	\_reg, #((\_val) >> 16)
		.endif
	.endm
| 144 | |
	/*
	 * Macro to mark instances where we're jumping to a function and don't
	 * expect a return. To provide the function being jumped to with
	 * additional information, we use 'bl' instruction to jump rather than
	 * 'b'.
	 *
	 * Debuggers infer the location of a call from where LR points to, which
	 * is usually the instruction after 'bl'. If this macro expansion
	 * happens to be the last location in a function, that'll cause the LR
	 * to point a location beyond the function, thereby misleading debugger
	 * back trace. We therefore insert a 'nop' after the function call for
	 * debug builds, unless 'skip_nop' parameter is non-zero.
	 */
	.macro no_ret _func:req, skip_nop=0
	bl	\_func
#if DEBUG
	/* .ifeq expands the nop only when \skip_nop is 0 */
	.ifeq \skip_nop
	nop
	.endif
#endif
	.endm
| 166 | |
	/*
	 * Reserve space for a spin lock in assembly file.
	 * Emits the label \_name aligned and sized per the spinlock layout
	 * constants from lib/spinlock.h, so the storage is compatible with
	 * the C spinlock functions.
	 */
	.macro define_asm_spinlock _name:req
	.align	SPINLOCK_ASM_ALIGN
	\_name:
	.space	SPINLOCK_ASM_SIZE
	.endm
| 175 | |
	/*
	 * Helper macro to OR the bottom 32 bits of `_val` into `_reg_l`
	 * and the top 32 bits of `_val` into `_reg_h`. If either the bottom
	 * or top word of `_val` is zero, the corresponding OR operation
	 * is skipped.
	 * NOTE(review): each nonzero 32-bit half must be encodable as an
	 * ARM modified immediate, otherwise the assembler rejects the orr.
	 */
	.macro orr64_imm _reg_l, _reg_h, _val
		.if (\_val >> 32)
			orr \_reg_h, \_reg_h, #(\_val >> 32)
		.endif
		.if (\_val & 0xffffffff)
			orr \_reg_l, \_reg_l, #(\_val & 0xffffffff)
		.endif
	.endm
| 190 | |
	/*
	 * Helper macro to bitwise-clear bits in `_reg_l` and
	 * `_reg_h` given a 64 bit immediate `_val`. The set bits
	 * in the bottom word of `_val` dictate which bits from
	 * `_reg_l` should be cleared. Similarly, the set bits in
	 * the top word of `_val` dictate which bits from `_reg_h`
	 * should be cleared. If either the bottom or top word of
	 * `_val` is zero, the corresponding BIC operation is skipped.
	 * NOTE(review): each nonzero 32-bit half must be encodable as an
	 * ARM modified immediate, otherwise the assembler rejects the bic.
	 */
	.macro bic64_imm _reg_l, _reg_h, _val
		.if (\_val >> 32)
			bic \_reg_h, \_reg_h, #(\_val >> 32)
		.endif
		.if (\_val & 0xffffffff)
			bic \_reg_l, \_reg_l, #(\_val & 0xffffffff)
		.endif
	.endm
| 208 | |
Usama Arif | b69ac08 | 2018-12-12 17:08:33 +0000 | [diff] [blame] | 209 | /* |
| 210 | * Helper macro for carrying out division in software when |
| 211 | * hardware division is not suported. \top holds the dividend |
| 212 | * in the function call and the remainder after |
| 213 | * the function is executed. \bot holds the divisor. \div holds |
| 214 | * the quotient and \temp is a temporary registed used in calcualtion. |
| 215 | * The division algorithm has been obtained from: |
| 216 | * http://www.keil.com/support/man/docs/armasm/armasm_dom1359731155623.htm |
| 217 | */ |
| 218 | .macro softudiv div:req,top:req,bot:req,temp:req |
| 219 | |
| 220 | mov \temp, \bot |
| 221 | cmp \temp, \top, lsr #1 |
| 222 | div1: |
| 223 | movls \temp, \temp, lsl #1 |
| 224 | cmp \temp, \top, lsr #1 |
| 225 | bls div1 |
| 226 | mov \div, #0 |
| 227 | |
| 228 | div2: |
| 229 | cmp \top, \temp |
| 230 | subcs \top, \top,\temp |
| 231 | ADC \div, \div, \div |
| 232 | mov \temp, \temp, lsr #1 |
| 233 | cmp \temp, \bot |
| 234 | bhs div2 |
| 235 | .endm |
Antonio Nino Diaz | 5eb8837 | 2018-11-08 10:20:19 +0000 | [diff] [blame] | 236 | #endif /* ASM_MACROS_S */ |