/*
 * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef __ASM_MACROS_S__
#define __ASM_MACROS_S__

#include <arch.h>
#include <asm_macros_common.S>
#include <spinlock.h>

#define WORD_SIZE	4

	/*
	 * Coprocessor register accessors
	 */
	.macro ldcopr reg, coproc, opc1, CRn, CRm, opc2
	mrc	\coproc, \opc1, \reg, \CRn, \CRm, \opc2
	.endm

	.macro ldcopr16 reg1, reg2, coproc, opc1, CRm
	mrrc	\coproc, \opc1, \reg1, \reg2, \CRm
	.endm

	.macro stcopr reg, coproc, opc1, CRn, CRm, opc2
	mcr	\coproc, \opc1, \reg, \CRn, \CRm, \opc2
	.endm

	.macro stcopr16 reg1, reg2, coproc, opc1, CRm
	mcrr	\coproc, \opc1, \reg1, \reg2, \CRm
	.endm
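
	/*
	 * Illustrative usage (a sketch, not part of the upstream file): read
	 * the Cache Type Register into r0 and write the vector base address
	 * from r1, assuming the CTR and VBAR coprocessor encodings provided
	 * by arch.h:
	 *	ldcopr	r0, CTR
	 *	stcopr	r1, VBAR
	 */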

	/* Cache line size helpers */
	.macro dcache_line_size reg, tmp
	ldcopr	\tmp, CTR
	ubfx	\tmp, \tmp, #CTR_DMINLINE_SHIFT, #CTR_DMINLINE_WIDTH
	mov	\reg, #WORD_SIZE
	lsl	\reg, \reg, \tmp
	.endm

	.macro icache_line_size reg, tmp
	ldcopr	\tmp, CTR
	and	\tmp, \tmp, #CTR_IMINLINE_MASK
	mov	\reg, #WORD_SIZE
	lsl	\reg, \reg, \tmp
	.endm
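
	/*
	 * Worked example (illustrative): CTR.DminLine holds log2 of the data
	 * cache line length in words. If DminLine reads as 4, the value
	 * computed above is WORD_SIZE << 4 = 4 << 4 = 64 bytes per line.
	 */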

	/*
	 * Declare the exception vector table, enforcing that it is aligned
	 * on a 32-byte boundary.
	 */
	.macro vector_base label
	.section .vectors, "ax"
	.align 5
	\label:
	.endm
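
	/*
	 * Illustrative usage (a sketch; the table and handler names below are
	 * hypothetical): declare an aligned vector table whose entries branch
	 * to the corresponding handlers:
	 *	vector_base plat_vector_table
	 *		b	reset_handler
	 *		b	undef_handler
	 *		...
	 */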

	/*
	 * This macro calculates the base address of the current CPU's
	 * multiprocessor (MP) stack using the plat_my_core_pos() index, the
	 * name of the stack storage and the size of each stack.
	 * Out: r0 = physical address of stack base
	 * Clobber: r14, r1, r2
	 */
	.macro get_my_mp_stack _name, _size
	bl	plat_my_core_pos
	ldr	r2, =(\_name + \_size)
	mov	r1, #\_size
	mla	r0, r0, r1, r2
	.endm
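
	/*
	 * Worked example (illustrative): with a per-CPU stack size of 0x800
	 * and plat_my_core_pos() returning 2, the mla above computes
	 * r0 = 2 * 0x800 + (\_name + 0x800) = \_name + 0x1800, i.e. the top
	 * of the third CPU's stack within the \_name storage (the stack
	 * grows downwards from this address).
	 */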

	/*
	 * This macro calculates the base address of a uniprocessor (UP) stack
	 * using the name of the stack storage and the size of the stack
	 * Out: r0 = physical address of stack base
	 */
	.macro get_up_stack _name, _size
	ldr	r0, =(\_name + \_size)
	.endm

	/*
	 * Macro to mark instances where we're jumping to a function and don't
	 * expect a return. To provide the function being jumped to with
	 * additional information, we use the 'bl' instruction to jump rather
	 * than 'b'.
	 *
	 * Debuggers infer the location of a call from where LR points to,
	 * which is usually the instruction after 'bl'. If this macro
	 * expansion happens to be the last location in a function, that will
	 * cause the LR to point to a location beyond the function, thereby
	 * misleading the debugger back trace. We therefore insert a 'nop'
	 * after the function call for debug builds, unless the 'skip_nop'
	 * parameter is non-zero.
	 */
	.macro no_ret _func:req, skip_nop=0
	bl	\_func
#if DEBUG
	.ifeq \skip_nop
	nop
	.endif
#endif
	.endm
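
	/*
	 * Illustrative usage (a sketch; plat_panic_handler is assumed here to
	 * be a function that never returns):
	 *	no_ret	plat_panic_handler
	 */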

	/*
	 * Reserve space for a spin lock in an assembly file.
	 */
	.macro define_asm_spinlock _name:req
	.align	SPINLOCK_ASM_ALIGN
	\_name:
	.space	SPINLOCK_ASM_SIZE
	.endm
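
	/*
	 * Illustrative usage (a sketch; 'asm_lock' is a placeholder name and
	 * the spin_lock helper declared in spinlock.h is assumed to take the
	 * lock address in r0 per the AAPCS):
	 *	define_asm_spinlock asm_lock
	 *	...
	 *	ldr	r0, =asm_lock
	 *	bl	spin_lock
	 */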

	/*
	 * Helper macro to OR the bottom 32 bits of `_val` into `_reg_l`
	 * and the top 32 bits of `_val` into `_reg_h`. If either the bottom
	 * or top word of `_val` is zero, the corresponding OR operation
	 * is skipped.
	 */
	.macro orr64_imm _reg_l, _reg_h, _val
	.if (\_val >> 32)
	orr	\_reg_h, \_reg_h, #(\_val >> 32)
	.endif
	.if (\_val & 0xffffffff)
	orr	\_reg_l, \_reg_l, #(\_val & 0xffffffff)
	.endif
	.endm

	/*
	 * Helper macro to bitwise-clear bits in `_reg_l` and
	 * `_reg_h` given a 64 bit immediate `_val`. The set bits
	 * in the bottom word of `_val` dictate which bits from
	 * `_reg_l` should be cleared. Similarly, the set bits in
	 * the top word of `_val` dictate which bits from `_reg_h`
	 * should be cleared. If either the bottom or top word of
	 * `_val` is zero, the corresponding BIC operation is skipped.
	 */
	.macro bic64_imm _reg_l, _reg_h, _val
	.if (\_val >> 32)
	bic	\_reg_h, \_reg_h, #(\_val >> 32)
	.endif
	.if (\_val & 0xffffffff)
	bic	\_reg_l, \_reg_l, #(\_val & 0xffffffff)
	.endif
	.endm
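
	/*
	 * Illustrative usage (a sketch): set, then clear, bit 0 of the low
	 * word and bit 1 of the high word of the 64-bit value held in the
	 * r0/r1 register pair. Each 32-bit half of the immediate must be
	 * encodable as an ARM data-processing immediate:
	 *	orr64_imm	r0, r1, 0x0000000200000001
	 *	bic64_imm	r0, r1, 0x0000000200000001
	 */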

#endif /* __ASM_MACROS_S__ */