/*
 * arch/arm/include/asm/assembler.h
 *
 * Copyright (C) 1996-2000 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This file contains arm architecture specific defines
 * for the different processors.
 *
 * Do not include any C declarations in this file - it is included by
 * assembler source.
 */

Marek Vasutd0869182016-05-26 18:01:37 +020017#include <asm/unified.h>
Stefan Agnera257f2e2014-12-18 18:10:33 +010018
Matthias Weisser93416c12011-03-10 21:36:32 +000019/*
20 * Endian independent macros for shifting bytes within registers.
21 */
22#ifndef __ARMEB__
Stefan Agnera257f2e2014-12-18 18:10:33 +010023#define lspull lsr
24#define lspush lsl
Matthias Weisser93416c12011-03-10 21:36:32 +000025#define get_byte_0 lsl #0
26#define get_byte_1 lsr #8
27#define get_byte_2 lsr #16
28#define get_byte_3 lsr #24
29#define put_byte_0 lsl #0
30#define put_byte_1 lsl #8
31#define put_byte_2 lsl #16
32#define put_byte_3 lsl #24
33#else
Stefan Agnera257f2e2014-12-18 18:10:33 +010034#define lspull lsl
35#define lspush lsr
Matthias Weisser93416c12011-03-10 21:36:32 +000036#define get_byte_0 lsr #24
37#define get_byte_1 lsr #16
38#define get_byte_2 lsr #8
39#define get_byte_3 lsl #0
40#define put_byte_0 lsl #24
41#define put_byte_1 lsl #16
42#define put_byte_2 lsl #8
43#define put_byte_3 lsl #0
44#endif
45
46/*
47 * Data preload for architectures that support it
48 */
49#if defined(__ARM_ARCH_5E__) || defined(__ARM_ARCH_5TE__) || \
50 defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || \
51 defined(__ARM_ARCH_6T2__) || defined(__ARM_ARCH_6Z__) || \
52 defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_7A__) || \
53 defined(__ARM_ARCH_7R__)
54#define PLD(code...) code
55#else
56#define PLD(code...)
57#endif
Stefan Agnera257f2e2014-12-18 18:10:33 +010058
Tom Rini9c734be2017-03-02 09:59:30 -050059/*
Sergei Antonov85f8c352022-08-21 16:34:20 +030060 * Use 'bx lr' everywhere except ARMv4 (without 'T') where only 'mov pc, lr'
61 * works
Tom Rini9c734be2017-03-02 09:59:30 -050062 */
Stefan Agnera257f2e2014-12-18 18:10:33 +010063 .irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
64 .macro ret\c, reg
Sergei Antonov85f8c352022-08-21 16:34:20 +030065
66 /* ARMv4- don't know bx lr but the assembler fails to see that */
67#ifdef __ARM_ARCH_4__
68 mov\c pc, \reg
69#else
Stefan Agnera257f2e2014-12-18 18:10:33 +010070 .ifeqs "\reg", "lr"
71 bx\c \reg
72 .else
73 mov\c pc, \reg
74 .endif
Sergei Antonov85f8c352022-08-21 16:34:20 +030075#endif
Stefan Agnera257f2e2014-12-18 18:10:33 +010076 .endm
77 .endr
Matthias Weisser93416c12011-03-10 21:36:32 +000078
79/*
Stefan Agnera257f2e2014-12-18 18:10:33 +010080 * Cache aligned, used for optimized memcpy/memset
81 * In the kernel this is only enabled for Feroceon CPU's...
82 * We disable it especially for Thumb builds since those instructions
83 * are not made in a Thumb ready way...
Matthias Weisser93416c12011-03-10 21:36:32 +000084 */
Tom Rini1c640a62017-03-18 09:01:44 -040085#if CONFIG_IS_ENABLED(SYS_THUMB_BUILD)
Stefan Agnera257f2e2014-12-18 18:10:33 +010086#define CALGN(code...)
87#else
Matthias Weisser93416c12011-03-10 21:36:32 +000088#define CALGN(code...) code
Stefan Agnera257f2e2014-12-18 18:10:33 +010089#endif