/*
 * arch/arm/include/asm/assembler.h
 *
 * Copyright (C) 1996-2000 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This file contains arm architecture specific defines
 * for the different processors.
 *
 * Do not include any C declarations in this file - it is included by
 * assembler source.
 */

#include <config.h>
#include <asm/unified.h>

/*
 * Endian independent macros for shifting bytes within registers.
 */
#ifndef __ARMEB__
#define lspull		lsr
#define lspush		lsl
#define get_byte_0	lsl #0
#define get_byte_1	lsr #8
#define get_byte_2	lsr #16
#define get_byte_3	lsr #24
#define put_byte_0	lsl #0
#define put_byte_1	lsl #8
#define put_byte_2	lsl #16
#define put_byte_3	lsl #24
#else
#define lspull		lsl
#define lspush		lsr
#define get_byte_0	lsr #24
#define get_byte_1	lsr #16
#define get_byte_2	lsr #8
#define get_byte_3	lsl #0
#define put_byte_0	lsl #24
#define put_byte_1	lsl #16
#define put_byte_2	lsl #8
#define put_byte_3	lsl #0
#endif
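
/*
 * Illustrative use (a sketch only; the registers are arbitrary and not
 * part of this header): get_byte_N picks out the byte at memory offset N
 * of a loaded word, and lspull/lspush merge misaligned words, on either
 * endianness:
 *
 *	ldr	r3, [r1], #4		@ load a word
 *	mov	r2, r3, get_byte_1	@ byte at offset 1 in bits 7..0
 *	strb	r2, [r0], #1		@ store just that byte
 *
 *	mov	r3, r3, lspull #8	@ merge two words of a source that
 *	orr	r3, r3, r4, lspush #24	@ is misaligned by one byte
 */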

/*
 * Data preload for architectures that support it
 */
#if defined(__ARM_ARCH_5E__) || defined(__ARM_ARCH_5TE__) || \
	defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || \
	defined(__ARM_ARCH_6T2__) || defined(__ARM_ARCH_6Z__) || \
	defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_7A__) || \
	defined(__ARM_ARCH_7R__)
#define PLD(code...)	code
#else
#define PLD(code...)
#endif
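
/*
 * Example (sketch): callers wrap preload hints so that they compile away
 * on cores without the instruction, e.g. in a copy loop:
 *
 *	PLD(	pld	[r1, #32]	)
 */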

	.irp	c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
	.macro	ret\c, reg
#if defined(__ARM_ARCH_5E__) || defined(__ARM_ARCH_5TE__)
	mov\c	pc, \reg
#else
	.ifeqs	"\reg", "lr"
	bx\c	\reg
	.else
	mov\c	pc, \reg
	.endif
#endif
	.endm
	.endr
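
/*
 * Example (sketch): a return through lr then becomes
 *
 *	ret	lr		@ bx lr, or mov pc, lr on ARMv5E/5TE
 *	reteq	lr		@ conditional form generated by the .irp list
 */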

/*
 * Cache-aligned code paths, used by the optimized memcpy/memset.
 * In the kernel this is only enabled for Feroceon CPUs.
 * We disable it for Thumb builds because the wrapped instructions
 * are not written in a Thumb-compatible way.
 */
#ifdef CONFIG_SYS_THUMB_BUILD
#define CALGN(code...)
#else
#define CALGN(code...)	code
#endif
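
/*
 * Example (sketch): memcpy-style code wraps its cache-line alignment
 * prologue so it disappears in Thumb builds, e.g.
 *
 *	CALGN(	ands	ip, r0, #31	)	@ offset within a 32-byte line
 *	CALGN(	rsb	ip, ip, #32	)	@ bytes up to the next line
 */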