/*
 * arch/arm/include/asm/assembler.h
 *
 * Copyright (C) 1996-2000 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This file contains arm architecture specific defines
 * for the different processors.
 *
 * Do not include any C declarations in this file - it is included by
 * assembler source.
 */

#include <config.h>
#include <asm/unified.h>

/*
 * Endian independent macros for shifting bytes within registers.
 */
#ifndef __ARMEB__
#define lspull		lsr
#define lspush		lsl
#define get_byte_0	lsl #0
#define get_byte_1	lsr #8
#define get_byte_2	lsr #16
#define get_byte_3	lsr #24
#define put_byte_0	lsl #0
#define put_byte_1	lsl #8
#define put_byte_2	lsl #16
#define put_byte_3	lsl #24
#else
#define lspull		lsl
#define lspush		lsr
#define get_byte_0	lsr #24
#define get_byte_1	lsr #16
#define get_byte_2	lsr #8
#define get_byte_3	lsl #0
#define put_byte_0	lsl #24
#define put_byte_1	lsl #16
#define put_byte_2	lsl #8
#define put_byte_3	lsl #0
#endif

/*
 * Data preload for architectures that support it
 */
#if defined(__ARM_ARCH_5E__) || defined(__ARM_ARCH_5TE__) || \
	defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || \
	defined(__ARM_ARCH_6T2__) || defined(__ARM_ARCH_6Z__) || \
	defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_7A__) || \
	defined(__ARM_ARCH_7R__)
#define PLD(code...)	code
#else
#define PLD(code...)
#endif

/*
 * Use 'bx lr' everywhere except on ARMv4 (without 'T'), where only
 * 'mov pc, lr' works.
 */
	.irp	c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
	.macro	ret\c, reg

	/*
	 * ARMv4 and earlier do not know 'bx lr', but the assembler fails
	 * to see that.
	 */
#ifdef __ARM_ARCH_4__
	mov\c	pc, \reg
#else
	.ifeqs	"\reg", "lr"
	bx\c	\reg
	.else
	mov\c	pc, \reg
	.endif
#endif
	.endm
	.endr

/*
 * Cache aligned, used for optimized memcpy/memset.
 * In the kernel this is only enabled for Feroceon CPUs.
 * We disable it for Thumb builds since the code using it is not written
 * in a Thumb-ready way.
 */
#if CONFIG_IS_ENABLED(SYS_THUMB_BUILD)
#define CALGN(code...)
#else
#define CALGN(code...)	code
#endif
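
/*
 * Illustrative only (not part of the original header): a sketch of how the
 * endian-independent shift macros are typically combined in an unaligned
 * word copy, in the style of the kernel's copy templates.  The register
 * roles (r0 = dst, r1 = src, r3/r4 = scratch) and the #8/#24 shift pair
 * (source misaligned by one byte) are assumptions made for this example.
 *
 *	mov	r3, r3, lspull #8	@ drop the bytes already stored
 *	ldr	r4, [r1], #4		@ fetch the next aligned source word
 *	orr	r3, r3, r4, lspush #24	@ merge in the newly loaded bytes
 *	str	r3, [r0], #4		@ store one destination word
 *
 * On little-endian builds lspull/lspush expand to lsr/lsl; on big-endian
 * they swap, so the same source handles both byte orders.
 */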
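
/*
 * Illustrative only: PLD() (and likewise CALGN()) wraps code that should
 * simply vanish on configurations that cannot use it.  The loop below is a
 * hypothetical memcpy-style fragment; the register use and prefetch
 * distance are assumptions for this sketch, not code from this file.
 *
 *		PLD(	pld	[r1, #32]	)	@ prefetch ahead of the loads
 *	1:	ldmia	r1!, {r3 - r6}			@ copy 16 bytes per iteration
 *		stmia	r0!, {r3 - r6}
 *		subs	r2, r2, #16
 *		bhs	1b
 *
 * When PLD() is defined empty (pre-ARMv5E), the pld line is dropped at
 * preprocessing time and the loop still assembles unchanged.
 */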
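
/*
 * Illustrative only: the ret\c macros above are used instead of a bare
 * 'mov pc, ...' so that returns interwork correctly.  A hypothetical leaf
 * routine (register use assumed for the example) might end with:
 *
 *		moveq	r0, #0
 *		reteq	lr		@ 'bx lr' where available, 'mov pc, lr' on ARMv4
 *		...
 *		ret	lr
 *
 * Returning through a register other than lr (e.g. 'ret r3') falls back to
 * 'mov pc, r3', matching the .ifeqs test in the macro.
 */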