/* SPDX-License-Identifier: LGPL-2.1 OR MIT */
/*
 * ARM specific definitions for NOLIBC
 * Copyright (C) 2017-2022 Willy Tarreau <w@1wt.eu>
 */

#ifndef _NOLIBC_ARCH_ARM_H
#define _NOLIBC_ARCH_ARM_H

/* The struct returned by the stat() syscall, 32-bit only, the syscall returns
 * exactly 56 bytes (stops before the unused array). In big endian, the format
 * differs as devices are returned as short only.
 */
struct sys_stat_struct {
#if defined(__ARMEB__)
	unsigned short st_dev;
	unsigned short __pad1;
#else
	unsigned long  st_dev;
#endif
	unsigned long  st_ino;
	unsigned short st_mode;
	unsigned short st_nlink;
	unsigned short st_uid;
	unsigned short st_gid;

#if defined(__ARMEB__)
	unsigned short st_rdev;
	unsigned short __pad2;
#else
	unsigned long  st_rdev;
#endif
	unsigned long  st_size;
	unsigned long  st_blksize;
	unsigned long  st_blocks;

	unsigned long  st_atime;
	unsigned long  st_atime_nsec;
	unsigned long  st_mtime;
	unsigned long  st_mtime_nsec;

	unsigned long  st_ctime;
	unsigned long  st_ctime_nsec;
	unsigned long  __unused[2];
};
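
/*
 * Illustrative sketch only (not part of this header): a stat() wrapper could
 * copy the raw syscall result into a user-visible structure roughly as shown
 * below, assuming __NR_stat comes from the unistd headers nolibc includes;
 * "struct stat" and "example_stat" are hypothetical names for the example.
 *
 *   static int example_stat(const char *path, struct stat *buf)
 *   {
 *           struct sys_stat_struct raw;
 *           long ret = my_syscall2(__NR_stat, path, &raw);
 *
 *           if (ret >= 0) {
 *                   buf->st_dev  = raw.st_dev;
 *                   buf->st_ino  = raw.st_ino;
 *                   buf->st_mode = raw.st_mode;
 *                   buf->st_size = raw.st_size;
 *           }
 *           return ret;
 *   }
 */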

/* Syscalls for ARM in ARM or Thumb modes:
 *   - registers are 32-bit
 *   - stack is 8-byte aligned
 *     ( http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.faqs/ka4127.html)
 *   - syscall number is passed in r7
 *   - arguments are in r0, r1, r2, r3, r4, r5
 *   - the system call is performed by calling svc #0
 *   - syscall return comes in r0.
 *   - only lr is clobbered.
 *   - the arguments are cast to long and assigned into the target registers
 *     which are then simply passed as registers to the asm code, so that we
 *     don't run into issues with register constraints.
 *   - the syscall number is always specified last in order to allow forcing
 *     some registers before it (gcc refuses a %-register at the last position).
 *   - in Thumb mode without -fomit-frame-pointer, r7 is also used to store the
 *     frame pointer, so we can neither assign it as a register variable nor
 *     clobber it. Instead the syscall number is prepared in r6, which is
 *     swapped with r7 around the svc instruction so that the frame pointer is
 *     saved and restored, and r6 is marked as clobbered.
 *
 * Also, ARM supports the old_select syscall if newselect is not available.
 */
#define __ARCH_WANT_SYS_OLD_SELECT

#if (defined(__THUMBEB__) || defined(__THUMBEL__)) && \
    !defined(NOLIBC_OMIT_FRAME_POINTER)
/* swap r6,r7 needed in Thumb mode since we can neither use nor clobber r7 */
#define _NOLIBC_SYSCALL_REG        "r6"
#define _NOLIBC_THUMB_SET_R7       "eor r7, r6\neor r6, r7\neor r7, r6\n"
#define _NOLIBC_THUMB_RESTORE_R7   "mov r7, r6\n"

#else /* we're in ARM mode */
/* in ARM mode we can directly use r7 */
#define _NOLIBC_SYSCALL_REG        "r7"
#define _NOLIBC_THUMB_SET_R7       ""
#define _NOLIBC_THUMB_RESTORE_R7   ""

#endif /* end THUMB */
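
/*
 * For illustration, with the definitions above a syscall in Thumb mode is
 * emitted roughly as the sequence below (in ARM mode the number is loaded
 * directly into r7 and only "svc #0" is emitted): the three "eor" instructions
 * swap the syscall number prepared in r6 with the frame pointer held in r7,
 * and the final "mov" restores r7 afterwards.
 *
 *   eor r7, r6      @ xor-swap r6 and r7 without a scratch register
 *   eor r6, r7
 *   eor r7, r6
 *   svc #0          @ r7 = syscall number, r0..r5 = arguments
 *   mov r7, r6      @ restore the frame pointer saved in r6
 */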

#define my_syscall0(num) \
({ \
	register long _num __asm__(_NOLIBC_SYSCALL_REG) = (num); \
	register long _arg1 __asm__ ("r0"); \
	\
	__asm__ volatile ( \
		_NOLIBC_THUMB_SET_R7 \
		"svc #0\n" \
		_NOLIBC_THUMB_RESTORE_R7 \
		: "=r"(_arg1), "=r"(_num) \
		: "r"(_arg1), \
		  "r"(_num) \
		: "memory", "cc", "lr" \
	); \
	_arg1; \
})
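
/*
 * Example usage (illustrative only): a syscall taking no argument, assuming
 * __NR_getpid is provided by the unistd headers that nolibc includes;
 * "example_getpid" is a hypothetical name for the example.
 *
 *   static long example_getpid(void)
 *   {
 *           return my_syscall0(__NR_getpid);
 *   }
 */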

#define my_syscall1(num, arg1) \
({ \
	register long _num __asm__(_NOLIBC_SYSCALL_REG) = (num); \
	register long _arg1 __asm__ ("r0") = (long)(arg1); \
	\
	__asm__ volatile ( \
		_NOLIBC_THUMB_SET_R7 \
		"svc #0\n" \
		_NOLIBC_THUMB_RESTORE_R7 \
		: "=r"(_arg1), "=r"(_num) \
		: "r"(_arg1), \
		  "r"(_num) \
		: "memory", "cc", "lr" \
	); \
	_arg1; \
})

#define my_syscall2(num, arg1, arg2) \
({ \
	register long _num __asm__(_NOLIBC_SYSCALL_REG) = (num); \
	register long _arg1 __asm__ ("r0") = (long)(arg1); \
	register long _arg2 __asm__ ("r1") = (long)(arg2); \
	\
	__asm__ volatile ( \
		_NOLIBC_THUMB_SET_R7 \
		"svc #0\n" \
		_NOLIBC_THUMB_RESTORE_R7 \
		: "=r"(_arg1), "=r"(_num) \
		: "r"(_arg1), "r"(_arg2), \
		  "r"(_num) \
		: "memory", "cc", "lr" \
	); \
	_arg1; \
})

#define my_syscall3(num, arg1, arg2, arg3) \
({ \
	register long _num __asm__(_NOLIBC_SYSCALL_REG) = (num); \
	register long _arg1 __asm__ ("r0") = (long)(arg1); \
	register long _arg2 __asm__ ("r1") = (long)(arg2); \
	register long _arg3 __asm__ ("r2") = (long)(arg3); \
	\
	__asm__ volatile ( \
		_NOLIBC_THUMB_SET_R7 \
		"svc #0\n" \
		_NOLIBC_THUMB_RESTORE_R7 \
		: "=r"(_arg1), "=r"(_num) \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), \
		  "r"(_num) \
		: "memory", "cc", "lr" \
	); \
	_arg1; \
})

#define my_syscall4(num, arg1, arg2, arg3, arg4) \
({ \
	register long _num __asm__(_NOLIBC_SYSCALL_REG) = (num); \
	register long _arg1 __asm__ ("r0") = (long)(arg1); \
	register long _arg2 __asm__ ("r1") = (long)(arg2); \
	register long _arg3 __asm__ ("r2") = (long)(arg3); \
	register long _arg4 __asm__ ("r3") = (long)(arg4); \
	\
	__asm__ volatile ( \
		_NOLIBC_THUMB_SET_R7 \
		"svc #0\n" \
		_NOLIBC_THUMB_RESTORE_R7 \
		: "=r"(_arg1), "=r"(_num) \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), \
		  "r"(_num) \
		: "memory", "cc", "lr" \
	); \
	_arg1; \
})

#define my_syscall5(num, arg1, arg2, arg3, arg4, arg5) \
({ \
	register long _num __asm__(_NOLIBC_SYSCALL_REG) = (num); \
	register long _arg1 __asm__ ("r0") = (long)(arg1); \
	register long _arg2 __asm__ ("r1") = (long)(arg2); \
	register long _arg3 __asm__ ("r2") = (long)(arg3); \
	register long _arg4 __asm__ ("r3") = (long)(arg4); \
	register long _arg5 __asm__ ("r4") = (long)(arg5); \
	\
	__asm__ volatile ( \
		_NOLIBC_THUMB_SET_R7 \
		"svc #0\n" \
		_NOLIBC_THUMB_RESTORE_R7 \
		: "=r"(_arg1), "=r"(_num) \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), \
		  "r"(_num) \
		: "memory", "cc", "lr" \
	); \
	_arg1; \
})
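
/*
 * Illustrative sketch only: these macros return the raw kernel value, so a
 * negative result encodes -errno. A hypothetical wrapper converting that
 * convention could look like the code below; "example_write" and the use of
 * errno here are assumptions for the example, not part of this header.
 *
 *   static long example_write(int fd, const void *buf, unsigned long count)
 *   {
 *           long ret = my_syscall3(__NR_write, fd, buf, count);
 *
 *           if (ret < 0) {
 *                   errno = -ret;
 *                   ret = -1;
 *           }
 *           return ret;
 *   }
 */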

char **environ __attribute__((weak));
const unsigned long *_auxv __attribute__((weak));
/* startup code */
void __attribute__((weak,noreturn,optimize("omit-frame-pointer"))) _start(void)
{
	__asm__ volatile (
		"pop {%r0}\n"                 // argc was in the stack
		"mov %r1, %sp\n"              // argv = sp

		"add %r2, %r0, $1\n"          // envp = (argc + 1) ...
		"lsl %r2, %r2, $2\n"          //        * 4        ...
		"add %r2, %r2, %r1\n"         //        + argv
		"ldr %r3, 1f\n"               // r3 = &environ (see below)
		"str %r2, [r3]\n"             // store envp into environ

		"mov r4, r2\n"                // search for auxv (follows NULL after last env)
		"0:\n"
		"mov r5, r4\n"                // r5 = r4
		"add r4, r4, #4\n"            // r4 += 4
		"ldr r5,[r5]\n"               // r5 = *r5 = *(r4-4)
		"cmp r5, #0\n"                // and stop at NULL after last env
		"bne 0b\n"
		"ldr %r3, 2f\n"               // r3 = &_auxv (low bits)
		"str r4, [r3]\n"              // store r4 into _auxv

		"mov %r3, $8\n"               // AAPCS : sp must be 8-byte aligned in the
		"neg %r3, %r3\n"              // callee, and bl doesn't push (lr=pc)
		"and %r3, %r3, %r1\n"         // so we do sp = r1(=sp) & r3(=-8);
		"mov %sp, %r3\n"              //

		"bl main\n"                   // main() returns the status code, we'll exit with it.
		"movs r7, $1\n"               // NR_exit == 1
		"svc $0x00\n"
		".align 2\n"                  // below are the pointers to a few variables
		"1:\n"
		".word environ\n"
		"2:\n"
		".word _auxv\n"
	);
	__builtin_unreachable();
}

#endif // _NOLIBC_ARCH_ARM_H