/* SPDX-License-Identifier: LGPL-2.1 OR MIT */
/*
 * ARM specific definitions for NOLIBC
 * Copyright (C) 2017-2022 Willy Tarreau <w@1wt.eu>
 */

#ifndef _NOLIBC_ARCH_ARM_H
#define _NOLIBC_ARCH_ARM_H

#include "compiler.h"
#include "crt.h"

/* Syscalls for ARM in ARM or Thumb modes:
 *   - registers are 32-bit
 *   - the stack is 8-byte aligned
 *     (http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.faqs/ka4127.html)
 *   - the syscall number is passed in r7
 *   - arguments are passed in r0, r1, r2, r3, r4, r5
 *   - the system call is performed by calling svc #0
 *   - the syscall return value comes back in r0
 *   - only lr is clobbered
 *   - the arguments are cast to long and assigned into the target registers,
 *     which are then simply passed as registers to the asm code, so that we
 *     don't run into issues with register constraints
 *   - the syscall number is always specified last so that some registers can
 *     be forced before it (gcc refuses a %-register in the last position)
 *   - in Thumb mode without -fomit-frame-pointer, r7 is also used to store
 *     the frame pointer, so we can neither assign it as a register variable
 *     nor clobber it. Instead we assign r6, swap it with r7 around the svc,
 *     and let the compiler track r6 through the _num operands. In effect we
 *     just use an ordinary register whose value is moved into r7 after the
 *     original r7 has been saved.
 *
 * Also, ARM supports the old_select syscall if newselect is not available.
 */
#define __ARCH_WANT_SYS_OLD_SELECT

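/*
 * Illustrative sketch of the convention above: in ARM mode, a two-argument
 * syscall boils down to roughly the following sequence, with the kernel's
 * return value (or a negative errno on failure) coming back in r0:
 *
 *	mov	r0, <arg1>
 *	mov	r1, <arg2>
 *	mov	r7, <syscall number>
 *	svc	#0
 */
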
#if (defined(__THUMBEB__) || defined(__THUMBEL__)) && \
    !defined(NOLIBC_OMIT_FRAME_POINTER)
/* swapping r6/r7 is needed in Thumb mode since we can neither use nor clobber r7 */
#define _NOLIBC_SYSCALL_REG      "r6"
#define _NOLIBC_THUMB_SET_R7     "eor r7, r6\neor r6, r7\neor r7, r6\n"
#define _NOLIBC_THUMB_RESTORE_R7 "mov r7, r6\n"

#else  /* we're in ARM mode */
/* in ARM mode we can use r7 directly */
#define _NOLIBC_SYSCALL_REG      "r7"
#define _NOLIBC_THUMB_SET_R7     ""
#define _NOLIBC_THUMB_RESTORE_R7 ""

#endif /* end THUMB */
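/*
 * Note: the three "eor" instructions in _NOLIBC_THUMB_SET_R7 implement the
 * classic XOR swap of r6 and r7 without a scratch register: the syscall
 * number prepared in r6 lands in r7 for the "svc #0", while the caller's r7
 * (the Thumb frame pointer) is preserved in r6 and moved back into r7 by
 * _NOLIBC_THUMB_RESTORE_R7 once the kernel returns.
 */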

#define my_syscall0(num) \
({ \
	register long _num __asm__(_NOLIBC_SYSCALL_REG) = (num); \
	register long _arg1 __asm__ ("r0"); \
	\
	__asm__ volatile ( \
		_NOLIBC_THUMB_SET_R7 \
		"svc #0\n" \
		_NOLIBC_THUMB_RESTORE_R7 \
		: "=r"(_arg1), "=r"(_num) \
		: "r"(_arg1), \
		  "r"(_num) \
		: "memory", "cc", "lr" \
	); \
	_arg1; \
})

#define my_syscall1(num, arg1) \
({ \
	register long _num __asm__(_NOLIBC_SYSCALL_REG) = (num); \
	register long _arg1 __asm__ ("r0") = (long)(arg1); \
	\
	__asm__ volatile ( \
		_NOLIBC_THUMB_SET_R7 \
		"svc #0\n" \
		_NOLIBC_THUMB_RESTORE_R7 \
		: "=r"(_arg1), "=r"(_num) \
		: "r"(_arg1), \
		  "r"(_num) \
		: "memory", "cc", "lr" \
	); \
	_arg1; \
})

#define my_syscall2(num, arg1, arg2) \
({ \
	register long _num __asm__(_NOLIBC_SYSCALL_REG) = (num); \
	register long _arg1 __asm__ ("r0") = (long)(arg1); \
	register long _arg2 __asm__ ("r1") = (long)(arg2); \
	\
	__asm__ volatile ( \
		_NOLIBC_THUMB_SET_R7 \
		"svc #0\n" \
		_NOLIBC_THUMB_RESTORE_R7 \
		: "=r"(_arg1), "=r"(_num) \
		: "r"(_arg1), "r"(_arg2), \
		  "r"(_num) \
		: "memory", "cc", "lr" \
	); \
	_arg1; \
})

#define my_syscall3(num, arg1, arg2, arg3) \
({ \
	register long _num __asm__(_NOLIBC_SYSCALL_REG) = (num); \
	register long _arg1 __asm__ ("r0") = (long)(arg1); \
	register long _arg2 __asm__ ("r1") = (long)(arg2); \
	register long _arg3 __asm__ ("r2") = (long)(arg3); \
	\
	__asm__ volatile ( \
		_NOLIBC_THUMB_SET_R7 \
		"svc #0\n" \
		_NOLIBC_THUMB_RESTORE_R7 \
		: "=r"(_arg1), "=r"(_num) \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), \
		  "r"(_num) \
		: "memory", "cc", "lr" \
	); \
	_arg1; \
})

#define my_syscall4(num, arg1, arg2, arg3, arg4) \
({ \
	register long _num __asm__(_NOLIBC_SYSCALL_REG) = (num); \
	register long _arg1 __asm__ ("r0") = (long)(arg1); \
	register long _arg2 __asm__ ("r1") = (long)(arg2); \
	register long _arg3 __asm__ ("r2") = (long)(arg3); \
	register long _arg4 __asm__ ("r3") = (long)(arg4); \
	\
	__asm__ volatile ( \
		_NOLIBC_THUMB_SET_R7 \
		"svc #0\n" \
		_NOLIBC_THUMB_RESTORE_R7 \
		: "=r"(_arg1), "=r"(_num) \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), \
		  "r"(_num) \
		: "memory", "cc", "lr" \
	); \
	_arg1; \
})

#define my_syscall5(num, arg1, arg2, arg3, arg4, arg5) \
({ \
	register long _num __asm__(_NOLIBC_SYSCALL_REG) = (num); \
	register long _arg1 __asm__ ("r0") = (long)(arg1); \
	register long _arg2 __asm__ ("r1") = (long)(arg2); \
	register long _arg3 __asm__ ("r2") = (long)(arg3); \
	register long _arg4 __asm__ ("r3") = (long)(arg4); \
	register long _arg5 __asm__ ("r4") = (long)(arg5); \
	\
	__asm__ volatile ( \
		_NOLIBC_THUMB_SET_R7 \
		"svc #0\n" \
		_NOLIBC_THUMB_RESTORE_R7 \
		: "=r"(_arg1), "=r"(_num) \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), \
		  "r"(_num) \
		: "memory", "cc", "lr" \
	); \
	_arg1; \
})

#define my_syscall6(num, arg1, arg2, arg3, arg4, arg5, arg6) \
({ \
	register long _num __asm__(_NOLIBC_SYSCALL_REG) = (num); \
	register long _arg1 __asm__ ("r0") = (long)(arg1); \
	register long _arg2 __asm__ ("r1") = (long)(arg2); \
	register long _arg3 __asm__ ("r2") = (long)(arg3); \
	register long _arg4 __asm__ ("r3") = (long)(arg4); \
	register long _arg5 __asm__ ("r4") = (long)(arg5); \
	register long _arg6 __asm__ ("r5") = (long)(arg6); \
	\
	__asm__ volatile ( \
		_NOLIBC_THUMB_SET_R7 \
		"svc #0\n" \
		_NOLIBC_THUMB_RESTORE_R7 \
		: "=r"(_arg1), "=r"(_num) \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), \
		  "r"(_arg6), "r"(_num) \
		: "memory", "cc", "lr" \
	); \
	_arg1; \
})

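/*
 * Illustrative usage sketch: nolibc's generic syscall wrappers (in sys.h)
 * are built on the macros above. A minimal write() wrapper on ARM could look
 * roughly like the function below. The name example_write is made up for
 * this sketch, and __NR_write comes from <asm/unistd.h>, which is only
 * pulled in later by the nolibc headers, hence the #ifdef guard.
 */
#ifdef __NR_write
static inline long example_write(int fd, const void *buf, unsigned long count)
{
	/* r7 (or r6 in Thumb mode) = __NR_write, r0/r1/r2 = fd/buf/count;
	 * the result (bytes written or a negative errno) comes back in r0.
	 */
	return my_syscall3(__NR_write, fd, buf, count);
}
#endif
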
/* startup code */
void __attribute__((weak, noreturn, optimize("Os", "omit-frame-pointer"))) __no_stack_protector _start(void)
{
	__asm__ volatile (
		"mov %r0, sp\n"      /* save stack pointer to %r0, as arg1 of _start_c */
		"and ip, %r0, #-8\n" /* sp must be 8-byte aligned in the callee */
		"mov sp, ip\n"
		"bl _start_c\n"      /* transfer to the C runtime */
	);
	__builtin_unreachable();
}
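/*
 * Note: _start_c() is provided by crt.h, included at the top of this file;
 * it is expected to take the saved stack pointer, recover argc, argv and
 * envp from it, and then call main() before exiting with its return value.
 */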

#endif /* _NOLIBC_ARCH_ARM_H */

source code of linux/tools/include/nolibc/arch-arm.h