
Commit f01d4c8

Merge tag 'nolibc.2023.02.06a' of git://git.kernel.org/pub/scm/linux/kernel/git/paulmck/linux-rcu
Pull nolibc updates from Paul McKenney:

 - Add s390 support
 - Add support for the ARM Thumb1 instruction set
 - Fix O_* flags definitions for open() and fcntl()
 - Make errno a weak symbol instead of a static variable
 - Export environ as a weak symbol
 - Export _auxv as a weak symbol for auxiliary vector retrieval
 - Implement getauxval() and getpagesize()
 - Further improve self tests, including permitting userland testing of
   the nolibc library

* tag 'nolibc.2023.02.06a' of git://git.kernel.org/pub/scm/linux/kernel/git/paulmck/linux-rcu: (28 commits)
  selftests/nolibc: Add a "run-user" target to test the program in user land
  selftests/nolibc: Support "x86_64" for arch name
  selftests/nolibc: Add `getpagesize(2)` selftest
  nolibc/sys: Implement `getpagesize(2)` function
  nolibc/stdlib: Implement `getauxval(3)` function
  tools/nolibc: add auxiliary vector retrieval for s390
  tools/nolibc: add auxiliary vector retrieval for mips
  tools/nolibc: add auxiliary vector retrieval for riscv
  tools/nolibc: add auxiliary vector retrieval for arm
  tools/nolibc: add auxiliary vector retrieval for arm64
  tools/nolibc: add auxiliary vector retrieval for x86_64
  tools/nolibc: add auxiliary vector retrieval for i386
  tools/nolibc: export environ as a weak symbol on s390
  tools/nolibc: export environ as a weak symbol on riscv
  tools/nolibc: export environ as a weak symbol on mips
  tools/nolibc: export environ as a weak symbol on arm
  tools/nolibc: export environ as a weak symbol on arm64
  tools/nolibc: export environ as a weak symbol on i386
  tools/nolibc: export environ as a weak symbol on x86_64
  tools/nolibc: make errno a weak symbol instead of a static one
  ...
2 parents 525445e + c54ba41 commit f01d4c8
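
The new interfaces are easiest to see from the caller's side. Below is a small illustration (not part of this commit's diff) of a program using getauxval(), getpagesize() and the exported environ pointer; it is written against the ordinary libc prototypes, which nolibc mirrors, and with nolibc it would instead be built with -nostdlib against the nolibc headers.

/* Illustration only: exercise getauxval(), getpagesize() and environ.
 * Compiled against a regular libc here for simplicity; the nolibc
 * versions of these interfaces follow the same prototypes.
 */
#include <stdio.h>
#include <unistd.h>      /* getpagesize() */
#include <sys/auxv.h>    /* getauxval(), AT_PAGESZ */

extern char **environ;

int main(void)
{
	printf("getpagesize()        = %d\n", getpagesize());
	printf("getauxval(AT_PAGESZ) = %lu\n", getauxval(AT_PAGESZ));
	printf("first env entry      = %s\n", environ[0] ? environ[0] : "(none)");
	return 0;
}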

File tree

15 files changed: +579 / -202 lines


tools/include/nolibc/arch-aarch64.h

Lines changed: 26 additions & 26 deletions
@@ -7,18 +7,6 @@
 #ifndef _NOLIBC_ARCH_AARCH64_H
 #define _NOLIBC_ARCH_AARCH64_H
 
-/* O_* macros for fcntl/open are architecture-specific */
-#define O_RDONLY            0
-#define O_WRONLY            1
-#define O_RDWR              2
-#define O_CREAT          0x40
-#define O_EXCL           0x80
-#define O_NOCTTY        0x100
-#define O_TRUNC         0x200
-#define O_APPEND        0x400
-#define O_NONBLOCK      0x800
-#define O_DIRECTORY    0x4000
-
 /* The struct returned by the newfstatat() syscall. Differs slightly from the
  * x86_64's stat one by field ordering, so be careful.
  */
@@ -181,19 +169,31 @@ struct sys_stat_struct {
 	_arg1; \
 })
 
-/* startup code */
-__asm__ (".section .text\n"
-    ".weak _start\n"
-    "_start:\n"
-    "ldr x0, [sp]\n"     // argc (x0) was in the stack
-    "add x1, sp, 8\n"    // argv (x1) = sp
-    "lsl x2, x0, 3\n"    // envp (x2) = 8*argc ...
-    "add x2, x2, 8\n"    //           + 8 (skip null)
-    "add x2, x2, x1\n"   //           + argv
-    "and sp, x1, -16\n"  // sp must be 16-byte aligned in the callee
-    "bl main\n"          // main() returns the status code, we'll exit with it.
-    "mov x8, 93\n"       // NR_exit == 93
-    "svc #0\n"
-    "");
+char **environ __attribute__((weak));
+const unsigned long *_auxv __attribute__((weak));
 
+/* startup code */
+void __attribute__((weak,noreturn,optimize("omit-frame-pointer"))) _start(void)
+{
+	__asm__ volatile (
+		"ldr x0, [sp]\n"                 // argc (x0) was in the stack
+		"add x1, sp, 8\n"                // argv (x1) = sp
+		"lsl x2, x0, 3\n"                // envp (x2) = 8*argc ...
+		"add x2, x2, 8\n"                //           + 8 (skip null)
+		"add x2, x2, x1\n"               //           + argv
+		"adrp x3, environ\n"             // x3 = &environ (high bits)
+		"str x2, [x3, #:lo12:environ]\n" // store envp into environ
+		"mov x4, x2\n"                   // search for auxv (follows NULL after last env)
+		"0:\n"
+		"ldr x5, [x4], 8\n"              // x5 = *x4; x4 += 8
+		"cbnz x5, 0b\n"                  // and stop at NULL after last env
+		"adrp x3, _auxv\n"               // x3 = &_auxv (high bits)
+		"str x4, [x3, #:lo12:_auxv]\n"   // store x4 into _auxv
+		"and sp, x1, -16\n"              // sp must be 16-byte aligned in the callee
+		"bl main\n"                      // main() returns the status code, we'll exit with it.
+		"mov x8, 93\n"                   // NR_exit == 93
+		"svc #0\n"
+	);
+	__builtin_unreachable();
+}
 #endif // _NOLIBC_ARCH_AARCH64_H
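
The new aarch64 _start stores envp into the weak environ symbol, then walks past the NULL that terminates the environment block to find the auxiliary vector, and stores that address into _auxv. A C sketch of the same walk, with hypothetical names, only to clarify what the ldr/cbnz loop above does:

/* Hypothetical C rendering of the pointer arithmetic in the aarch64 _start,
 * given the initial stack layout [argc][argv...][NULL][envp...][NULL][auxv...].
 * Names are illustrative, not part of this commit.
 */
static char **environ_sketch;
static const unsigned long *auxv_sketch;

static void record_env_and_auxv(long *sp)
{
	long argc   = sp[0];
	char **argv = (char **)(sp + 1);
	char **envp = argv + argc + 1;   /* skip argv[] and its NULL terminator */

	environ_sketch = envp;           /* what "str x2, [x3, #:lo12:environ]" stores */

	char **p = envp;
	while (*p)                       /* the ldr/cbnz loop: advance until the   */
		p++;                     /* NULL after the last environment entry  */
	auxv_sketch = (const unsigned long *)(p + 1);   /* auxv starts right after it */
}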

tools/include/nolibc/arch-arm.h

Lines changed: 88 additions & 50 deletions
@@ -7,18 +7,6 @@
 #ifndef _NOLIBC_ARCH_ARM_H
 #define _NOLIBC_ARCH_ARM_H
 
-/* O_* macros for fcntl/open are architecture-specific */
-#define O_RDONLY            0
-#define O_WRONLY            1
-#define O_RDWR              2
-#define O_CREAT          0x40
-#define O_EXCL           0x80
-#define O_NOCTTY        0x100
-#define O_TRUNC         0x200
-#define O_APPEND        0x400
-#define O_NONBLOCK      0x800
-#define O_DIRECTORY    0x4000
-
 /* The struct returned by the stat() syscall, 32-bit only, the syscall returns
  * exactly 56 bytes (stops before the unused array). In big endian, the format
  * differs as devices are returned as short only.
@@ -70,33 +58,59 @@ struct sys_stat_struct {
  *   don't have to experience issues with register constraints.
  * - the syscall number is always specified last in order to allow to force
  *   some registers before (gcc refuses a %-register at the last position).
+ * - in thumb mode without -fomit-frame-pointer, r7 is also used to store the
+ *   frame pointer, and we cannot directly assign it as a register variable,
+ *   nor can we clobber it. Instead we assign the r6 register and swap it
+ *   with r7 before calling svc, and r6 is marked as clobbered.
+ *   We're just using any regular register which we assign to r7 after saving
+ *   it.
  *
  * Also, ARM supports the old_select syscall if newselect is not available
  */
 #define __ARCH_WANT_SYS_OLD_SELECT
 
+#if (defined(__THUMBEB__) || defined(__THUMBEL__)) && \
+    !defined(NOLIBC_OMIT_FRAME_POINTER)
+/* swap r6,r7 needed in Thumb mode since we can't use nor clobber r7 */
+#define _NOLIBC_SYSCALL_REG        "r6"
+#define _NOLIBC_THUMB_SET_R7       "eor r7, r6\neor r6, r7\neor r7, r6\n"
+#define _NOLIBC_THUMB_RESTORE_R7   "mov r7, r6\n"
+
+#else  /* we're in ARM mode */
+/* in Arm mode we can directly use r7 */
+#define _NOLIBC_SYSCALL_REG        "r7"
+#define _NOLIBC_THUMB_SET_R7       ""
+#define _NOLIBC_THUMB_RESTORE_R7   ""
+
+#endif /* end THUMB */
+
 #define my_syscall0(num) \
 ({ \
-	register long _num __asm__ ("r7") = (num); \
+	register long _num __asm__(_NOLIBC_SYSCALL_REG) = (num); \
 	register long _arg1 __asm__ ("r0"); \
 	\
 	__asm__ volatile ( \
+		_NOLIBC_THUMB_SET_R7 \
 		"svc #0\n" \
-		: "=r"(_arg1) \
-		: "r"(_num) \
+		_NOLIBC_THUMB_RESTORE_R7 \
+		: "=r"(_arg1), "=r"(_num) \
+		: "r"(_arg1), \
+		  "r"(_num) \
 		: "memory", "cc", "lr" \
 	); \
 	_arg1; \
 })
 
 #define my_syscall1(num, arg1) \
 ({ \
-	register long _num __asm__ ("r7") = (num); \
+	register long _num __asm__(_NOLIBC_SYSCALL_REG) = (num); \
 	register long _arg1 __asm__ ("r0") = (long)(arg1); \
 	\
 	__asm__ volatile ( \
+		_NOLIBC_THUMB_SET_R7 \
 		"svc #0\n" \
-		: "=r"(_arg1) \
+		_NOLIBC_THUMB_RESTORE_R7 \
+		: "=r"(_arg1), "=r" (_num) \
 		: "r"(_arg1), \
 		  "r"(_num) \
 		: "memory", "cc", "lr" \
@@ -106,13 +120,15 @@ struct sys_stat_struct {
 
 #define my_syscall2(num, arg1, arg2) \
 ({ \
-	register long _num __asm__ ("r7") = (num); \
+	register long _num __asm__(_NOLIBC_SYSCALL_REG) = (num); \
 	register long _arg1 __asm__ ("r0") = (long)(arg1); \
 	register long _arg2 __asm__ ("r1") = (long)(arg2); \
 	\
 	__asm__ volatile ( \
+		_NOLIBC_THUMB_SET_R7 \
 		"svc #0\n" \
-		: "=r"(_arg1) \
+		_NOLIBC_THUMB_RESTORE_R7 \
+		: "=r"(_arg1), "=r" (_num) \
 		: "r"(_arg1), "r"(_arg2), \
 		  "r"(_num) \
 		: "memory", "cc", "lr" \
@@ -122,14 +138,16 @@ struct sys_stat_struct {
 
 #define my_syscall3(num, arg1, arg2, arg3) \
 ({ \
-	register long _num __asm__ ("r7") = (num); \
+	register long _num __asm__(_NOLIBC_SYSCALL_REG) = (num); \
 	register long _arg1 __asm__ ("r0") = (long)(arg1); \
 	register long _arg2 __asm__ ("r1") = (long)(arg2); \
 	register long _arg3 __asm__ ("r2") = (long)(arg3); \
 	\
 	__asm__ volatile ( \
+		_NOLIBC_THUMB_SET_R7 \
 		"svc #0\n" \
-		: "=r"(_arg1) \
+		_NOLIBC_THUMB_RESTORE_R7 \
+		: "=r"(_arg1), "=r" (_num) \
 		: "r"(_arg1), "r"(_arg2), "r"(_arg3), \
 		  "r"(_num) \
 		: "memory", "cc", "lr" \
@@ -139,15 +157,17 @@ struct sys_stat_struct {
 
 #define my_syscall4(num, arg1, arg2, arg3, arg4) \
 ({ \
-	register long _num __asm__ ("r7") = (num); \
+	register long _num __asm__(_NOLIBC_SYSCALL_REG) = (num); \
 	register long _arg1 __asm__ ("r0") = (long)(arg1); \
 	register long _arg2 __asm__ ("r1") = (long)(arg2); \
 	register long _arg3 __asm__ ("r2") = (long)(arg3); \
 	register long _arg4 __asm__ ("r3") = (long)(arg4); \
 	\
 	__asm__ volatile ( \
+		_NOLIBC_THUMB_SET_R7 \
 		"svc #0\n" \
-		: "=r"(_arg1) \
+		_NOLIBC_THUMB_RESTORE_R7 \
+		: "=r"(_arg1), "=r" (_num) \
 		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), \
 		  "r"(_num) \
 		: "memory", "cc", "lr" \
@@ -157,48 +177,66 @@ struct sys_stat_struct {
 
 #define my_syscall5(num, arg1, arg2, arg3, arg4, arg5) \
 ({ \
-	register long _num __asm__ ("r7") = (num); \
+	register long _num __asm__(_NOLIBC_SYSCALL_REG) = (num); \
 	register long _arg1 __asm__ ("r0") = (long)(arg1); \
 	register long _arg2 __asm__ ("r1") = (long)(arg2); \
 	register long _arg3 __asm__ ("r2") = (long)(arg3); \
 	register long _arg4 __asm__ ("r3") = (long)(arg4); \
 	register long _arg5 __asm__ ("r4") = (long)(arg5); \
 	\
 	__asm__ volatile ( \
+		_NOLIBC_THUMB_SET_R7 \
 		"svc #0\n" \
-		: "=r" (_arg1) \
+		_NOLIBC_THUMB_RESTORE_R7 \
+		: "=r"(_arg1), "=r" (_num) \
 		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), \
 		  "r"(_num) \
 		: "memory", "cc", "lr" \
 	); \
 	_arg1; \
 })
 
+char **environ __attribute__((weak));
+const unsigned long *_auxv __attribute__((weak));
+
 /* startup code */
-__asm__ (".section .text\n"
-    ".weak _start\n"
-    "_start:\n"
-#if defined(__THUMBEB__) || defined(__THUMBEL__)
-    /* We enter here in 32-bit mode but if some previous functions were in
-     * 16-bit mode, the assembler cannot know, so we need to tell it we're in
-     * 32-bit now, then switch to 16-bit (is there a better way to do it than
-     * adding 1 by hand ?) and tell the asm we're now in 16-bit mode so that
-     * it generates correct instructions. Note that we do not support thumb1.
-     */
-    ".code 32\n"
-    "add r0, pc, #1\n"
-    "bx r0\n"
-    ".code 16\n"
-#endif
-    "pop {%r0}\n"                 // argc was in the stack
-    "mov %r1, %sp\n"              // argv = sp
-    "add %r2, %r1, %r0, lsl #2\n" // envp = argv + 4*argc ...
-    "add %r2, %r2, $4\n"          //        ... + 4
-    "and %r3, %r1, $-8\n"         // AAPCS : sp must be 8-byte aligned in the
-    "mov %sp, %r3\n"              //         callee, an bl doesn't push (lr=pc)
-    "bl main\n"                   // main() returns the status code, we'll exit with it.
-    "movs r7, $1\n"               // NR_exit == 1
-    "svc $0x00\n"
-    "");
+void __attribute__((weak,noreturn,optimize("omit-frame-pointer"))) _start(void)
+{
+	__asm__ volatile (
+		"pop {%r0}\n"                 // argc was in the stack
+		"mov %r1, %sp\n"              // argv = sp
+
+		"add %r2, %r0, $1\n"          // envp = (argc + 1) ...
+		"lsl %r2, %r2, $2\n"          //        * 4        ...
+		"add %r2, %r2, %r1\n"         //        + argv
+		"ldr %r3, 1f\n"               // r3 = &environ (see below)
+		"str %r2, [r3]\n"             // store envp into environ
+
+		"mov r4, r2\n"                // search for auxv (follows NULL after last env)
+		"0:\n"
+		"mov r5, r4\n"                // r5 = r4
+		"add r4, r4, #4\n"            // r4 += 4
+		"ldr r5,[r5]\n"               // r5 = *r5 = *(r4-4)
+		"cmp r5, #0\n"                // and stop at NULL after last env
+		"bne 0b\n"
+		"ldr %r3, 2f\n"               // r3 = &_auxv (low bits)
+		"str r4, [r3]\n"              // store r4 into _auxv
+
+		"mov %r3, $8\n"               // AAPCS : sp must be 8-byte aligned in the
+		"neg %r3, %r3\n"              //         callee, and bl doesn't push (lr=pc)
+		"and %r3, %r3, %r1\n"         //         so we do sp = r1(=sp) & r3(=-8);
+		"mov %sp, %r3\n"              //
+
+		"bl main\n"                   // main() returns the status code, we'll exit with it.
+		"movs r7, $1\n"               // NR_exit == 1
+		"svc $0x00\n"
+		".align 2\n"                  // below are the pointers to a few variables
+		"1:\n"
+		".word environ\n"
+		"2:\n"
+		".word _auxv\n"
+	);
+	__builtin_unreachable();
+}
 
 #endif // _NOLIBC_ARCH_ARM_H
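
In the arm hunks above, _NOLIBC_THUMB_SET_R7 moves the syscall number from r6 into r7 with three eor instructions, so r7 (which may hold the Thumb frame pointer) never has to be named as an operand or clobber. The three eors are the classic xor swap; a C reminder of why that works, illustrative only:

/* The xor-swap identity behind "eor r7, r6; eor r6, r7; eor r7, r6":
 * after the three steps *a and *b have exchanged values without a temporary.
 */
static void xor_swap(unsigned long *a, unsigned long *b)
{
	*a ^= *b;   /* a = a0 ^ b0                 */
	*b ^= *a;   /* b = b0 ^ (a0 ^ b0) = a0     */
	*a ^= *b;   /* a = (a0 ^ b0) ^ a0 = b0     */
}

After the swap, r6 holds the saved r7 value, which is why the restore path needs only the single "mov r7, r6" in _NOLIBC_THUMB_RESTORE_R7.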

tools/include/nolibc/arch-i386.h

Lines changed: 30 additions & 30 deletions
@@ -7,18 +7,6 @@
 #ifndef _NOLIBC_ARCH_I386_H
 #define _NOLIBC_ARCH_I386_H
 
-/* O_* macros for fcntl/open are architecture-specific */
-#define O_RDONLY            0
-#define O_WRONLY            1
-#define O_RDWR              2
-#define O_CREAT          0x40
-#define O_EXCL           0x80
-#define O_NOCTTY        0x100
-#define O_TRUNC         0x200
-#define O_APPEND        0x400
-#define O_NONBLOCK      0x800
-#define O_DIRECTORY  0x10000
-
 /* The struct returned by the stat() syscall, 32-bit only, the syscall returns
  * exactly 56 bytes (stops before the unused array).
  */
@@ -190,30 +178,42 @@ struct sys_stat_struct {
 	_eax; \
 })
 
+char **environ __attribute__((weak));
+const unsigned long *_auxv __attribute__((weak));
+
 /* startup code */
 /*
  * i386 System V ABI mandates:
  * 1) last pushed argument must be 16-byte aligned.
  * 2) The deepest stack frame should be set to zero
  *
 */
-__asm__ (".section .text\n"
-    ".weak _start\n"
-    "_start:\n"
-    "pop %eax\n"                // argc   (first arg, %eax)
-    "mov %esp, %ebx\n"          // argv[] (second arg, %ebx)
-    "lea 4(%ebx,%eax,4),%ecx\n" // then a NULL then envp (third arg, %ecx)
-    "xor %ebp, %ebp\n"          // zero the stack frame
-    "and $-16, %esp\n"          // x86 ABI : esp must be 16-byte aligned before
-    "sub $4, %esp\n"            // the call instruction (args are aligned)
-    "push %ecx\n"               // push all registers on the stack so that we
-    "push %ebx\n"               // support both regparm and plain stack modes
-    "push %eax\n"
-    "call main\n"               // main() returns the status code in %eax
-    "mov %eax, %ebx\n"          // retrieve exit code (32-bit int)
-    "movl $1, %eax\n"           // NR_exit == 1
-    "int $0x80\n"               // exit now
-    "hlt\n"                     // ensure it does not
-    "");
+void __attribute__((weak,noreturn,optimize("omit-frame-pointer"))) _start(void)
+{
+	__asm__ volatile (
+		"pop %eax\n"                // argc   (first arg, %eax)
+		"mov %esp, %ebx\n"          // argv[] (second arg, %ebx)
+		"lea 4(%ebx,%eax,4),%ecx\n" // then a NULL then envp (third arg, %ecx)
+		"mov %ecx, environ\n"       // save environ
+		"xor %ebp, %ebp\n"          // zero the stack frame
+		"mov %ecx, %edx\n"          // search for auxv (follows NULL after last env)
+		"0:\n"
+		"add $4, %edx\n"            // search for auxv using edx, it follows the
+		"cmp -4(%edx), %ebp\n"      // ... NULL after last env (ebp is zero here)
+		"jnz 0b\n"
+		"mov %edx, _auxv\n"         // save it into _auxv
+		"and $-16, %esp\n"          // x86 ABI : esp must be 16-byte aligned before
+		"sub $4, %esp\n"            // the call instruction (args are aligned)
+		"push %ecx\n"               // push all registers on the stack so that we
+		"push %ebx\n"               // support both regparm and plain stack modes
+		"push %eax\n"
+		"call main\n"               // main() returns the status code in %eax
+		"mov %eax, %ebx\n"          // retrieve exit code (32-bit int)
+		"movl $1, %eax\n"           // NR_exit == 1
+		"int $0x80\n"               // exit now
+		"hlt\n"                     // ensure it does not
+	);
+	__builtin_unreachable();
+}
 
 #endif // _NOLIBC_ARCH_I386_H
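
The getauxval(3) implementation mentioned in the commit message lives in nolibc's stdlib header rather than in the three files shown above. As a hedged sketch, a lookup over the weak _auxv pointer that these _start routines initialize could look like this (function name and layout are illustrative, not the actual nolibc code):

/* Sketch only: walk the auxiliary vector published through the weak _auxv
 * pointer. auxv is a sequence of (type, value) unsigned long pairs ending
 * with an AT_NULL (0) type.
 */
extern const unsigned long *_auxv;   /* weak symbol filled in by _start */

static unsigned long auxv_lookup(unsigned long type)
{
	const unsigned long *v = _auxv;

	if (!v)
		return 0;
	for (; v[0]; v += 2)          /* stop at the AT_NULL terminator */
		if (v[0] == type)
			return v[1];  /* value paired with the matching type */
	return 0;
}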
