//===-- sanitizer_syscall_linux_aarch64.inc --------------------*- C++ -*-===//
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Implementations of internal_syscall and internal_iserror for Linux/aarch64.
//
//===----------------------------------------------------------------------===//
// Expands to the kernel's syscall-number constant for `name`, e.g.
// SYSCALL(openat) -> __NR_openat.
#define SYSCALL(name) __NR_ ## name
// Issue syscall `nr` with no arguments.  The syscall number is bound to
// register x8 and the kernel's return value comes back in x0; "svc 0"
// traps into the kernel.  "memory" and "cc" are clobbered because the
// kernel may read/write caller memory and alter the condition flags.
static uptr __internal_syscall(u64 nr) {
  register u64 x8 asm("x8") = nr;
  register u64 x0 asm("x0");
  asm volatile("svc 0"
               : "=r"(x0)
               : "r"(x8)
               : "memory", "cc");
  return x0;
}
// Zero-argument dispatch helper selected by internal_syscall() below.
// NOTE(review): the parentheses around the callee are defensive — they
// would suppress function-like macro expansion if a macro of the same
// name ever existed.
#define __internal_syscall0(n) \
  (__internal_syscall)(n)
25 | ||
// Issue syscall `nr` with one argument.  arg1 is bound to x0, which is
// also where the result is returned; the "0"(x0) input constraint ties
// the argument to the same register as the "=r"(x0) output.
static uptr __internal_syscall(u64 nr, u64 arg1) {
  register u64 x8 asm("x8") = nr;
  register u64 x0 asm("x0") = arg1;
  asm volatile("svc 0"
               : "=r"(x0)
               : "r"(x8), "0"(x0)
               : "memory", "cc");
  return x0;
}
// One-argument dispatch helper for internal_syscall().
#define __internal_syscall1(n, a1) \
  (__internal_syscall)(n, (u64)(a1))
37 | ||
// Issue syscall `nr` with two arguments (bound to x0, x1).
// NOTE(review): the parameter types mix u64 and long across these
// overloads; the per-arity macros below cast each argument to match,
// so the overload set resolves on arity alone.
static uptr __internal_syscall(u64 nr, u64 arg1, long arg2) {
  register u64 x8 asm("x8") = nr;
  register u64 x0 asm("x0") = arg1;
  register u64 x1 asm("x1") = arg2;
  asm volatile("svc 0"
               : "=r"(x0)
               : "r"(x8), "0"(x0), "r"(x1)
               : "memory", "cc");
  return x0;
}
// Two-argument dispatch helper for internal_syscall().
#define __internal_syscall2(n, a1, a2) \
  (__internal_syscall)(n, (u64)(a1), (long)(a2))
50 | ||
// Issue syscall `nr` with three arguments (bound to x0, x1, x2).
static uptr __internal_syscall(u64 nr, u64 arg1, long arg2, long arg3) {
  register u64 x8 asm("x8") = nr;
  register u64 x0 asm("x0") = arg1;
  register u64 x1 asm("x1") = arg2;
  register u64 x2 asm("x2") = arg3;
  asm volatile("svc 0"
               : "=r"(x0)
               : "r"(x8), "0"(x0), "r"(x1), "r"(x2)
               : "memory", "cc");
  return x0;
}
// Three-argument dispatch helper for internal_syscall().
#define __internal_syscall3(n, a1, a2, a3) \
  (__internal_syscall)(n, (u64)(a1), (long)(a2), (long)(a3))
64 | ||
// Issue syscall `nr` with four arguments (bound to x0..x3).
static uptr __internal_syscall(u64 nr, u64 arg1, long arg2, long arg3,
                               u64 arg4) {
  register u64 x8 asm("x8") = nr;
  register u64 x0 asm("x0") = arg1;
  register u64 x1 asm("x1") = arg2;
  register u64 x2 asm("x2") = arg3;
  register u64 x3 asm("x3") = arg4;
  asm volatile("svc 0"
               : "=r"(x0)
               : "r"(x8), "0"(x0), "r"(x1), "r"(x2), "r"(x3)
               : "memory", "cc");
  return x0;
}
// Four-argument dispatch helper for internal_syscall().
#define __internal_syscall4(n, a1, a2, a3, a4) \
  (__internal_syscall)(n, (u64)(a1), (long)(a2), (long)(a3), (long)(a4))
80 | ||
// Issue syscall `nr` with five arguments (bound to x0..x4).
static uptr __internal_syscall(u64 nr, u64 arg1, long arg2, long arg3,
                               u64 arg4, long arg5) {
  register u64 x8 asm("x8") = nr;
  register u64 x0 asm("x0") = arg1;
  register u64 x1 asm("x1") = arg2;
  register u64 x2 asm("x2") = arg3;
  register u64 x3 asm("x3") = arg4;
  register u64 x4 asm("x4") = arg5;
  asm volatile("svc 0"
               : "=r"(x0)
               : "r"(x8), "0"(x0), "r"(x1), "r"(x2), "r"(x3), "r"(x4)
               : "memory", "cc");
  return x0;
}
// Five-argument dispatch helper for internal_syscall().
#define __internal_syscall5(n, a1, a2, a3, a4, a5) \
  (__internal_syscall)(n, (u64)(a1), (long)(a2), (long)(a3), (long)(a4), \
                       (u64)(a5))
98 | ||
// Issue syscall `nr` with six arguments (bound to x0..x5) — the maximum
// supported by this overload set.
static uptr __internal_syscall(u64 nr, u64 arg1, long arg2, long arg3,
                               u64 arg4, long arg5, long arg6) {
  register u64 x8 asm("x8") = nr;
  register u64 x0 asm("x0") = arg1;
  register u64 x1 asm("x1") = arg2;
  register u64 x2 asm("x2") = arg3;
  register u64 x3 asm("x3") = arg4;
  register u64 x4 asm("x4") = arg5;
  register u64 x5 asm("x5") = arg6;
  asm volatile("svc 0"
               : "=r"(x0)
               : "r"(x8), "0"(x0), "r"(x1), "r"(x2), "r"(x3), "r"(x4), "r"(x5)
               : "memory", "cc");
  return x0;
}
// Six-argument dispatch helper for internal_syscall().
#define __internal_syscall6(n, a1, a2, a3, a4, a5, a6) \
  (__internal_syscall)(n, (u64)(a1), (long)(a2), (long)(a3), (long)(a4), \
                       (u64)(a5), (long)(a6))
117 | ||
// Argument-counting dispatch: internal_syscall(nr, a1, ..., ak) expands
// to __internal_syscallk(nr, a1, ..., ak), selecting the matching
// per-arity macro (and thus overload) above.

// Selects the ninth argument.  The variadic call site pushes the reversed
// count list right, so the slot that lands on `n` is the number of
// arguments passed AFTER the syscall number (0..7).
#define __SYSCALL_NARGS_X(a1, a2, a3, a4, a5, a6, a7, a8, n, ...) n
#define __SYSCALL_NARGS(...) \
  __SYSCALL_NARGS_X(__VA_ARGS__, 7, 6, 5, 4, 3, 2, 1, 0, )
// Two-step token pasting so the arguments are macro-expanded first.
#define __SYSCALL_CONCAT_X(a, b) a##b
#define __SYSCALL_CONCAT(a, b) __SYSCALL_CONCAT_X(a, b)
// Pastes the computed arity onto `b` and invokes the result.
// NOTE(review): 7 appears in the count list but no __internal_syscall7
// exists, so passing seven syscall arguments fails at compile time.
#define __SYSCALL_DISP(b, ...) \
  __SYSCALL_CONCAT(b, __SYSCALL_NARGS(__VA_ARGS__))(__VA_ARGS__)

// Public entry point: internal_syscall(SYSCALL(name), args...).
#define internal_syscall(...) __SYSCALL_DISP(__internal_syscall, __VA_ARGS__)
127 | ||
128 | // Helper function used to avoid cobbler errno. | |
129 | bool internal_iserror(uptr retval, int *rverrno) { | |
130 | if (retval >= (uptr)-4095) { | |
131 | if (rverrno) | |
132 | *rverrno = -retval; | |
133 | return true; | |
134 | } | |
135 | return false; | |
136 | } |