//===-- sanitizer_syscall_linux_loongarch64.inc -----------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// Implementations of internal_syscall and internal_iserror for
// Linux/loongarch64.
//
//===----------------------------------------------------------------------===//

// About local register variables:
// https://gcc.gnu.org/onlinedocs/gcc/Local-Register-Variables.html#Local-Register-Variables
//
// Kernel ABI (see http://man7.org/linux/man-pages/man2/syscall.2.html):
//  - the syscall number is passed in a7,
//  - results are returned in a0 and a1,
//  - arguments are passed in a0-a6 (confirmed by inspecting glibc sources).
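// For example, a close(fd) syscall loads __NR_close into $a7 and fd into $a0,
// executes `syscall 0`, and the kernel leaves the result (or a negated errno
// value) in $a0.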
#define SYSCALL(name) __NR_##name

#define INTERNAL_SYSCALL_CLOBBERS "memory"

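// Each overload below follows the same pattern: the syscall number is bound to
// $a7 and the arguments to $a0..$a6, then `syscall 0` is executed. $a0 doubles
// as the first argument and the return value, hence the "+r"(a0) read-write
// constraint ("=r"(a0) in the zero-argument case), and the "memory" clobber
// keeps the compiler from caching memory values across the syscall.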
static uptr __internal_syscall(u64 nr) {
  register u64 a7 asm("a7") = nr;
  register u64 a0 asm("a0");
  __asm__ volatile("syscall 0\n\t"
                   : "=r"(a0)
                   : "r"(a7)
                   : INTERNAL_SYSCALL_CLOBBERS);
  return a0;
}
#define __internal_syscall0(n) (__internal_syscall)(n)

static uptr __internal_syscall(u64 nr, u64 arg1) {
  register u64 a7 asm("a7") = nr;
  register u64 a0 asm("a0") = arg1;
  __asm__ volatile("syscall 0\n\t"
                   : "+r"(a0)
                   : "r"(a7)
                   : INTERNAL_SYSCALL_CLOBBERS);
  return a0;
}
#define __internal_syscall1(n, a1) (__internal_syscall)(n, (u64)(a1))

static uptr __internal_syscall(u64 nr, u64 arg1, long arg2) {
  register u64 a7 asm("a7") = nr;
  register u64 a0 asm("a0") = arg1;
  register u64 a1 asm("a1") = arg2;
  __asm__ volatile("syscall 0\n\t"
                   : "+r"(a0)
                   : "r"(a7), "r"(a1)
                   : INTERNAL_SYSCALL_CLOBBERS);
  return a0;
}
#define __internal_syscall2(n, a1, a2) \
  (__internal_syscall)(n, (u64)(a1), (long)(a2))

static uptr __internal_syscall(u64 nr, u64 arg1, long arg2, long arg3) {
  register u64 a7 asm("a7") = nr;
  register u64 a0 asm("a0") = arg1;
  register u64 a1 asm("a1") = arg2;
  register u64 a2 asm("a2") = arg3;
  __asm__ volatile("syscall 0\n\t"
                   : "+r"(a0)
                   : "r"(a7), "r"(a1), "r"(a2)
                   : INTERNAL_SYSCALL_CLOBBERS);
  return a0;
}
#define __internal_syscall3(n, a1, a2, a3) \
  (__internal_syscall)(n, (u64)(a1), (long)(a2), (long)(a3))

static uptr __internal_syscall(u64 nr, u64 arg1, long arg2, long arg3,
                               u64 arg4) {
  register u64 a7 asm("a7") = nr;
  register u64 a0 asm("a0") = arg1;
  register u64 a1 asm("a1") = arg2;
  register u64 a2 asm("a2") = arg3;
  register u64 a3 asm("a3") = arg4;
  __asm__ volatile("syscall 0\n\t"
                   : "+r"(a0)
                   : "r"(a7), "r"(a1), "r"(a2), "r"(a3)
                   : INTERNAL_SYSCALL_CLOBBERS);
  return a0;
}
#define __internal_syscall4(n, a1, a2, a3, a4) \
  (__internal_syscall)(n, (u64)(a1), (long)(a2), (long)(a3), (long)(a4))

static uptr __internal_syscall(u64 nr, u64 arg1, long arg2, long arg3, u64 arg4,
                               long arg5) {
  register u64 a7 asm("a7") = nr;
  register u64 a0 asm("a0") = arg1;
  register u64 a1 asm("a1") = arg2;
  register u64 a2 asm("a2") = arg3;
  register u64 a3 asm("a3") = arg4;
  register u64 a4 asm("a4") = arg5;
  __asm__ volatile("syscall 0\n\t"
                   : "+r"(a0)
                   : "r"(a7), "r"(a1), "r"(a2), "r"(a3), "r"(a4)
                   : INTERNAL_SYSCALL_CLOBBERS);
  return a0;
}
#define __internal_syscall5(n, a1, a2, a3, a4, a5)                       \
  (__internal_syscall)(n, (u64)(a1), (long)(a2), (long)(a3), (long)(a4), \
                       (u64)(a5))

static uptr __internal_syscall(u64 nr, u64 arg1, long arg2, long arg3, u64 arg4,
                               long arg5, long arg6) {
  register u64 a7 asm("a7") = nr;
  register u64 a0 asm("a0") = arg1;
  register u64 a1 asm("a1") = arg2;
  register u64 a2 asm("a2") = arg3;
  register u64 a3 asm("a3") = arg4;
  register u64 a4 asm("a4") = arg5;
  register u64 a5 asm("a5") = arg6;
  __asm__ volatile("syscall 0\n\t"
                   : "+r"(a0)
                   : "r"(a7), "r"(a1), "r"(a2), "r"(a3), "r"(a4), "r"(a5)
                   : INTERNAL_SYSCALL_CLOBBERS);
  return a0;
}
#define __internal_syscall6(n, a1, a2, a3, a4, a5, a6)                   \
  (__internal_syscall)(n, (u64)(a1), (long)(a2), (long)(a3), (long)(a4), \
                       (u64)(a5), (long)(a6))

static uptr __internal_syscall(u64 nr, u64 arg1, long arg2, long arg3, u64 arg4,
                               long arg5, long arg6, long arg7) {
  register u64 a7 asm("a7") = nr;
  register u64 a0 asm("a0") = arg1;
  register u64 a1 asm("a1") = arg2;
  register u64 a2 asm("a2") = arg3;
  register u64 a3 asm("a3") = arg4;
  register u64 a4 asm("a4") = arg5;
  register u64 a5 asm("a5") = arg6;
  register u64 a6 asm("a6") = arg7;
  __asm__ volatile("syscall 0\n\t"
                   : "+r"(a0)
                   : "r"(a7), "r"(a1), "r"(a2), "r"(a3), "r"(a4), "r"(a5),
                     "r"(a6)
                   : INTERNAL_SYSCALL_CLOBBERS);
  return a0;
}
#define __internal_syscall7(n, a1, a2, a3, a4, a5, a6, a7)               \
  (__internal_syscall)(n, (u64)(a1), (long)(a2), (long)(a3), (long)(a4), \
                       (u64)(a5), (long)(a6), (long)(a7))

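// __SYSCALL_NARGS returns the number of syscall arguments, i.e. one less than
// the number of macro arguments (the first one is the syscall number): the
// arguments are slid against the reversed sequence 7..0 and whichever value
// lands on the `n` slot is the count. __SYSCALL_DISP then pastes that count
// onto the macro name to select the matching __internal_syscallN wrapper.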
#define __SYSCALL_NARGS_X(a1, a2, a3, a4, a5, a6, a7, a8, n, ...) n
#define __SYSCALL_NARGS(...) \
  __SYSCALL_NARGS_X(__VA_ARGS__, 7, 6, 5, 4, 3, 2, 1, 0, )
#define __SYSCALL_CONCAT_X(a, b) a##b
#define __SYSCALL_CONCAT(a, b) __SYSCALL_CONCAT_X(a, b)
#define __SYSCALL_DISP(b, ...) \
  __SYSCALL_CONCAT(b, __SYSCALL_NARGS(__VA_ARGS__))(__VA_ARGS__)

#define internal_syscall(...) __SYSCALL_DISP(__internal_syscall, __VA_ARGS__)
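// For example (illustrative only; fd, buf, and len are hypothetical),
//   internal_syscall(SYSCALL(read), fd, buf, len)
// expands to
//   __internal_syscall3(SYSCALL(read), fd, buf, len)
// and from there to the three-argument __internal_syscall overload above.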

// Helper function used to avoid clobbering of errno.
bool internal_iserror(uptr retval, int *internal_errno) {
  if (retval >= (uptr)-4095) {
    if (internal_errno)
      *internal_errno = -retval;
    return true;
  }
  return false;
}

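// Illustrative usage only (`fd` is a hypothetical descriptor); a typical
// caller pairs internal_syscall with internal_iserror so that errno is never
// touched:
//   uptr res = internal_syscall(SYSCALL(close), fd);
//   int err = 0;
//   if (internal_iserror(res, &err)) {
//     // The call failed; `err` now holds the positive errno value.
//   }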