OLD | NEW |
| (Empty) |
/* Split a 64-bit syscall argument into the two 32-bit words the o32 ABI
 * passes it as.  The union pun yields the words in native memory order
 * (l[0] then l[1]), which is what the kernel expects on either endianness.
 * NOTE: x is expanded (and evaluated) twice — callers must not pass
 * expressions with side effects. */
#define __SYSCALL_LL_E(x) \
  ((union { \
    long long ll; \
    long l[2]; \
  }){.ll = x}) \
  .l[0], \
  ((union { \
    long long ll; \
    long l[2]; \
  }){.ll = x}) \
  .l[1]
/* Odd-slot variant: emit a zero pad word first so the 64-bit argument
 * starts on an even register/stack pair, as the o32 convention requires. */
#define __SYSCALL_LL_O(x) 0, __SYSCALL_LL_E((x))
13 | |
/* Out-of-line assembly syscall trampoline (variadic); hidden visibility so
 * calls bind locally and never go through the PLT/GOT. */
__attribute__((visibility("hidden"))) long(__syscall)(long, ...);

/* RLIM_INFINITY as this ABI's kernel encodes it: LONG_MAX, i.e. -1UL/2. */
#define SYSCALL_RLIM_INFINITY (-1UL / 2)
17 | |
#if _MIPSEL || __MIPSEL || __MIPSEL__
/* Little-endian: the device numbers already sit in the low 32 bits of the
 * 64-bit fields, so the fixup degenerates to a no-op that still evaluates
 * its argument. */
#define __stat_fix(st) ((st), (void)0)
#else
#include <sys/stat.h>
/* Big-endian: the 32-bit value the kernel stores lands in the UPPER half of
 * the 64-bit st_dev/st_rdev fields; shift it down into place.
 * NOTE(review): layout assumption inferred from the shifts — confirm against
 * the kernel's asm/stat.h for this arch.  p is the raw pointer argument the
 * syscall wrappers pass through as a long. */
static inline void __stat_fix(long p) {
  struct stat* st = (struct stat*)p;
  st->st_dev >>= 32;
  st->st_rdev >>= 32;
}
#endif
28 | |
29 #ifndef __clang__ | |
30 | |
/* 0-argument syscall via inline asm (MIPS o32).  The number n is moved into
 * $2 (v0) inside the asm itself so the compiler cannot schedule anything
 * between loading it and `syscall`.  The kernel sets $7 (a3) nonzero on
 * error with the positive errno in $2; we negate to the usual -errno. */
static inline long __syscall0(long n) {
  register long r7 __asm__("$7");
  register long r2 __asm__("$2");
  __asm__ __volatile__("addu $2,$0,%2 ; syscall"
                       : "=&r"(r2), "=r"(r7)
                       : "ir"(n), "0"(r2), "1"(r7)
                       /* all caller-saved registers may be clobbered */
                       : "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
                         "$14", "$15", "$24", "$25", "hi", "lo", "memory");
  return r7 ? -r2 : r2;
}
41 | |
/* 1-argument syscall: argument a pinned to $4 (a0); otherwise identical to
 * __syscall0 (number into $2, error flag in $7, -errno on failure). */
static inline long __syscall1(long n, long a) {
  register long r4 __asm__("$4") = a;
  register long r7 __asm__("$7");
  register long r2 __asm__("$2");
  __asm__ __volatile__("addu $2,$0,%2 ; syscall"
                       : "=&r"(r2), "=r"(r7)
                       : "ir"(n), "0"(r2), "1"(r7), "r"(r4)
                       : "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
                         "$14", "$15", "$24", "$25", "hi", "lo", "memory");
  return r7 ? -r2 : r2;
}
53 | |
/* 2-argument syscall (args in $4/$5).  On success, stat-family calls get
 * their result buffer (argument b) post-processed by __stat_fix. */
static inline long __syscall2(long n, long a, long b) {
  register long r4 __asm__("$4") = a;
  register long r5 __asm__("$5") = b;
  register long r7 __asm__("$7");
  register long r2 __asm__("$2");
  __asm__ __volatile__("addu $2,$0,%2 ; syscall"
                       : "=&r"(r2), "=r"(r7)
                       : "ir"(n), "0"(r2), "1"(r7), "r"(r4), "r"(r5)
                       : "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
                         "$14", "$15", "$24", "$25", "hi", "lo", "memory");
  if (r7)
    return -r2;
  /* Copy the result out of $2 before the call below can clobber it. */
  long ret = r2;
  if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64)
    __stat_fix(b);
  return ret;
}
71 | |
/* 3-argument syscall (args in $4/$5/$6), with the same stat-buffer fixup
 * on success as __syscall2. */
static inline long __syscall3(long n, long a, long b, long c) {
  register long r4 __asm__("$4") = a;
  register long r5 __asm__("$5") = b;
  register long r6 __asm__("$6") = c;
  register long r7 __asm__("$7");
  register long r2 __asm__("$2");
  __asm__ __volatile__("addu $2,$0,%2 ; syscall"
                       : "=&r"(r2), "=r"(r7)
                       : "ir"(n), "0"(r2), "1"(r7), "r"(r4), "r"(r5), "r"(r6)
                       : "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
                         "$14", "$15", "$24", "$25", "hi", "lo", "memory");
  if (r7)
    return -r2;
  /* Copy the result out of $2 before the call below can clobber it. */
  long ret = r2;
  if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64)
    __stat_fix(b);
  return ret;
}
90 | |
/* 4-argument syscall.  $7 does double duty here: it carries argument d in
 * and the kernel's error flag out (hence the "1"(r7) tied constraint).
 * fstatat's stat buffer is argument c, not b, so it gets its own fixup. */
static inline long __syscall4(long n, long a, long b, long c, long d) {
  register long r4 __asm__("$4") = a;
  register long r5 __asm__("$5") = b;
  register long r6 __asm__("$6") = c;
  register long r7 __asm__("$7") = d;
  register long r2 __asm__("$2");
  __asm__ __volatile__("addu $2,$0,%2 ; syscall"
                       : "=&r"(r2), "=r"(r7)
                       : "ir"(n), "0"(r2), "1"(r7), "r"(r4), "r"(r5), "r"(r6)
                       : "$1", "$3", "$8", "$9", "$10", "$11", "$12", "$13",
                         "$14", "$15", "$24", "$25", "hi", "lo", "memory");
  if (r7)
    return -r2;
  /* Copy the result out of $2 before the calls below can clobber it. */
  long ret = r2;
  if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64)
    __stat_fix(b);
  if (n == SYS_fstatat)
    __stat_fix(c);
  return ret;
}
111 | |
112 #else | |
113 | |
114 static inline long __syscall0(long n) { | |
115 return (__syscall)(n); | |
116 } | |
117 | |
118 static inline long __syscall1(long n, long a) { | |
119 return (__syscall)(n, a); | |
120 } | |
121 | |
122 static inline long __syscall2(long n, long a, long b) { | |
123 long r2 = (__syscall)(n, a, b); | |
124 if (r2 > -4096UL) | |
125 return r2; | |
126 if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) | |
127 __stat_fix(b); | |
128 return r2; | |
129 } | |
130 | |
131 static inline long __syscall3(long n, long a, long b, long c) { | |
132 long r2 = (__syscall)(n, a, b, c); | |
133 if (r2 > -4096UL) | |
134 return r2; | |
135 if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) | |
136 __stat_fix(b); | |
137 return r2; | |
138 } | |
139 | |
140 static inline long __syscall4(long n, long a, long b, long c, long d) { | |
141 long r2 = (__syscall)(n, a, b, c, d); | |
142 if (r2 > -4096UL) | |
143 return r2; | |
144 if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) | |
145 __stat_fix(b); | |
146 if (n == SYS_fstatat) | |
147 __stat_fix(c); | |
148 return r2; | |
149 } | |
150 | |
151 #endif | |
152 | |
153 static inline long __syscall5(long n, long a, long b, long c, long d, long e) { | |
154 long r2 = (__syscall)(n, a, b, c, d, e); | |
155 if (r2 > -4096UL) | |
156 return r2; | |
157 if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) | |
158 __stat_fix(b); | |
159 if (n == SYS_fstatat) | |
160 __stat_fix(c); | |
161 return r2; | |
162 } | |
163 | |
164 static inline long | |
165 __syscall6(long n, long a, long b, long c, long d, long e, long f) { | |
166 long r2 = (__syscall)(n, a, b, c, d, e, f); | |
167 if (r2 > -4096UL) | |
168 return r2; | |
169 if (n == SYS_stat64 || n == SYS_fstat64 || n == SYS_lstat64) | |
170 __stat_fix(b); | |
171 if (n == SYS_fstatat) | |
172 __stat_fix(c); | |
173 return r2; | |
174 } | |
175 | |
/* Enable vDSO use on this arch; clock_gettime is reached through the
 * symbol/version pair below when the kernel exports it. */
#define VDSO_USEFUL
#define VDSO_CGT_SYM "__vdso_clock_gettime"
#define VDSO_CGT_VER "LINUX_2.6"
OLD | NEW |