1 /* -*- c -*-
2 ----------------------------------------------------------------
3
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
9
10 ----------------------------------------------------------------
11
12 This file is part of Valgrind, a dynamic binary instrumentation
13 framework.
14
15 Copyright (C) 2000-2017 Julian Seward. All rights reserved.
16
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions
19 are met:
20
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
23
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
28
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
31
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written
34 permission.
35
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
47
48 ----------------------------------------------------------------
49
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
54
55 ----------------------------------------------------------------
56 */
57
58
59 /* This file is for inclusion into client (your!) code.
60
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
63
64 The resulting executables will still run without Valgrind, just a
65 little bit more slowly than they otherwise would, but otherwise
66 unchanged. When not running on valgrind, each client request
67 consumes very few (eg. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
72
#ifndef __VALGRIND_H
#define __VALGRIND_H


/* ------------------------------------------------------------------ */
/* VERSION NUMBER OF VALGRIND                                         */
/* ------------------------------------------------------------------ */

/* Specify Valgrind's version number, so that user code can
   conditionally compile based on our version number. Note that these
   were introduced at version 3.6 and so do not exist in version 3.5
   or earlier. The recommended way to use them to check for "version
   X.Y or later" is (eg)

#if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__)   \
    && (__VALGRIND_MAJOR__ > 3                                   \
        || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
*/
#define __VALGRIND_MAJOR__    3
#define __VALGRIND_MINOR__    14


/* NOTE(review): <stdarg.h> is presumably required by the variadic
   function-wrapping (CALL_FN_*) machinery later in this file, which is
   not visible in this chunk -- confirm before removing. */
#include <stdarg.h>
96
97 /* Nb: this file might be included in a file compiled with -ansi. So
98 we can't use C++ style "//" comments nor the "asm" keyword (instead
99 use "__asm__"). */
100
/* Derive some tags indicating what the target platform is. Note
   that in this file we're using the compiler's CPP symbols for
   identifying architectures, which are different to the ones we use
   within the rest of Valgrind. Note, __powerpc__ is active for both
   32 and 64-bit PPC, whereas __powerpc64__ is only active for the
   latter (on Linux, that is).

   Misc note: how to find out what's predefined in gcc by default:
   gcc -Wp,-dM somefile.c
*/
/* Start from a clean slate so that exactly one PLAT_* symbol -- or
   NVALGRIND, for unsupported targets -- ends up defined by the chain
   of tests below. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_dragonfly
#undef PLAT_amd64_dragonfly
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris


#if defined(__APPLE__) && defined(__i386__)
#  define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#  define PLAT_amd64_darwin 1
#elif defined(__DragonFly__) && defined(__i386__)
#  define PLAT_x86_dragonfly 1
#elif defined(__DragonFly__) && defined(__amd64__)
#  define PLAT_amd64_dragonfly 1
#elif (defined(__MINGW32__) && !defined(__MINGW64__)) \
      || defined(__CYGWIN32__) \
      || (defined(_WIN32) && defined(_M_IX86))
#  define PLAT_x86_win32 1
#elif defined(__MINGW64__) \
      || (defined(_WIN64) && defined(_M_X64))
#  define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
#  define PLAT_x86_linux 1
/* x32 (__ILP32__ on __x86_64__) is deliberately excluded here. */
#elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
#  define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#  define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
/* Big Endian uses ELF version 1 */
#  define PLAT_ppc64be_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
/* Little Endian uses ELF version 2 */
#  define PLAT_ppc64le_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
#  define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#  define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
#  define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips!=64)
#  define PLAT_mips32_linux 1
#elif defined(__sun) && defined(__i386__)
#  define PLAT_x86_solaris 1
#elif defined(__sun) && defined(__x86_64__)
#  define PLAT_amd64_solaris 1
#else
/* If we're not compiling for our target platform, don't generate
   any inline asms. */
#  if !defined(NVALGRIND)
#    define NVALGRIND 1
#  endif
#endif
179
180
181 /* ------------------------------------------------------------------ */
182 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
183 /* in here of use to end-users -- skip to the next section. */
184 /* ------------------------------------------------------------------ */
185
/*
 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
 * request. Accepts both pointers and integers as arguments.
 *
 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
 * client request that does not return a value.
 *
 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
 * client request and whose value equals the client request result. Accepts
 * both pointers and integers as arguments. Note that such calls are not
 * necessarily pure functions -- they may have side effects.
 */

/* Statement form: assigns the request result to _zzq_rlval. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

/* Statement form that discards the result (a default of 0 is supplied
   and then thrown away). */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                           _zzq_arg2,  _zzq_arg3, _zzq_arg4, _zzq_arg5) \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

#if defined(NVALGRIND)

/* Define NVALGRIND to completely remove the Valgrind magic sequence
   from the compiled code (analogous to NDEBUG's effects on
   assert()).  The request then simply evaluates to its default value. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
      (_zzq_default)

#else  /* ! NVALGRIND */
223
224 /* The following defines the magic code sequences which the JITter
225 spots and handles magically. Don't look too closely at them as
226 they will rot your brain.
227
228 The assembly code sequences for all architectures is in this one
229 file. This is because this file must be stand-alone, and we don't
230 want to have multiple files.
231
232 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
233 value gets put in the return slot, so that everything works when
234 this is executed not under Valgrind. Args are passed in a memory
235 block, and so there's no intrinsic limit to the number that could
236 be passed, but it's currently five.
237
238 The macro args are:
239 _zzq_rlval result lvalue
240 _zzq_default default value (result returned when running on real CPU)
241 _zzq_request request code
242 _zzq_arg1..5 request params
243
244 The other two macros are used to support function wrapping, and are
245 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
246 guest's NRADDR pseudo-register and whatever other information is
247 needed to safely run the call original from the wrapper: on
248 ppc64-linux, the R2 value at the divert point is also needed. This
249 information is abstracted into a user-visible type, OrigFn.
250
251 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
252 guest, but guarantees that the branch instruction will not be
253 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
254 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
255 complete inline asm, since it needs to be combined with more magic
256 inline asm stuff to be useful.
257 */
258
/* ----------------- x86-{linux,darwin,solaris} ---------------- */

#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin)  \
    ||  (defined(PLAT_x86_win32) && defined(__GNUC__)) \
    ||  defined(PLAT_x86_solaris) || defined(PLAT_x86_dragonfly)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* The preamble rotates %edi left by 3+13+29+19 = 64 bits, i.e. by 0
   (mod 32) -- a no-op on a real CPU, but a sequence the Valgrind JIT
   recognises as introducing a special instruction. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3, %%edi ; roll $13, %%edi\n\t"       \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

/* Args go in a 6-word memory block pointed to by %eax; the result
   arrives in %edx, which is preloaded with the default so the code
   also works when not running under Valgrind. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Just text, not complete inline asm -- must be combined with more
   asm by the function-wrapping machinery (see the big comment above). */
#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgl %%edi,%%edi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__)
          || PLAT_x86_solaris || PLAT_x86_dragonfly */
325
/* ------------------------- x86-Win32 ------------------------- */

#if defined(PLAT_x86_win32) && !defined(__GNUC__)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#if defined(_MSC_VER)

/* MSVC inline-asm rendering of the same rotate-%edi-by-64-bits no-op
   preamble used in the GCC variant above. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
    __asm rol edi, 3  __asm rol edi, 13                           \
    __asm rol edi, 29 __asm rol edi, 19

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    valgrind_do_client_request_expr((uintptr_t)(_zzq_default),    \
        (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1),        \
        (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3),           \
        (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

/* Out-of-line helper: MSVC __asm blocks cannot appear inside an
   expression, so the request is issued from this inline function.
   Args go in a 6-word block addressed by EAX; EDX carries the default
   in and the result out. */
static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
                                uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                uintptr_t _zzq_arg5)
{
    volatile uintptr_t _zzq_args[6];
    volatile unsigned int _zzq_result;
    _zzq_args[0] = (uintptr_t)(_zzq_request);
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
            __SPECIAL_INSTRUCTION_PREAMBLE
            /* %EDX = client_request ( %EAX ) */
            __asm xchg ebx,ebx
            __asm mov _zzq_result, edx
    }
    return _zzq_result;
}

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
      volatile unsigned int __addr;                               \
      __asm { __SPECIAL_INSTRUCTION_PREAMBLE                      \
              /* %EAX = guest_NRADDR */                           \
              __asm xchg ecx,ecx                                  \
              __asm mov __addr, eax                               \
      }                                                           \
      _zzq_orig->nraddr = __addr;                                 \
    }

/* Expands to the bare identifier ERROR so that any use fails to
   compile: no-redirect calls are not provided for this compiler. */
#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR()                                  \
    do {                                                          \
        __asm { __SPECIAL_INSTRUCTION_PREAMBLE                    \
                __asm xchg edi,edi                                \
        }                                                         \
    } while (0)

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 */
398
/* ----------------- amd64-{linux,darwin,solaris} --------------- */

#if defined(PLAT_amd64_linux)  ||  defined(PLAT_amd64_darwin) \
    ||  defined(PLAT_amd64_solaris) \
    ||  defined(PLAT_amd64_dragonfly) \
    ||  (defined(PLAT_amd64_win64) && defined(__GNUC__))

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotates %rdi left by 3+13+61+51 = 128 bits, i.e. by 0 (mod 64) --
   a no-op on a real CPU, but the JIT's magic marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3, %%rdi ; rolq $13, %%rdi\n\t"       \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

/* Args in a 6-quadword block addressed by %rax; result in %rdx,
   preloaded with the default for the not-under-Valgrind case. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    __extension__                                                 \
    ({ volatile unsigned long int _zzq_args[6];                   \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RDX = client_request ( %RAX ) */         \
                     "xchgq %%rbx,%%rbx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
    volatile unsigned long int __addr;                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RAX = guest_NRADDR */                    \
                     "xchgq %%rcx,%%rcx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    }

/* Just text, not complete inline asm -- combined with more asm by
   the function-wrapping machinery. */
#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgq %%rdi,%%rdi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris
          || PLAT_amd64_dragonfly || (PLAT_amd64_win64 && __GNUC__) */
465
466 /* ------------------------- amd64-Win64 ------------------------- */
467
468 #if defined(PLAT_amd64_win64) && !defined(__GNUC__)
469
470 #error Unsupported compiler.
471
472 #endif /* PLAT_amd64_win64 */
473
/* ------------------------ ppc32-linux ------------------------ */

#if defined(PLAT_ppc32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotates r0 by 3+13+29+19 = 64 bits, i.e. by 0 (mod 32) -- a no-op
   on a real CPU, but the JIT's magic marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
                     "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"

/* Default goes in r3 (also the result register), args pointer in r4. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({         unsigned int  _zzq_args[6];                          \
             unsigned int  _zzq_result;                           \
             unsigned int* _zzq_ptr;                              \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Just text, not complete inline asm -- combined with more asm by
   the function-wrapping machinery. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc32_linux */
542
/* ------------------------ ppc64-linux ------------------------ */

#if defined(PLAT_ppc64be_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;     /* what tocptr do we need? */
   }
   OrigFn;

/* Rotates r0 by 3+13+61+51 = 128 bits, i.e. by 0 (mod 64) -- a no-op
   on a real CPU, but the JIT's magic marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

/* Default goes in r3 (also the result register), args pointer in r4. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({         unsigned long int  _zzq_args[6];                     \
             unsigned long int  _zzq_result;                      \
             unsigned long int* _zzq_ptr;                         \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
  })

/* Fills in both nraddr and the R2 (TOC pointer) value captured at the
   divert point -- the wrapper needs both to call the original. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64be_linux */
621
#if defined(PLAT_ppc64le_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;     /* what tocptr do we need? */
   }
   OrigFn;

/* Rotates r0 by 3+13+61+51 = 128 bits, i.e. by 0 (mod 64) -- a no-op
   on a real CPU, but the JIT's magic marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

/* Default goes in r3 (also the result register), args pointer in r4. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({         unsigned long int  _zzq_args[6];                     \
             unsigned long int  _zzq_result;                      \
             unsigned long int* _zzq_ptr;                         \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
  })

/* Fills in both nraddr and the R2 (TOC pointer) value captured at the
   divert point -- the wrapper needs both to call the original. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

/* ELFv2 (little-endian) uses R12, not R11, as the branch target. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R12 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64le_linux */
698
/* ------------------------- arm-linux ------------------------- */

#if defined(PLAT_arm_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotates r12 right by 3+13+29+19 = 64 bits, i.e. by 0 (mod 32) -- a
   no-op on a real CPU, but the JIT's magic marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t"   \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

/* Default goes in r3 (also the result register), args pointer in r4. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("mov r3, %1\n\t" /*default*/                 \
                     "mov r4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = client_request ( R4 ) */             \
                     "orr r10, r10, r10\n\t"                      \
                     "mov %0, r3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "r3", "r4");                \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = guest_NRADDR */                      \
                     "orr r11, r11, r11\n\t"                      \
                     "mov %0, r3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Just text, not complete inline asm -- combined with more asm by
   the function-wrapping machinery. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R4 */        \
                     "orr r12, r12, r12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr r9, r9, r9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm_linux */
766
/* ------------------------ arm64-linux ------------------------- */

#if defined(PLAT_arm64_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Rotates x12 right by 3+13+51+61 = 128 bits, i.e. by 0 (mod 64) --
   a no-op on a real CPU, but the JIT's magic marker. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "ror x12, x12, #3  ;  ror x12, x12, #13 \n\t"         \
            "ror x12, x12, #51 ;  ror x12, x12, #61 \n\t"

/* Default goes in x3 (also the result register), args pointer in x4. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned long int _zzq_args[6];                      \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile("mov x3, %1\n\t" /*default*/                 \
                     "mov x4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = client_request ( X4 ) */             \
                     "orr x10, x10, x10\n\t"                      \
                     "mov %0, x3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" ((unsigned long int)(_zzq_default)),   \
                       "r" (&_zzq_args[0])                        \
                     : "cc","memory", "x3", "x4");                \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = guest_NRADDR */                      \
                     "orr x11, x11, x11\n\t"                      \
                     "mov %0, x3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "x3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Just text, not complete inline asm -- combined with more asm by
   the function-wrapping machinery. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir X8 */          \
                     "orr x12, x12, x12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr x9, x9, x9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm64_linux */
835
/* ------------------------ s390x-linux ------------------------ */

#if defined(PLAT_s390x_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
 * code. This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_decoder.c).
 *
 * Each "lr r,r" self-copy is a no-op on a real CPU; the four-in-a-row
 * sequence is the JIT's magic marker.
 */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "lr 15,15\n\t"                               \
                     "lr 1,1\n\t"                                 \
                     "lr 2,2\n\t"                                 \
                     "lr 3,3\n\t"

/* One trailing no-op per operation selects what the preamble means. */
#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE  "lr 5,5\n\t"

/* Args pointer goes in r2, default in r3 (also the result register). */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
 ({volatile unsigned long int _zzq_args[6];                       \
   volatile unsigned long int _zzq_result;                        \
   _zzq_args[0] = (unsigned long int)(_zzq_request);              \
   _zzq_args[1] = (unsigned long int)(_zzq_arg1);                 \
   _zzq_args[2] = (unsigned long int)(_zzq_arg2);                 \
   _zzq_args[3] = (unsigned long int)(_zzq_arg3);                 \
   _zzq_args[4] = (unsigned long int)(_zzq_arg4);                 \
   _zzq_args[5] = (unsigned long int)(_zzq_arg5);                 \
   __asm__ volatile(/* r2 = args */                               \
                    "lgr 2,%1\n\t"                                \
                    /* r3 = default */                            \
                    "lgr 3,%2\n\t"                                \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    __CLIENT_REQUEST_CODE                         \
                    /* results = r3 */                            \
                    "lgr %0, 3\n\t"                               \
                    : "=d" (_zzq_result)                          \
                    : "a" (&_zzq_args[0]), "0" (_zzq_default)     \
                    : "cc", "2", "3", "memory"                    \
                   );                                             \
   _zzq_result;                                                   \
 })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                    \
   volatile unsigned long int __addr;                             \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                    __GET_NR_CONTEXT_CODE                         \
                    "lgr %0, 3\n\t"                               \
                    : "=a" (__addr)                               \
                    :                                             \
                    : "cc", "3", "memory"                         \
                   );                                             \
   _zzq_orig->nraddr = __addr;                                    \
 }

/* Just text, not complete inline asm -- combined with more asm by
   the function-wrapping machinery. */
#define VALGRIND_CALL_NOREDIR_R1                                  \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     __VEX_INJECT_IR_CODE);                       \
 } while (0)

#endif /* PLAT_s390x_linux */
912
913 /* ------------------------- mips32-linux ---------------- */
914
915 #if defined(PLAT_mips32_linux)
916
917 typedef
918 struct {
919 unsigned int nraddr; /* where's the code? */
920 }
921 OrigFn;
922
923 /* .word 0x342
924 * .word 0x742
925 * .word 0xC2
926 * .word 0x4C2*/
927 #define __SPECIAL_INSTRUCTION_PREAMBLE \
928 "srl $0, $0, 13\n\t" \
929 "srl $0, $0, 29\n\t" \
930 "srl $0, $0, 3\n\t" \
931 "srl $0, $0, 19\n\t"
932
/* Perform a client request, yielding its result as an expression.
   The request code and five arguments are marshalled into
   _zzq_args; $12 points at that block and $11 carries the default
   result.  After the preamble, "or $13,$13,$13" is the
   client-request marker; Valgrind leaves the result in $11, which
   is copied into _zzq_result.  When not running under Valgrind the
   marker is a plain no-op and the default value comes back
   unchanged. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
        __asm__ volatile("move $11, %1\n\t" /*default*/           \
                         "move $12, %2\n\t" /*ptr*/               \
                         __SPECIAL_INSTRUCTION_PREAMBLE           \
                         /* T3 = client_request ( T4 ) */         \
                         "or $13, $13, $13\n\t"                   \
                         "move %0, $11\n\t"     /*result*/        \
                         : "=r" (_zzq_result)                     \
                         : "r" (_zzq_default), "r" (&_zzq_args[0]) \
                         : "$11", "$12", "memory");               \
    _zzq_result;                                                  \
  })
956
/* Fetch guest_NRADDR (the address of the original, non-redirected
   function) into _zzq_rlval.nraddr.  "or $14,$14,$14" is the
   marker for this request; the value is delivered in $11. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %t9 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11"                                      \
                     );                                           \
    _zzq_orig->nraddr = __addr;                                   \
  }
970
/* Call through $t9 ($25) with function redirection suppressed;
   "or $15,$15,$15" is the marker the JIT recognises. */
#define VALGRIND_CALL_NOREDIR_T9                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE              \
                     /* call-noredir *%t9 */                     \
                     "or $15, $15, $15\n\t"
975
/* Trigger VEX IR injection (Valgrind's own IR test machinery);
   "or $11,$11,$11" is the marker and a no-op on real hardware. */
#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     "or $11, $11, $11\n\t"                      \
                    );                                           \
 } while (0)
982
983
984 #endif /* PLAT_mips32_linux */
985
986 /* ------------------------- mips64-linux ---------------- */
987
988 #if defined(PLAT_mips64_linux)
989
/* Context captured by VALGRIND_GET_ORIG_FN: on mips64 the 64-bit
   address of the original (non-redirected) function. */
typedef
   struct {
      unsigned long nraddr; /* where's the code? */
   }
   OrigFn;
995
996 /* dsll $0,$0, 3
997 * dsll $0,$0, 13
998 * dsll $0,$0, 29
999 * dsll $0,$0, 19*/
/* Magic preamble, mips64 flavour: four doubleword shifts of $0
   (hardwired zero), each a no-op on real hardware; the JIT
   recognises the exact sequence as the start of a client
   request. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                              \
        "dsll $0,$0, 3 ; dsll $0,$0,13\n\t"                         \
        "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
1003
/* Perform a client request, yielding its result as an expression.
   Same protocol as the mips32 version but with 64-bit words:
   $11 = default result, $12 = pointer to the six-doubleword
   argument block, "or $13,$13,$13" is the request marker, and the
   result comes back in $11.  Not-under-Valgrind: the marker is a
   no-op, so the default is returned. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
   __extension__                                                  \
   ({ volatile unsigned long int _zzq_args[6];                    \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
        __asm__ volatile("move $11, %1\n\t" /*default*/           \
                         "move $12, %2\n\t" /*ptr*/               \
                         __SPECIAL_INSTRUCTION_PREAMBLE           \
                         /* $11 = client_request ( $12 ) */       \
                         "or $13, $13, $13\n\t"                   \
                         "move %0, $11\n\t"     /*result*/        \
                         : "=r" (_zzq_result)                     \
                         : "r" (_zzq_default), "r" (&_zzq_args[0]) \
                         : "$11", "$12", "memory");               \
    _zzq_result;                                                  \
   })
1027
/* Fetch guest_NRADDR (address of the original, non-redirected
   function) into _zzq_rlval.nraddr.  "or $14,$14,$14" is the
   marker; the value is delivered in $11. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long int __addr;                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $11 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11");                                    \
    _zzq_orig->nraddr = __addr;                                   \
  }
1040
/* Call through $t9 ($25) with function redirection suppressed;
   "or $15,$15,$15" is the marker the JIT recognises. */
#define VALGRIND_CALL_NOREDIR_T9                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE              \
                     /* call-noredir $25 */                      \
                     "or $15, $15, $15\n\t"
1045
/* Trigger VEX IR injection (Valgrind's own IR test machinery);
   "or $11,$11,$11" is the marker and a no-op on real hardware. */
#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     "or $11, $11, $11\n\t"                      \
                    );                                           \
 } while (0)
1052
1053 #endif /* PLAT_mips64_linux */
1054
1055 /* Insert assembly code for other platforms here... */
1056
1057 #endif /* NVALGRIND */
1058
1059
1060 /* ------------------------------------------------------------------ */
1061 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
1062 /* ugly. It's the least-worst tradeoff I can think of. */
1063 /* ------------------------------------------------------------------ */
1064
1065 /* This section defines magic (a.k.a appalling-hack) macros for doing
1066 guaranteed-no-redirection macros, so as to get from function
1067 wrappers to the functions they are wrapping. The whole point is to
1068 construct standard call sequences, but to do the call itself with a
1069 special no-redirect call pseudo-instruction that the JIT
1070 understands and handles specially. This section is long and
1071 repetitious, and I can't see a way to make it shorter.
1072
1073 The naming scheme is as follows:
1074
1075 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
1076
1077 'W' stands for "word" and 'v' for "void". Hence there are
1078 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
1079 and for each, the possibility of returning a word-typed result, or
1080 no result.
1081 */
1082
1083 /* Use these to write the name of your wrapper. NOTE: duplicates
1084 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
   the default behaviour equivalence class tag "00000" into the name.
1086 See pub_tool_redir.h for details -- normally you don't need to
1087 think about this, though. */
1088
1089 /* Use an extra level of macroisation so as to ensure the soname/fnname
1090 args are fully macro-expanded before pasting them together. */
/* Token-paste four fragments; the extra indirection above ensures
   the arguments are macro-expanded before pasting. */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd

/* Produces a symbol _vgw00000ZU_<soname>_<fnname>: "w" = wrap,
   "00000" = default behaviour-equivalence tag (see
   pub_tool_redir.h for the ZU/ZZ encoding distinction). */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
1098
1099 /* Use this macro from within a wrapper function to collect the
1100 context (address and possibly other info) of the original function.
1101 Once you have that you can then use it in one of the CALL_FN_
1102 macros. The type of the argument _lval is OrigFn. */
/* Simply an alias for the platform's VALGRIND_GET_NR_CONTEXT; in a
   replacement (as opposed to wrapper) function the captured nraddr
   is always zero. */
#define VALGRIND_GET_ORIG_FN(_lval)  VALGRIND_GET_NR_CONTEXT(_lval)
1104
1105 /* Also provide end-user facilities for function replacement, rather
1106 than wrapping. A replacement function differs from a wrapper in
1107 that it has no way to get hold of the original function being
1108 called, and hence no way to call onwards to it. In a replacement
1109 function, VALGRIND_GET_ORIG_FN always returns zero. */
1110
/* Same naming scheme as I_WRAP_SONAME_FNNAME_Z{U,Z} above, but
   with "r" (= replace) instead of "w" (= wrap). */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1116
1117 /* Derivatives of the main macros below, for calling functions
1118 returning void. */
1119
/* Each void-returning variant simply invokes the word-returning
   macro of the same arity and discards the result into _junk. */
#define CALL_FN_v_v(fnptr)                                        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_v(_junk,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_W(_junk,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1151
1152 /* ----------------- x86-{linux,darwin,solaris} ---------------- */
1153
1154 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
1155 || defined(PLAT_x86_solaris) || defined(PLAT_x86_dragonfly)
1156
1157 /* These regs are trashed by the hidden call. No need to mention eax
1158 as gcc can already see that, plus causes gcc to bomb. */
/* (eax is the asm output operand in every CALL_FN_ macro below, so
   gcc already knows it is written.) */
#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
1160
1161 /* Macros to save and align the stack before making a function
1162 call and restore it afterwards as gcc may not keep the stack
1163 pointer aligned if it doesn't realise calls are being made
1164 to other functions. */
1165
/* %edi preserves the original %esp across the hidden call; it is
   listed in the clobbers of every CALL_FN_ macro below. */
#define VALGRIND_ALIGN_STACK \
   "movl %%esp,%%edi\n\t" \
   "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK \
   "movl %%edi,%%esp\n\t"
1171
1172 /* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
1173 long) == 4. */
1174
/* Common scheme for all CALL_FN_W_* below:
     - _argvec[0] holds the target address and _argvec[1..n] the
       arguments, so the asm has a single pointer input (%eax);
     - the stack is realigned to 16 bytes (old %esp saved in %edi),
       a "subl" pads the frame so pad + pushed args is a multiple
       of 16, the arguments are pushed right-to-left, the target is
       loaded into %eax and called via the no-redirect pseudo-op;
     - the result comes back in %eax and the stack is restored. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 1 arg: pad 12 + push 4 = 16 bytes. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 2 args: pad 8 + push 8 = 16 bytes. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 3 args: pad 4 + push 12 = 16 bytes. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 4 args: pushes alone total 16 bytes, so no padding needed. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 5 args: pad 12 + push 20 = 32 bytes. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 6 args: pad 8 + push 24 = 32 bytes. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 7 args: pad 4 + push 28 = 32 bytes. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 8 args: pushes alone total 32 bytes, so no padding needed. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 9 args: pad 12 + push 36 = 48 bytes. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 10 args: pad 8 + push 40 = 48 bytes. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 11 args: pad 4 + push 44 = 48 bytes. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 12 args: pushes alone total 48 bytes, so no padding needed. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 48(%%eax)\n\t"                                    \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1581
#endif /* PLAT_x86_linux || PLAT_x86_darwin || PLAT_x86_solaris
          || PLAT_x86_dragonfly */
1583
1584 /* ---------------- amd64-{linux,darwin,solaris} --------------- */
1585
1586 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
1587 || defined(PLAT_amd64_solaris) || defined(PLAT_amd64_dragonfly)
1588
1589 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1590
1591 /* These regs are trashed by the hidden call. */
/* (rax is excluded because it is the asm output operand of every
   CALL_FN_ macro below, so gcc already knows it is written.) */
#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi",       \
                            "rdi", "r8", "r9", "r10", "r11"
1594
1595 /* This is all pretty complex. It's so as to make stack unwinding
1596 work reliably. See bug 243270. The basic problem is the sub and
1597 add of 128 of %rsp in all of the following macros. If gcc believes
1598 the CFA is in %rsp, then unwinding may fail, because what's at the
1599 CFA is not what gcc "expected" when it constructs the CFIs for the
1600 places where the macros are instantiated.
1601
1602 But we can't just add a CFI annotation to increase the CFA offset
1603 by 128, to match the sub of 128 from %rsp, because we don't know
1604 whether gcc has chosen %rsp as the CFA at that point, or whether it
1605 has chosen some other register (eg, %rbp). In the latter case,
1606 adding a CFI annotation to change the CFA offset is simply wrong.
1607
1608 So the solution is to get hold of the CFA using
1609 __builtin_dwarf_cfa(), put it in a known register, and add a
1610 CFI annotation to say what the register is. We choose %rbp for
1611 this (perhaps perversely), because:
1612
1613 (1) %rbp is already subject to unwinding. If a new register was
1614 chosen then the unwinder would have to unwind it in all stack
1615 traces, which is expensive, and
1616
1617 (2) %rbp is already subject to precise exception updates in the
1618 JIT. If a new register was chosen, we'd have to have precise
1619 exceptions for it too, which reduces performance of the
1620 generated code.
1621
1622 However .. one extra complication. We can't just whack the result
1623 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1624 list of trashed registers at the end of the inline assembly
1625 fragments; gcc won't allow %rbp to appear in that list. Hence
1626 instead we need to stash %rbp in %r15 for the duration of the asm,
1627 and say that %r15 is trashed instead. gcc seems happy to go with
1628 that.
1629
1630 Oh .. and this all needs to be conditionalised so that it is
1631 unchanged from before this commit, when compiled with older gccs
1632 that don't support __builtin_dwarf_cfa. Furthermore, since
1633 this header file is freestanding, it has to be independent of
1634 config.h, and so the following conditionalisation cannot depend on
1635 configure time checks.
1636
1637 Although it's not clear from
1638 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1639 this expression excludes Darwin.
1640 .cfi directives in Darwin assembly appear to be completely
1641 different and I haven't investigated how they work.
1642
1643 For even more entertainment value, note we have to use the
1644 completely undocumented __builtin_dwarf_cfa(), which appears to
1645 really compute the CFA, whereas __builtin_frame_address(0) claims
1646 to but actually doesn't. See
1647 https://bugs.kde.org/show_bug.cgi?id=243270#c47
1648 */
/* When the toolchain emits DWARF CFI (__GCC_HAVE_DWARF2_CFI_ASM),
   pass the caller's CFA into the asm as an extra input (%2), park
   it in %rbp for the duration of the call, and emit .cfi
   directives declaring %rbp as the CFA so unwinding through the
   hidden call works.  The old %rbp is stashed in %r15 (see the
   long comment above for why).  Otherwise all three expand to
   nothing. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif
1665
1666 /* Macros to save and align the stack before making a function
1667 call and restore it afterwards as gcc may not keep the stack
1668 pointer aligned if it doesn't realise calls are being made
1669 to other functions. */
1670
/* %r14 preserves the original %rsp across the hidden call; it is
   listed in the clobbers of every CALL_FN_ macro below. */
#define VALGRIND_ALIGN_STACK \
   "movq %%rsp,%%r14\n\t" \
   "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK \
   "movq %%r14,%%rsp\n\t"
1676
1677 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1678 long) == 8. */
1679
1680 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1681 macros. In order not to trash the stack redzone, we need to drop
1682 %rsp by 128 before the hidden call, and restore afterwards. The
1683 nastyness is that it is only by luck that the stack still appears
1684 to be unwindable during the hidden call - since then the behaviour
1685 of any routine using this macro does not match what the CFI data
1686 says. Sigh.
1687
1688 Why is this important? Imagine that a wrapper has a stack
1689 allocated local, and passes to the hidden call, a pointer to it.
1690 Because gcc does not know about the hidden call, it may allocate
1691 that local in the redzone. Unfortunately the hidden call may then
1692 trash it before it comes to use it. So we must step clear of the
1693 redzone, for the duration of the hidden call, to make it safe.
1694
1695 Probably the same problem afflicts the other redzone-style ABIs too
1696 (ppc64-linux); but for those, the stack is
1697 self describing (none of this CFI nonsense) so at least messing
1698 with the stack pointer doesn't give a danger of non-unwindable
1699 stack. */
1700
1701 #define CALL_FN_W_v(lval, orig) \
1702 do { \
1703 volatile OrigFn _orig = (orig); \
1704 volatile unsigned long _argvec[1]; \
1705 volatile unsigned long _res; \
1706 _argvec[0] = (unsigned long)_orig.nraddr; \
1707 __asm__ volatile( \
1708 VALGRIND_CFI_PROLOGUE \
1709 VALGRIND_ALIGN_STACK \
1710 "subq $128,%%rsp\n\t" \
1711 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1712 VALGRIND_CALL_NOREDIR_RAX \
1713 VALGRIND_RESTORE_STACK \
1714 VALGRIND_CFI_EPILOGUE \
1715 : /*out*/ "=a" (_res) \
1716 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1717 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1718 ); \
1719 lval = (__typeof__(lval)) _res; \
1720 } while (0)
1721
1722 #define CALL_FN_W_W(lval, orig, arg1) \
1723 do { \
1724 volatile OrigFn _orig = (orig); \
1725 volatile unsigned long _argvec[2]; \
1726 volatile unsigned long _res; \
1727 _argvec[0] = (unsigned long)_orig.nraddr; \
1728 _argvec[1] = (unsigned long)(arg1); \
1729 __asm__ volatile( \
1730 VALGRIND_CFI_PROLOGUE \
1731 VALGRIND_ALIGN_STACK \
1732 "subq $128,%%rsp\n\t" \
1733 "movq 8(%%rax), %%rdi\n\t" \
1734 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1735 VALGRIND_CALL_NOREDIR_RAX \
1736 VALGRIND_RESTORE_STACK \
1737 VALGRIND_CFI_EPILOGUE \
1738 : /*out*/ "=a" (_res) \
1739 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1740 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1741 ); \
1742 lval = (__typeof__(lval)) _res; \
1743 } while (0)
1744
1745 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1746 do { \
1747 volatile OrigFn _orig = (orig); \
1748 volatile unsigned long _argvec[3]; \
1749 volatile unsigned long _res; \
1750 _argvec[0] = (unsigned long)_orig.nraddr; \
1751 _argvec[1] = (unsigned long)(arg1); \
1752 _argvec[2] = (unsigned long)(arg2); \
1753 __asm__ volatile( \
1754 VALGRIND_CFI_PROLOGUE \
1755 VALGRIND_ALIGN_STACK \
1756 "subq $128,%%rsp\n\t" \
1757 "movq 16(%%rax), %%rsi\n\t" \
1758 "movq 8(%%rax), %%rdi\n\t" \
1759 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1760 VALGRIND_CALL_NOREDIR_RAX \
1761 VALGRIND_RESTORE_STACK \
1762 VALGRIND_CFI_EPILOGUE \
1763 : /*out*/ "=a" (_res) \
1764 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1765 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1766 ); \
1767 lval = (__typeof__(lval)) _res; \
1768 } while (0)
1769
1770 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1771 do { \
1772 volatile OrigFn _orig = (orig); \
1773 volatile unsigned long _argvec[4]; \
1774 volatile unsigned long _res; \
1775 _argvec[0] = (unsigned long)_orig.nraddr; \
1776 _argvec[1] = (unsigned long)(arg1); \
1777 _argvec[2] = (unsigned long)(arg2); \
1778 _argvec[3] = (unsigned long)(arg3); \
1779 __asm__ volatile( \
1780 VALGRIND_CFI_PROLOGUE \
1781 VALGRIND_ALIGN_STACK \
1782 "subq $128,%%rsp\n\t" \
1783 "movq 24(%%rax), %%rdx\n\t" \
1784 "movq 16(%%rax), %%rsi\n\t" \
1785 "movq 8(%%rax), %%rdi\n\t" \
1786 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1787 VALGRIND_CALL_NOREDIR_RAX \
1788 VALGRIND_RESTORE_STACK \
1789 VALGRIND_CFI_EPILOGUE \
1790 : /*out*/ "=a" (_res) \
1791 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1792 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1793 ); \
1794 lval = (__typeof__(lval)) _res; \
1795 } while (0)
1796
1797 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1798 do { \
1799 volatile OrigFn _orig = (orig); \
1800 volatile unsigned long _argvec[5]; \
1801 volatile unsigned long _res; \
1802 _argvec[0] = (unsigned long)_orig.nraddr; \
1803 _argvec[1] = (unsigned long)(arg1); \
1804 _argvec[2] = (unsigned long)(arg2); \
1805 _argvec[3] = (unsigned long)(arg3); \
1806 _argvec[4] = (unsigned long)(arg4); \
1807 __asm__ volatile( \
1808 VALGRIND_CFI_PROLOGUE \
1809 VALGRIND_ALIGN_STACK \
1810 "subq $128,%%rsp\n\t" \
1811 "movq 32(%%rax), %%rcx\n\t" \
1812 "movq 24(%%rax), %%rdx\n\t" \
1813 "movq 16(%%rax), %%rsi\n\t" \
1814 "movq 8(%%rax), %%rdi\n\t" \
1815 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1816 VALGRIND_CALL_NOREDIR_RAX \
1817 VALGRIND_RESTORE_STACK \
1818 VALGRIND_CFI_EPILOGUE \
1819 : /*out*/ "=a" (_res) \
1820 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1821 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1822 ); \
1823 lval = (__typeof__(lval)) _res; \
1824 } while (0)
1825
/* Call the original 5-arg function in ORIG, result into LVAL.
   Args 1..5 go in %rdi/%rsi/%rdx/%rcx/%r8 (SysV AMD64); the
   128-byte %rsp drop steps over the ABI red zone. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax, loaded last */ \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1856
/* Call the original 6-arg function in ORIG, result into LVAL.
   This is the last variant with all args in registers
   (%rdi/%rsi/%rdx/%rcx/%r8/%r9); 7+ args spill to the stack. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax, loaded last */ \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1889
/* Call the original 7-arg function in ORIG, result into LVAL.
   Args 1..6 in registers; arg7 is pushed on the stack.  Note the
   "subq $136" (not 128): 136 + one 8-byte push = 144, a multiple
   of 16, so %rsp stays 16-byte aligned (as established by
   VALGRIND_ALIGN_STACK) at the point of the call. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax, loaded last */ \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1925
/* Call the original 8-arg function in ORIG, result into LVAL.
   Args 7..8 are pushed in reverse order (arg8 first) so arg7 ends
   up lowest, matching SysV stack-arg layout.  128 + two 8-byte
   pushes = 144, keeping %rsp 16-byte aligned at the call. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax, loaded last */ \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1963
/* Call the original 9-arg function in ORIG, result into LVAL.
   Args 7..9 go on the stack (pushed highest-numbered first).
   136 + three 8-byte pushes = 160, a multiple of 16, so %rsp
   stays 16-byte aligned at the call. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax, loaded last */ \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2003
/* Call the original 10-arg function in ORIG, result into LVAL.
   Args 7..10 go on the stack, pushed in reverse so arg7 is
   lowest.  128 + four 8-byte pushes = 160, preserving 16-byte
   %rsp alignment at the call. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax, loaded last */ \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2045
/* Call the original 11-arg function in ORIG, result into LVAL.
   Args 7..11 go on the stack, pushed in reverse.  136 + five
   8-byte pushes = 176, preserving 16-byte %rsp alignment at the
   call. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 88(%%rax)\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax, loaded last */ \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2089
/* Call the original 12-arg function in ORIG, result into LVAL.
   Args 7..12 go on the stack, pushed in reverse.  128 + six
   8-byte pushes = 176, preserving 16-byte %rsp alignment at the
   call. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 96(%%rax)\n\t"                                    \
         "pushq 88(%%rax)\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax, loaded last */ \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2135
2136 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
2137
2138 /* ------------------------ ppc32-linux ------------------------ */
2139
2140 #if defined(PLAT_ppc32_linux)
2141
2142 /* This is useful for finding out about the on-stack stuff:
2143
2144 extern int f9 ( int,int,int,int,int,int,int,int,int );
2145 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2146 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2147 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2148
2149 int g9 ( void ) {
2150 return f9(11,22,33,44,55,66,77,88,99);
2151 }
2152 int g10 ( void ) {
2153 return f10(11,22,33,44,55,66,77,88,99,110);
2154 }
2155 int g11 ( void ) {
2156 return f11(11,22,33,44,55,66,77,88,99,110,121);
2157 }
2158 int g12 ( void ) {
2159 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2160 }
2161 */
2162
2163 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2164
/* These regs are trashed by the hidden call.  Declaring them as
   clobbers forces gcc to keep any live values elsewhere across the
   CALL_FN_* asm blocks.  NOTE(review): "r13" appears in this list
   although r13 is normally reserved on ppc32 SysV — presumably kept
   for safety; confirm against the target ABI before removing. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",   \
   "r11", "r12", "r13"
2171
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save the stack pointer (r1) in r28, then clear the low 4 bits of
   r1 ("rlwinm 1,1,0,0,27" keeps bits 0..27), i.e. round r1 down to
   a 16-byte boundary. */
#define VALGRIND_ALIGN_STACK               \
      "mr 28,1\n\t"                        \
      "rlwinm 1,1,0,0,27\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the original r1 from r28. */
#define VALGRIND_RESTORE_STACK             \
      "mr 1,28\n\t"
2182
2183 /* These CALL_FN_ macros assume that on ppc32-linux,
2184 sizeof(unsigned long) == 4. */
2185
/* Call the original zero-arg function in ORIG, result into LVAL.
   _argvec[0] holds the target address; r11 carries it to the
   branch-and-link client request, and the result comes back in r3. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2205
/* Call the original 1-arg function in ORIG, result into LVAL.
   _argvec entries are 4-byte slots; arg1 is loaded from 4(r11)
   into r3.  The target address is loaded into r11 last so the
   earlier loads can still use it as the base pointer. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2227
/* Call the original 2-arg function in ORIG, result into LVAL.
   Args go in r3/r4, loaded from the 4-byte _argvec slots. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2251
/* Call the original 3-arg function in ORIG, result into LVAL.
   Args go in r3/r4/r5. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2277
/* Call the original 4-arg function in ORIG, result into LVAL.
   Args go in r3..r6. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2305
/* Call the original 5-arg function in ORIG, result into LVAL.
   Args go in r3..r7. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2335
/* Call the original 6-arg function in ORIG, result into LVAL.
   Args go in r3..r8. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2367
/* Call the original 7-arg function in ORIG, result into LVAL.
   Args go in r3..r9. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2402
/* Call the original 8-arg function in ORIG, result into LVAL.
   Args go in r3..r10 — the last all-register variant; args 9+
   spill to the stack in the larger macros. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2439
/* Call the original 9-arg function in ORIG, result into LVAL.
   Args 1..8 go in r3..r10; arg9 is stored at 8(r1) in a 16-byte
   frame opened with "addi 1,1,-16".  r3 is used as a scratch for
   the stack store before being (re)loaded with arg1, so the
   stack stores must precede the register loads. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2482
/* Call the original 10-arg function in ORIG, result into LVAL.
   Args 1..8 in r3..r10; arg9/arg10 stored at 8(r1)/12(r1) in a
   16-byte frame.  r3 is the scratch for the stores, so they come
   before the register loads. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2529
/* Call the original 11-arg function in ORIG, result into LVAL.
   Args 1..8 in r3..r10; args 9..11 at 8/12/16(r1).  The frame
   grows to 32 bytes here so the third stack arg still fits while
   r1 stays 16-byte aligned. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2580
/* Call the original 12-arg function in ORIG, result into LVAL.
   Args 1..8 in r3..r10; args 9..12 at 8/12/16/20(r1) inside a
   32-byte frame.  As in the smaller variants, r3 is scratch for
   the stack stores, so they must precede the register loads. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      _argvec[12] = (unsigned long)arg12;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg12 */                                              \
         "lwz 3,48(11)\n\t"                                       \
         "stw 3,20(1)\n\t"                                        \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2635
2636 #endif /* PLAT_ppc32_linux */
2637
2638 /* ------------------------ ppc64-linux ------------------------ */
2639
2640 #if defined(PLAT_ppc64be_linux)
2641
2642 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2643
/* These regs are trashed by the hidden call.  Unlike the ppc32
   list, r2 (the TOC pointer) is NOT listed here — the CALL_FN_*
   macros below save and restore it explicitly around the call. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
   "r11", "r12", "r13"
2650
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save r1 in r28, then "rldicr 1,1,0,59" keeps bits 0..59 and
   clears the low 4 bits, rounding r1 down to a 16-byte boundary. */
#define VALGRIND_ALIGN_STACK               \
      "mr 28,1\n\t"                        \
      "rldicr 1,1,0,59\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the original r1 from r28. */
#define VALGRIND_RESTORE_STACK             \
      "mr 1,28\n\t"
2661
2662 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2663 long) == 8. */
2664
/* Call the original zero-arg function in ORIG, result into LVAL.
   ppc64be ABI: the asm base pointer is &_argvec[2], so -16(11) is
   _argvec[0] (scratch slot where the caller's r2/TOC is saved),
   -8(11) is _argvec[1] (the callee's TOC from _orig.r2), and
   0(11) is the target address. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2690
/* Call the original 1-arg function in ORIG, result into LVAL.
   Slots are 8 bytes wide on ppc64; relative to the &_argvec[2]
   base, arg1 sits at 8(11).  The caller's TOC (r2) is saved in
   _argvec[0] (-16(11)) and restored after the call. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2718
/* Call the original 2-arg function in ORIG, result into LVAL.
   Args at 8(11)/16(11) relative to the &_argvec[2] base go into
   r3/r4; TOC is saved/restored via _argvec[0]. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2748
/* Call the original 3-arg function in ORIG, result into LVAL.
   Args at 8/16/24(11) go into r3/r4/r5; TOC saved/restored via
   _argvec[0]. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2780
/* ppc64be: as CALL_FN_W_WW but with 4 word args (arg1..arg4 ->
   r3..r6).  TOC handling and result (r3) as in the 2-arg variant. */
2781 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)      \
2782    do {                                                      \
2783       volatile OrigFn        _orig = (orig);                 \
2784       volatile unsigned long _argvec[3+4];                   \
2785       volatile unsigned long _res;                           \
2786       /* _argvec[0] holds current r2 across the call */      \
2787       _argvec[1] = (unsigned long)_orig.r2;                  \
2788       _argvec[2] = (unsigned long)_orig.nraddr;              \
2789       _argvec[2+1] = (unsigned long)arg1;                    \
2790       _argvec[2+2] = (unsigned long)arg2;                    \
2791       _argvec[2+3] = (unsigned long)arg3;                    \
2792       _argvec[2+4] = (unsigned long)arg4;                    \
2793       __asm__ volatile(                                      \
2794          VALGRIND_ALIGN_STACK                                \
2795          "mr 11,%1\n\t"                                      \
2796          "std 2,-16(11)\n\t"  /* save tocptr */              \
2797          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */      \
2798          "ld   3, 8(11)\n\t"  /* arg1->r3 */                 \
2799          "ld   4, 16(11)\n\t" /* arg2->r4 */                 \
2800          "ld   5, 24(11)\n\t" /* arg3->r5 */                 \
2801          "ld   6, 32(11)\n\t" /* arg4->r6 */                 \
2802          "ld  11, 0(11)\n\t"  /* target->r11 */              \
2803          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11             \
2804          "mr 11,%1\n\t"                                      \
2805          "mr %0,3\n\t"                                       \
2806          "ld 2,-16(11)\n\t" /* restore tocptr */             \
2807          VALGRIND_RESTORE_STACK                              \
2808          : /*out*/   "=r" (_res)                             \
2809          : /*in*/    "r" (&_argvec[2])                       \
2810          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2811       );                                                     \
2812       lval = (__typeof__(lval)) _res;                        \
2813    } while (0)
2814
/* ppc64be: as CALL_FN_W_WW but with 5 word args (arg1..arg5 ->
   r3..r7).  All five fit in registers; no stack spill needed. */
2815 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)   \
2816    do {                                                      \
2817       volatile OrigFn        _orig = (orig);                 \
2818       volatile unsigned long _argvec[3+5];                   \
2819       volatile unsigned long _res;                           \
2820       /* _argvec[0] holds current r2 across the call */      \
2821       _argvec[1] = (unsigned long)_orig.r2;                  \
2822       _argvec[2] = (unsigned long)_orig.nraddr;              \
2823       _argvec[2+1] = (unsigned long)arg1;                    \
2824       _argvec[2+2] = (unsigned long)arg2;                    \
2825       _argvec[2+3] = (unsigned long)arg3;                    \
2826       _argvec[2+4] = (unsigned long)arg4;                    \
2827       _argvec[2+5] = (unsigned long)arg5;                    \
2828       __asm__ volatile(                                      \
2829          VALGRIND_ALIGN_STACK                                \
2830          "mr 11,%1\n\t"                                      \
2831          "std 2,-16(11)\n\t"  /* save tocptr */              \
2832          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */      \
2833          "ld   3, 8(11)\n\t"  /* arg1->r3 */                 \
2834          "ld   4, 16(11)\n\t" /* arg2->r4 */                 \
2835          "ld   5, 24(11)\n\t" /* arg3->r5 */                 \
2836          "ld   6, 32(11)\n\t" /* arg4->r6 */                 \
2837          "ld   7, 40(11)\n\t" /* arg5->r7 */                 \
2838          "ld  11, 0(11)\n\t"  /* target->r11 */              \
2839          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11             \
2840          "mr 11,%1\n\t"                                      \
2841          "mr %0,3\n\t"                                       \
2842          "ld 2,-16(11)\n\t" /* restore tocptr */             \
2843          VALGRIND_RESTORE_STACK                              \
2844          : /*out*/   "=r" (_res)                             \
2845          : /*in*/    "r" (&_argvec[2])                       \
2846          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2847       );                                                     \
2848       lval = (__typeof__(lval)) _res;                        \
2849    } while (0)
2850
/* ppc64be: as CALL_FN_W_WW but with 6 word args (arg1..arg6 ->
   r3..r8). */
2851 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2852    do {                                                      \
2853       volatile OrigFn        _orig = (orig);                 \
2854       volatile unsigned long _argvec[3+6];                   \
2855       volatile unsigned long _res;                           \
2856       /* _argvec[0] holds current r2 across the call */      \
2857       _argvec[1] = (unsigned long)_orig.r2;                  \
2858       _argvec[2] = (unsigned long)_orig.nraddr;              \
2859       _argvec[2+1] = (unsigned long)arg1;                    \
2860       _argvec[2+2] = (unsigned long)arg2;                    \
2861       _argvec[2+3] = (unsigned long)arg3;                    \
2862       _argvec[2+4] = (unsigned long)arg4;                    \
2863       _argvec[2+5] = (unsigned long)arg5;                    \
2864       _argvec[2+6] = (unsigned long)arg6;                    \
2865       __asm__ volatile(                                      \
2866          VALGRIND_ALIGN_STACK                                \
2867          "mr 11,%1\n\t"                                      \
2868          "std 2,-16(11)\n\t"  /* save tocptr */              \
2869          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */      \
2870          "ld   3, 8(11)\n\t"  /* arg1->r3 */                 \
2871          "ld   4, 16(11)\n\t" /* arg2->r4 */                 \
2872          "ld   5, 24(11)\n\t" /* arg3->r5 */                 \
2873          "ld   6, 32(11)\n\t" /* arg4->r6 */                 \
2874          "ld   7, 40(11)\n\t" /* arg5->r7 */                 \
2875          "ld   8, 48(11)\n\t" /* arg6->r8 */                 \
2876          "ld  11, 0(11)\n\t"  /* target->r11 */              \
2877          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11             \
2878          "mr 11,%1\n\t"                                      \
2879          "mr %0,3\n\t"                                       \
2880          "ld 2,-16(11)\n\t" /* restore tocptr */             \
2881          VALGRIND_RESTORE_STACK                              \
2882          : /*out*/   "=r" (_res)                             \
2883          : /*in*/    "r" (&_argvec[2])                       \
2884          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2885       );                                                     \
2886       lval = (__typeof__(lval)) _res;                        \
2887    } while (0)
2888
/* ppc64be: as CALL_FN_W_WW but with 7 word args (arg1..arg7 ->
   r3..r9). */
2889 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2890                                  arg7)                        \
2891    do {                                                      \
2892       volatile OrigFn        _orig = (orig);                 \
2893       volatile unsigned long _argvec[3+7];                   \
2894       volatile unsigned long _res;                           \
2895       /* _argvec[0] holds current r2 across the call */      \
2896       _argvec[1] = (unsigned long)_orig.r2;                  \
2897       _argvec[2] = (unsigned long)_orig.nraddr;              \
2898       _argvec[2+1] = (unsigned long)arg1;                    \
2899       _argvec[2+2] = (unsigned long)arg2;                    \
2900       _argvec[2+3] = (unsigned long)arg3;                    \
2901       _argvec[2+4] = (unsigned long)arg4;                    \
2902       _argvec[2+5] = (unsigned long)arg5;                    \
2903       _argvec[2+6] = (unsigned long)arg6;                    \
2904       _argvec[2+7] = (unsigned long)arg7;                    \
2905       __asm__ volatile(                                      \
2906          VALGRIND_ALIGN_STACK                                \
2907          "mr 11,%1\n\t"                                      \
2908          "std 2,-16(11)\n\t"  /* save tocptr */              \
2909          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */      \
2910          "ld   3, 8(11)\n\t"  /* arg1->r3 */                 \
2911          "ld   4, 16(11)\n\t" /* arg2->r4 */                 \
2912          "ld   5, 24(11)\n\t" /* arg3->r5 */                 \
2913          "ld   6, 32(11)\n\t" /* arg4->r6 */                 \
2914          "ld   7, 40(11)\n\t" /* arg5->r7 */                 \
2915          "ld   8, 48(11)\n\t" /* arg6->r8 */                 \
2916          "ld   9, 56(11)\n\t" /* arg7->r9 */                 \
2917          "ld  11, 0(11)\n\t"  /* target->r11 */              \
2918          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11             \
2919          "mr 11,%1\n\t"                                      \
2920          "mr %0,3\n\t"                                       \
2921          "ld 2,-16(11)\n\t" /* restore tocptr */             \
2922          VALGRIND_RESTORE_STACK                              \
2923          : /*out*/   "=r" (_res)                             \
2924          : /*in*/    "r" (&_argvec[2])                       \
2925          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2926       );                                                     \
2927       lval = (__typeof__(lval)) _res;                        \
2928    } while (0)
2929
/* ppc64be: as CALL_FN_W_WW but with 8 word args (arg1..arg8 ->
   r3..r10).  Eight is the last all-register case; 9+ args spill to
   the stack (see CALL_FN_W_9W below). */
2930 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2931                                  arg7,arg8)                   \
2932    do {                                                      \
2933       volatile OrigFn        _orig = (orig);                 \
2934       volatile unsigned long _argvec[3+8];                   \
2935       volatile unsigned long _res;                           \
2936       /* _argvec[0] holds current r2 across the call */      \
2937       _argvec[1] = (unsigned long)_orig.r2;                  \
2938       _argvec[2] = (unsigned long)_orig.nraddr;              \
2939       _argvec[2+1] = (unsigned long)arg1;                    \
2940       _argvec[2+2] = (unsigned long)arg2;                    \
2941       _argvec[2+3] = (unsigned long)arg3;                    \
2942       _argvec[2+4] = (unsigned long)arg4;                    \
2943       _argvec[2+5] = (unsigned long)arg5;                    \
2944       _argvec[2+6] = (unsigned long)arg6;                    \
2945       _argvec[2+7] = (unsigned long)arg7;                    \
2946       _argvec[2+8] = (unsigned long)arg8;                    \
2947       __asm__ volatile(                                      \
2948          VALGRIND_ALIGN_STACK                                \
2949          "mr 11,%1\n\t"                                      \
2950          "std 2,-16(11)\n\t"  /* save tocptr */              \
2951          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */      \
2952          "ld   3, 8(11)\n\t"  /* arg1->r3 */                 \
2953          "ld   4, 16(11)\n\t" /* arg2->r4 */                 \
2954          "ld   5, 24(11)\n\t" /* arg3->r5 */                 \
2955          "ld   6, 32(11)\n\t" /* arg4->r6 */                 \
2956          "ld   7, 40(11)\n\t" /* arg5->r7 */                 \
2957          "ld   8, 48(11)\n\t" /* arg6->r8 */                 \
2958          "ld   9, 56(11)\n\t" /* arg7->r9 */                 \
2959          "ld  10, 64(11)\n\t" /* arg8->r10 */                \
2960          "ld  11, 0(11)\n\t"  /* target->r11 */              \
2961          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11             \
2962          "mr 11,%1\n\t"                                      \
2963          "mr %0,3\n\t"                                       \
2964          "ld 2,-16(11)\n\t" /* restore tocptr */             \
2965          VALGRIND_RESTORE_STACK                              \
2966          : /*out*/   "=r" (_res)                             \
2967          : /*in*/    "r" (&_argvec[2])                       \
2968          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2969       );                                                     \
2970       lval = (__typeof__(lval)) _res;                        \
2971    } while (0)
2972
/* ppc64be: 9 word args.  arg1..arg8 go in r3..r10 as usual; the
   frame is grown by 128 bytes and arg9 is stored to 112(1) in the
   new frame (r3 is used as scratch for that copy BEFORE the register
   args are loaded, which is why the spill comes first).  The addi is
   undone by VALGRIND_RESTORE_STACK (sp restored from r28). */
2973 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2974                                  arg7,arg8,arg9)              \
2975    do {                                                      \
2976       volatile OrigFn        _orig = (orig);                 \
2977       volatile unsigned long _argvec[3+9];                   \
2978       volatile unsigned long _res;                           \
2979       /* _argvec[0] holds current r2 across the call */      \
2980       _argvec[1] = (unsigned long)_orig.r2;                  \
2981       _argvec[2] = (unsigned long)_orig.nraddr;              \
2982       _argvec[2+1] = (unsigned long)arg1;                    \
2983       _argvec[2+2] = (unsigned long)arg2;                    \
2984       _argvec[2+3] = (unsigned long)arg3;                    \
2985       _argvec[2+4] = (unsigned long)arg4;                    \
2986       _argvec[2+5] = (unsigned long)arg5;                    \
2987       _argvec[2+6] = (unsigned long)arg6;                    \
2988       _argvec[2+7] = (unsigned long)arg7;                    \
2989       _argvec[2+8] = (unsigned long)arg8;                    \
2990       _argvec[2+9] = (unsigned long)arg9;                    \
2991       __asm__ volatile(                                      \
2992          VALGRIND_ALIGN_STACK                                \
2993          "mr 11,%1\n\t"                                      \
2994          "std 2,-16(11)\n\t"  /* save tocptr */              \
2995          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */      \
2996          "addi 1,1,-128\n\t"  /* expand stack frame */       \
2997          /* arg9 */                                          \
2998          "ld  3,72(11)\n\t"                                  \
2999          "std 3,112(1)\n\t"                                  \
3000          /* args1-8 */                                       \
3001          "ld   3, 8(11)\n\t"  /* arg1->r3 */                 \
3002          "ld   4, 16(11)\n\t" /* arg2->r4 */                 \
3003          "ld   5, 24(11)\n\t" /* arg3->r5 */                 \
3004          "ld   6, 32(11)\n\t" /* arg4->r6 */                 \
3005          "ld   7, 40(11)\n\t" /* arg5->r7 */                 \
3006          "ld   8, 48(11)\n\t" /* arg6->r8 */                 \
3007          "ld   9, 56(11)\n\t" /* arg7->r9 */                 \
3008          "ld  10, 64(11)\n\t" /* arg8->r10 */                \
3009          "ld  11, 0(11)\n\t"  /* target->r11 */              \
3010          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11             \
3011          "mr 11,%1\n\t"                                      \
3012          "mr %0,3\n\t"                                       \
3013          "ld 2,-16(11)\n\t" /* restore tocptr */             \
3014          VALGRIND_RESTORE_STACK                              \
3015          : /*out*/   "=r" (_res)                             \
3016          : /*in*/    "r" (&_argvec[2])                       \
3017          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3018       );                                                     \
3019       lval = (__typeof__(lval)) _res;                        \
3020    } while (0)
3021
/* ppc64be: 10 word args.  As CALL_FN_W_9W: r3..r10 carry arg1..arg8;
   the frame is grown by 128 bytes and arg10/arg9 are spilled to
   120(1)/112(1) (r3 as scratch) before the register args load. */
3022 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3023                                   arg7,arg8,arg9,arg10)       \
3024    do {                                                      \
3025       volatile OrigFn        _orig = (orig);                 \
3026       volatile unsigned long _argvec[3+10];                  \
3027       volatile unsigned long _res;                           \
3028       /* _argvec[0] holds current r2 across the call */      \
3029       _argvec[1] = (unsigned long)_orig.r2;                  \
3030       _argvec[2] = (unsigned long)_orig.nraddr;              \
3031       _argvec[2+1] = (unsigned long)arg1;                    \
3032       _argvec[2+2] = (unsigned long)arg2;                    \
3033       _argvec[2+3] = (unsigned long)arg3;                    \
3034       _argvec[2+4] = (unsigned long)arg4;                    \
3035       _argvec[2+5] = (unsigned long)arg5;                    \
3036       _argvec[2+6] = (unsigned long)arg6;                    \
3037       _argvec[2+7] = (unsigned long)arg7;                    \
3038       _argvec[2+8] = (unsigned long)arg8;                    \
3039       _argvec[2+9] = (unsigned long)arg9;                    \
3040       _argvec[2+10] = (unsigned long)arg10;                  \
3041       __asm__ volatile(                                      \
3042          VALGRIND_ALIGN_STACK                                \
3043          "mr 11,%1\n\t"                                      \
3044          "std 2,-16(11)\n\t"  /* save tocptr */              \
3045          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */      \
3046          "addi 1,1,-128\n\t"  /* expand stack frame */       \
3047          /* arg10 */                                         \
3048          "ld  3,80(11)\n\t"                                  \
3049          "std 3,120(1)\n\t"                                  \
3050          /* arg9 */                                          \
3051          "ld  3,72(11)\n\t"                                  \
3052          "std 3,112(1)\n\t"                                  \
3053          /* args1-8 */                                       \
3054          "ld   3, 8(11)\n\t"  /* arg1->r3 */                 \
3055          "ld   4, 16(11)\n\t" /* arg2->r4 */                 \
3056          "ld   5, 24(11)\n\t" /* arg3->r5 */                 \
3057          "ld   6, 32(11)\n\t" /* arg4->r6 */                 \
3058          "ld   7, 40(11)\n\t" /* arg5->r7 */                 \
3059          "ld   8, 48(11)\n\t" /* arg6->r8 */                 \
3060          "ld   9, 56(11)\n\t" /* arg7->r9 */                 \
3061          "ld  10, 64(11)\n\t" /* arg8->r10 */                \
3062          "ld  11, 0(11)\n\t"  /* target->r11 */              \
3063          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11             \
3064          "mr 11,%1\n\t"                                      \
3065          "mr %0,3\n\t"                                       \
3066          "ld 2,-16(11)\n\t" /* restore tocptr */             \
3067          VALGRIND_RESTORE_STACK                              \
3068          : /*out*/   "=r" (_res)                             \
3069          : /*in*/    "r" (&_argvec[2])                       \
3070          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3071       );                                                     \
3072       lval = (__typeof__(lval)) _res;                        \
3073    } while (0)
3074
/* ppc64be: 11 word args.  Frame grown by 144 bytes (three stack args
   need more room); arg11/arg10/arg9 spilled to 128/120/112(1), then
   arg1..arg8 -> r3..r10. */
3075 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3076                                   arg7,arg8,arg9,arg10,arg11) \
3077    do {                                                      \
3078       volatile OrigFn        _orig = (orig);                 \
3079       volatile unsigned long _argvec[3+11];                  \
3080       volatile unsigned long _res;                           \
3081       /* _argvec[0] holds current r2 across the call */      \
3082       _argvec[1] = (unsigned long)_orig.r2;                  \
3083       _argvec[2] = (unsigned long)_orig.nraddr;              \
3084       _argvec[2+1] = (unsigned long)arg1;                    \
3085       _argvec[2+2] = (unsigned long)arg2;                    \
3086       _argvec[2+3] = (unsigned long)arg3;                    \
3087       _argvec[2+4] = (unsigned long)arg4;                    \
3088       _argvec[2+5] = (unsigned long)arg5;                    \
3089       _argvec[2+6] = (unsigned long)arg6;                    \
3090       _argvec[2+7] = (unsigned long)arg7;                    \
3091       _argvec[2+8] = (unsigned long)arg8;                    \
3092       _argvec[2+9] = (unsigned long)arg9;                    \
3093       _argvec[2+10] = (unsigned long)arg10;                  \
3094       _argvec[2+11] = (unsigned long)arg11;                  \
3095       __asm__ volatile(                                      \
3096          VALGRIND_ALIGN_STACK                                \
3097          "mr 11,%1\n\t"                                      \
3098          "std 2,-16(11)\n\t"  /* save tocptr */              \
3099          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */      \
3100          "addi 1,1,-144\n\t"  /* expand stack frame */       \
3101          /* arg11 */                                         \
3102          "ld  3,88(11)\n\t"                                  \
3103          "std 3,128(1)\n\t"                                  \
3104          /* arg10 */                                         \
3105          "ld  3,80(11)\n\t"                                  \
3106          "std 3,120(1)\n\t"                                  \
3107          /* arg9 */                                          \
3108          "ld  3,72(11)\n\t"                                  \
3109          "std 3,112(1)\n\t"                                  \
3110          /* args1-8 */                                       \
3111          "ld   3, 8(11)\n\t"  /* arg1->r3 */                 \
3112          "ld   4, 16(11)\n\t" /* arg2->r4 */                 \
3113          "ld   5, 24(11)\n\t" /* arg3->r5 */                 \
3114          "ld   6, 32(11)\n\t" /* arg4->r6 */                 \
3115          "ld   7, 40(11)\n\t" /* arg5->r7 */                 \
3116          "ld   8, 48(11)\n\t" /* arg6->r8 */                 \
3117          "ld   9, 56(11)\n\t" /* arg7->r9 */                 \
3118          "ld  10, 64(11)\n\t" /* arg8->r10 */                \
3119          "ld  11, 0(11)\n\t"  /* target->r11 */              \
3120          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11             \
3121          "mr 11,%1\n\t"                                      \
3122          "mr %0,3\n\t"                                       \
3123          "ld 2,-16(11)\n\t" /* restore tocptr */             \
3124          VALGRIND_RESTORE_STACK                              \
3125          : /*out*/   "=r" (_res)                             \
3126          : /*in*/    "r" (&_argvec[2])                       \
3127          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3128       );                                                     \
3129       lval = (__typeof__(lval)) _res;                        \
3130    } while (0)
3131
/* ppc64be: 12 word args.  Frame grown by 144 bytes; arg12..arg9
   spilled to 136/128/120/112(1) (highest first, r3 as scratch), then
   arg1..arg8 -> r3..r10 and the non-redirected branch via r11. */
3132 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3133                                 arg7,arg8,arg9,arg10,arg11,arg12) \
3134    do {                                                      \
3135       volatile OrigFn        _orig = (orig);                 \
3136       volatile unsigned long _argvec[3+12];                  \
3137       volatile unsigned long _res;                           \
3138       /* _argvec[0] holds current r2 across the call */      \
3139       _argvec[1] = (unsigned long)_orig.r2;                  \
3140       _argvec[2] = (unsigned long)_orig.nraddr;              \
3141       _argvec[2+1] = (unsigned long)arg1;                    \
3142       _argvec[2+2] = (unsigned long)arg2;                    \
3143       _argvec[2+3] = (unsigned long)arg3;                    \
3144       _argvec[2+4] = (unsigned long)arg4;                    \
3145       _argvec[2+5] = (unsigned long)arg5;                    \
3146       _argvec[2+6] = (unsigned long)arg6;                    \
3147       _argvec[2+7] = (unsigned long)arg7;                    \
3148       _argvec[2+8] = (unsigned long)arg8;                    \
3149       _argvec[2+9] = (unsigned long)arg9;                    \
3150       _argvec[2+10] = (unsigned long)arg10;                  \
3151       _argvec[2+11] = (unsigned long)arg11;                  \
3152       _argvec[2+12] = (unsigned long)arg12;                  \
3153       __asm__ volatile(                                      \
3154          VALGRIND_ALIGN_STACK                                \
3155          "mr 11,%1\n\t"                                      \
3156          "std 2,-16(11)\n\t"  /* save tocptr */              \
3157          "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */      \
3158          "addi 1,1,-144\n\t"  /* expand stack frame */       \
3159          /* arg12 */                                         \
3160          "ld  3,96(11)\n\t"                                  \
3161          "std 3,136(1)\n\t"                                  \
3162          /* arg11 */                                         \
3163          "ld  3,88(11)\n\t"                                  \
3164          "std 3,128(1)\n\t"                                  \
3165          /* arg10 */                                         \
3166          "ld  3,80(11)\n\t"                                  \
3167          "std 3,120(1)\n\t"                                  \
3168          /* arg9 */                                          \
3169          "ld  3,72(11)\n\t"                                  \
3170          "std 3,112(1)\n\t"                                  \
3171          /* args1-8 */                                       \
3172          "ld   3, 8(11)\n\t"  /* arg1->r3 */                 \
3173          "ld   4, 16(11)\n\t" /* arg2->r4 */                 \
3174          "ld   5, 24(11)\n\t" /* arg3->r5 */                 \
3175          "ld   6, 32(11)\n\t" /* arg4->r6 */                 \
3176          "ld   7, 40(11)\n\t" /* arg5->r7 */                 \
3177          "ld   8, 48(11)\n\t" /* arg6->r8 */                 \
3178          "ld   9, 56(11)\n\t" /* arg7->r9 */                 \
3179          "ld  10, 64(11)\n\t" /* arg8->r10 */                \
3180          "ld  11, 0(11)\n\t"  /* target->r11 */              \
3181          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11             \
3182          "mr 11,%1\n\t"                                      \
3183          "mr %0,3\n\t"                                       \
3184          "ld 2,-16(11)\n\t" /* restore tocptr */             \
3185          VALGRIND_RESTORE_STACK                              \
3186          : /*out*/   "=r" (_res)                             \
3187          : /*in*/    "r" (&_argvec[2])                       \
3188          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3189       );                                                     \
3190       lval = (__typeof__(lval)) _res;                        \
3191    } while (0)
3192
3193 #endif /* PLAT_ppc64be_linux */
3194
3195 /* ------------------------- ppc64le-linux ----------------------- */
3196 #if defined(PLAT_ppc64le_linux)
3197
3198 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
3199
3200 /* These regs are trashed by the hidden call. */
/* Registers the hidden call may clobber; every CALL_FN_* below lists
   this in its asm clobber section so gcc keeps live values elsewhere.
   NOTE(review): r13 is included here although on ppc64 ELF r13 is
   conventionally the thread pointer — matches upstream, but verify
   intentional. */
3201 #define __CALLER_SAVED_REGS                                       \
3202    "lr", "ctr", "xer",                                            \
3203    "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
3204    "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
3205    "r11", "r12", "r13"
3206
3207 /* Macros to save and align the stack before making a function
3208 call and restore it afterwards as gcc may not keep the stack
3209 pointer aligned if it doesn't realise calls are being made
3210 to other functions. */
3211
/* Save the stack pointer in r28 (hence "r28" in every clobber list
   below), then clear the low 4 bits of r1, rounding sp down to a
   16-byte boundary. */
3212 #define VALGRIND_ALIGN_STACK               \
3213       "mr 28,1\n\t"                        \
3214       "rldicr 1,1,0,59\n\t"
/* Undo VALGRIND_ALIGN_STACK (and any intervening "addi 1,1,-N") by
   restoring the original sp from r28. */
3215 #define VALGRIND_RESTORE_STACK             \
3216       "mr 1,28\n\t"
3217
3218 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
3219 long) == 8. */
3220
/* ppc64le: invoke _orig.nraddr with no arguments, bypassing
   redirection.  r12 = &_argvec[2] (target at 0(r12)); the caller's
   TOC (r2) is parked at -16(r12) (_argvec[0]) and the callee's TOC
   loaded from -8(r12) (_argvec[1]).  Result returned in r3. */
3221 #define CALL_FN_W_v(lval, orig)                              \
3222    do {                                                      \
3223       volatile OrigFn        _orig = (orig);                 \
3224       volatile unsigned long _argvec[3+0];                   \
3225       volatile unsigned long _res;                           \
3226       /* _argvec[0] holds current r2 across the call */      \
3227       _argvec[1] = (unsigned long)_orig.r2;                  \
3228       _argvec[2] = (unsigned long)_orig.nraddr;              \
3229       __asm__ volatile(                                      \
3230          VALGRIND_ALIGN_STACK                                \
3231          "mr 12,%1\n\t"                                      \
3232          "std 2,-16(12)\n\t"  /* save tocptr */              \
3233          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */      \
3234          "ld  12, 0(12)\n\t"  /* target->r12 */              \
3235          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12             \
3236          "mr 12,%1\n\t"                                      \
3237          "mr %0,3\n\t"                                       \
3238          "ld 2,-16(12)\n\t" /* restore tocptr */             \
3239          VALGRIND_RESTORE_STACK                              \
3240          : /*out*/   "=r" (_res)                             \
3241          : /*in*/    "r" (&_argvec[2])                       \
3242          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3243       );                                                     \
3244       lval = (__typeof__(lval)) _res;                        \
3245    } while (0)
3246
/* ppc64le: as CALL_FN_W_v but with 1 word arg (arg1 -> r3, read from
   8(r12)); result also returned in r3. */
3247 #define CALL_FN_W_W(lval, orig, arg1)                        \
3248    do {                                                      \
3249       volatile OrigFn        _orig = (orig);                 \
3250       volatile unsigned long _argvec[3+1];                   \
3251       volatile unsigned long _res;                           \
3252       /* _argvec[0] holds current r2 across the call */      \
3253       _argvec[1] = (unsigned long)_orig.r2;                  \
3254       _argvec[2] = (unsigned long)_orig.nraddr;              \
3255       _argvec[2+1] = (unsigned long)arg1;                    \
3256       __asm__ volatile(                                      \
3257          VALGRIND_ALIGN_STACK                                \
3258          "mr 12,%1\n\t"                                      \
3259          "std 2,-16(12)\n\t"  /* save tocptr */              \
3260          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */      \
3261          "ld   3, 8(12)\n\t"  /* arg1->r3 */                 \
3262          "ld  12, 0(12)\n\t"  /* target->r12 */              \
3263          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12             \
3264          "mr 12,%1\n\t"                                      \
3265          "mr %0,3\n\t"                                       \
3266          "ld 2,-16(12)\n\t" /* restore tocptr */             \
3267          VALGRIND_RESTORE_STACK                              \
3268          : /*out*/   "=r" (_res)                             \
3269          : /*in*/    "r" (&_argvec[2])                       \
3270          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3271       );                                                     \
3272       lval = (__typeof__(lval)) _res;                        \
3273    } while (0)
3274
/* ppc64le: as CALL_FN_W_v but with 2 word args (arg1/arg2 ->
   r3/r4). */
3275 #define CALL_FN_W_WW(lval, orig, arg1,arg2)                  \
3276    do {                                                      \
3277       volatile OrigFn        _orig = (orig);                 \
3278       volatile unsigned long _argvec[3+2];                   \
3279       volatile unsigned long _res;                           \
3280       /* _argvec[0] holds current r2 across the call */      \
3281       _argvec[1] = (unsigned long)_orig.r2;                  \
3282       _argvec[2] = (unsigned long)_orig.nraddr;              \
3283       _argvec[2+1] = (unsigned long)arg1;                    \
3284       _argvec[2+2] = (unsigned long)arg2;                    \
3285       __asm__ volatile(                                      \
3286          VALGRIND_ALIGN_STACK                                \
3287          "mr 12,%1\n\t"                                      \
3288          "std 2,-16(12)\n\t"  /* save tocptr */              \
3289          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */      \
3290          "ld   3, 8(12)\n\t"  /* arg1->r3 */                 \
3291          "ld   4, 16(12)\n\t" /* arg2->r4 */                 \
3292          "ld  12, 0(12)\n\t"  /* target->r12 */              \
3293          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12             \
3294          "mr 12,%1\n\t"                                      \
3295          "mr %0,3\n\t"                                       \
3296          "ld 2,-16(12)\n\t" /* restore tocptr */             \
3297          VALGRIND_RESTORE_STACK                              \
3298          : /*out*/   "=r" (_res)                             \
3299          : /*in*/    "r" (&_argvec[2])                       \
3300          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3301       );                                                     \
3302       lval = (__typeof__(lval)) _res;                        \
3303    } while (0)
3304
/* ppc64le: as CALL_FN_W_v but with 3 word args (arg1..arg3 ->
   r3..r5). */
3305 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)            \
3306    do {                                                      \
3307       volatile OrigFn        _orig = (orig);                 \
3308       volatile unsigned long _argvec[3+3];                   \
3309       volatile unsigned long _res;                           \
3310       /* _argvec[0] holds current r2 across the call */      \
3311       _argvec[1] = (unsigned long)_orig.r2;                  \
3312       _argvec[2] = (unsigned long)_orig.nraddr;              \
3313       _argvec[2+1] = (unsigned long)arg1;                    \
3314       _argvec[2+2] = (unsigned long)arg2;                    \
3315       _argvec[2+3] = (unsigned long)arg3;                    \
3316       __asm__ volatile(                                      \
3317          VALGRIND_ALIGN_STACK                                \
3318          "mr 12,%1\n\t"                                      \
3319          "std 2,-16(12)\n\t"  /* save tocptr */              \
3320          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */      \
3321          "ld   3, 8(12)\n\t"  /* arg1->r3 */                 \
3322          "ld   4, 16(12)\n\t" /* arg2->r4 */                 \
3323          "ld   5, 24(12)\n\t" /* arg3->r5 */                 \
3324          "ld  12, 0(12)\n\t"  /* target->r12 */              \
3325          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12             \
3326          "mr 12,%1\n\t"                                      \
3327          "mr %0,3\n\t"                                       \
3328          "ld 2,-16(12)\n\t" /* restore tocptr */             \
3329          VALGRIND_RESTORE_STACK                              \
3330          : /*out*/   "=r" (_res)                             \
3331          : /*in*/    "r" (&_argvec[2])                       \
3332          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3333       );                                                     \
3334       lval = (__typeof__(lval)) _res;                        \
3335    } while (0)
3336
/* ppc64le: as CALL_FN_W_v but with 4 word args (arg1..arg4 ->
   r3..r6). */
3337 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)      \
3338    do {                                                      \
3339       volatile OrigFn        _orig = (orig);                 \
3340       volatile unsigned long _argvec[3+4];                   \
3341       volatile unsigned long _res;                           \
3342       /* _argvec[0] holds current r2 across the call */      \
3343       _argvec[1] = (unsigned long)_orig.r2;                  \
3344       _argvec[2] = (unsigned long)_orig.nraddr;              \
3345       _argvec[2+1] = (unsigned long)arg1;                    \
3346       _argvec[2+2] = (unsigned long)arg2;                    \
3347       _argvec[2+3] = (unsigned long)arg3;                    \
3348       _argvec[2+4] = (unsigned long)arg4;                    \
3349       __asm__ volatile(                                      \
3350          VALGRIND_ALIGN_STACK                                \
3351          "mr 12,%1\n\t"                                      \
3352          "std 2,-16(12)\n\t"  /* save tocptr */              \
3353          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */      \
3354          "ld   3, 8(12)\n\t"  /* arg1->r3 */                 \
3355          "ld   4, 16(12)\n\t" /* arg2->r4 */                 \
3356          "ld   5, 24(12)\n\t" /* arg3->r5 */                 \
3357          "ld   6, 32(12)\n\t" /* arg4->r6 */                 \
3358          "ld  12, 0(12)\n\t"  /* target->r12 */              \
3359          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12             \
3360          "mr 12,%1\n\t"                                      \
3361          "mr %0,3\n\t"                                       \
3362          "ld 2,-16(12)\n\t" /* restore tocptr */             \
3363          VALGRIND_RESTORE_STACK                              \
3364          : /*out*/   "=r" (_res)                             \
3365          : /*in*/    "r" (&_argvec[2])                       \
3366          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3367       );                                                     \
3368       lval = (__typeof__(lval)) _res;                        \
3369    } while (0)
3370
/* ppc64le: as CALL_FN_W_v but with 5 word args (arg1..arg5 ->
   r3..r7). */
3371 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)   \
3372    do {                                                      \
3373       volatile OrigFn        _orig = (orig);                 \
3374       volatile unsigned long _argvec[3+5];                   \
3375       volatile unsigned long _res;                           \
3376       /* _argvec[0] holds current r2 across the call */      \
3377       _argvec[1] = (unsigned long)_orig.r2;                  \
3378       _argvec[2] = (unsigned long)_orig.nraddr;              \
3379       _argvec[2+1] = (unsigned long)arg1;                    \
3380       _argvec[2+2] = (unsigned long)arg2;                    \
3381       _argvec[2+3] = (unsigned long)arg3;                    \
3382       _argvec[2+4] = (unsigned long)arg4;                    \
3383       _argvec[2+5] = (unsigned long)arg5;                    \
3384       __asm__ volatile(                                      \
3385          VALGRIND_ALIGN_STACK                                \
3386          "mr 12,%1\n\t"                                      \
3387          "std 2,-16(12)\n\t"  /* save tocptr */              \
3388          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */      \
3389          "ld   3, 8(12)\n\t"  /* arg1->r3 */                 \
3390          "ld   4, 16(12)\n\t" /* arg2->r4 */                 \
3391          "ld   5, 24(12)\n\t" /* arg3->r5 */                 \
3392          "ld   6, 32(12)\n\t" /* arg4->r6 */                 \
3393          "ld   7, 40(12)\n\t" /* arg5->r7 */                 \
3394          "ld  12, 0(12)\n\t"  /* target->r12 */              \
3395          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12             \
3396          "mr 12,%1\n\t"                                      \
3397          "mr %0,3\n\t"                                       \
3398          "ld 2,-16(12)\n\t" /* restore tocptr */             \
3399          VALGRIND_RESTORE_STACK                              \
3400          : /*out*/   "=r" (_res)                             \
3401          : /*in*/    "r" (&_argvec[2])                       \
3402          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3403       );                                                     \
3404       lval = (__typeof__(lval)) _res;                        \
3405    } while (0)
3406
/* ppc64le: as CALL_FN_W_v but with 6 word args (arg1..arg6 ->
   r3..r8). */
3407 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3408    do {                                                      \
3409       volatile OrigFn        _orig = (orig);                 \
3410       volatile unsigned long _argvec[3+6];                   \
3411       volatile unsigned long _res;                           \
3412       /* _argvec[0] holds current r2 across the call */      \
3413       _argvec[1] = (unsigned long)_orig.r2;                  \
3414       _argvec[2] = (unsigned long)_orig.nraddr;              \
3415       _argvec[2+1] = (unsigned long)arg1;                    \
3416       _argvec[2+2] = (unsigned long)arg2;                    \
3417       _argvec[2+3] = (unsigned long)arg3;                    \
3418       _argvec[2+4] = (unsigned long)arg4;                    \
3419       _argvec[2+5] = (unsigned long)arg5;                    \
3420       _argvec[2+6] = (unsigned long)arg6;                    \
3421       __asm__ volatile(                                      \
3422          VALGRIND_ALIGN_STACK                                \
3423          "mr 12,%1\n\t"                                      \
3424          "std 2,-16(12)\n\t"  /* save tocptr */              \
3425          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */      \
3426          "ld   3, 8(12)\n\t"  /* arg1->r3 */                 \
3427          "ld   4, 16(12)\n\t" /* arg2->r4 */                 \
3428          "ld   5, 24(12)\n\t" /* arg3->r5 */                 \
3429          "ld   6, 32(12)\n\t" /* arg4->r6 */                 \
3430          "ld   7, 40(12)\n\t" /* arg5->r7 */                 \
3431          "ld   8, 48(12)\n\t" /* arg6->r8 */                 \
3432          "ld  12, 0(12)\n\t"  /* target->r12 */              \
3433          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12             \
3434          "mr 12,%1\n\t"                                      \
3435          "mr %0,3\n\t"                                       \
3436          "ld 2,-16(12)\n\t" /* restore tocptr */             \
3437          VALGRIND_RESTORE_STACK                              \
3438          : /*out*/   "=r" (_res)                             \
3439          : /*in*/    "r" (&_argvec[2])                       \
3440          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3441       );                                                     \
3442       lval = (__typeof__(lval)) _res;                        \
3443    } while (0)
3444
/* ppc64le: as CALL_FN_W_v but with 7 word args (arg1..arg7 ->
   r3..r9). */
3445 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3446                                  arg7)                        \
3447    do {                                                      \
3448       volatile OrigFn        _orig = (orig);                 \
3449       volatile unsigned long _argvec[3+7];                   \
3450       volatile unsigned long _res;                           \
3451       /* _argvec[0] holds current r2 across the call */      \
3452       _argvec[1] = (unsigned long)_orig.r2;                  \
3453       _argvec[2] = (unsigned long)_orig.nraddr;              \
3454       _argvec[2+1] = (unsigned long)arg1;                    \
3455       _argvec[2+2] = (unsigned long)arg2;                    \
3456       _argvec[2+3] = (unsigned long)arg3;                    \
3457       _argvec[2+4] = (unsigned long)arg4;                    \
3458       _argvec[2+5] = (unsigned long)arg5;                    \
3459       _argvec[2+6] = (unsigned long)arg6;                    \
3460       _argvec[2+7] = (unsigned long)arg7;                    \
3461       __asm__ volatile(                                      \
3462          VALGRIND_ALIGN_STACK                                \
3463          "mr 12,%1\n\t"                                      \
3464          "std 2,-16(12)\n\t"  /* save tocptr */              \
3465          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */      \
3466          "ld   3, 8(12)\n\t"  /* arg1->r3 */                 \
3467          "ld   4, 16(12)\n\t" /* arg2->r4 */                 \
3468          "ld   5, 24(12)\n\t" /* arg3->r5 */                 \
3469          "ld   6, 32(12)\n\t" /* arg4->r6 */                 \
3470          "ld   7, 40(12)\n\t" /* arg5->r7 */                 \
3471          "ld   8, 48(12)\n\t" /* arg6->r8 */                 \
3472          "ld   9, 56(12)\n\t" /* arg7->r9 */                 \
3473          "ld  12, 0(12)\n\t"  /* target->r12 */              \
3474          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12             \
3475          "mr 12,%1\n\t"                                      \
3476          "mr %0,3\n\t"                                       \
3477          "ld 2,-16(12)\n\t" /* restore tocptr */             \
3478          VALGRIND_RESTORE_STACK                              \
3479          : /*out*/   "=r" (_res)                             \
3480          : /*in*/    "r" (&_argvec[2])                       \
3481          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3482       );                                                     \
3483       lval = (__typeof__(lval)) _res;                        \
3484    } while (0)
3485
/* ppc64le: as CALL_FN_W_v but with 8 word args (arg1..arg8 ->
   r3..r10).  Last all-register case; 9+ args spill to the stack
   (see CALL_FN_W_9W below). */
3486 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3487                                  arg7,arg8)                   \
3488    do {                                                      \
3489       volatile OrigFn        _orig = (orig);                 \
3490       volatile unsigned long _argvec[3+8];                   \
3491       volatile unsigned long _res;                           \
3492       /* _argvec[0] holds current r2 across the call */      \
3493       _argvec[1] = (unsigned long)_orig.r2;                  \
3494       _argvec[2] = (unsigned long)_orig.nraddr;              \
3495       _argvec[2+1] = (unsigned long)arg1;                    \
3496       _argvec[2+2] = (unsigned long)arg2;                    \
3497       _argvec[2+3] = (unsigned long)arg3;                    \
3498       _argvec[2+4] = (unsigned long)arg4;                    \
3499       _argvec[2+5] = (unsigned long)arg5;                    \
3500       _argvec[2+6] = (unsigned long)arg6;                    \
3501       _argvec[2+7] = (unsigned long)arg7;                    \
3502       _argvec[2+8] = (unsigned long)arg8;                    \
3503       __asm__ volatile(                                      \
3504          VALGRIND_ALIGN_STACK                                \
3505          "mr 12,%1\n\t"                                      \
3506          "std 2,-16(12)\n\t"  /* save tocptr */              \
3507          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */      \
3508          "ld   3, 8(12)\n\t"  /* arg1->r3 */                 \
3509          "ld   4, 16(12)\n\t" /* arg2->r4 */                 \
3510          "ld   5, 24(12)\n\t" /* arg3->r5 */                 \
3511          "ld   6, 32(12)\n\t" /* arg4->r6 */                 \
3512          "ld   7, 40(12)\n\t" /* arg5->r7 */                 \
3513          "ld   8, 48(12)\n\t" /* arg6->r8 */                 \
3514          "ld   9, 56(12)\n\t" /* arg7->r9 */                 \
3515          "ld  10, 64(12)\n\t" /* arg8->r10 */                \
3516          "ld  12, 0(12)\n\t"  /* target->r12 */              \
3517          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12             \
3518          "mr 12,%1\n\t"                                      \
3519          "mr %0,3\n\t"                                       \
3520          "ld 2,-16(12)\n\t" /* restore tocptr */             \
3521          VALGRIND_RESTORE_STACK                              \
3522          : /*out*/   "=r" (_res)                             \
3523          : /*in*/    "r" (&_argvec[2])                       \
3524          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3525       );                                                     \
3526       lval = (__typeof__(lval)) _res;                        \
3527    } while (0)
3528
/* ppc64le: 9 word args.  arg1..arg8 -> r3..r10; the frame is grown
   by 128 bytes and arg9 stored to 96(1) in the new frame (note: a
   smaller offset than the 112(1) used by the ppc64be variants — the
   two ABIs lay out the parameter area differently).  r3 is scratch
   for the spill, so it is done before the register args load. */
3529 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3530                                  arg7,arg8,arg9)              \
3531    do {                                                      \
3532       volatile OrigFn        _orig = (orig);                 \
3533       volatile unsigned long _argvec[3+9];                   \
3534       volatile unsigned long _res;                           \
3535       /* _argvec[0] holds current r2 across the call */      \
3536       _argvec[1] = (unsigned long)_orig.r2;                  \
3537       _argvec[2] = (unsigned long)_orig.nraddr;              \
3538       _argvec[2+1] = (unsigned long)arg1;                    \
3539       _argvec[2+2] = (unsigned long)arg2;                    \
3540       _argvec[2+3] = (unsigned long)arg3;                    \
3541       _argvec[2+4] = (unsigned long)arg4;                    \
3542       _argvec[2+5] = (unsigned long)arg5;                    \
3543       _argvec[2+6] = (unsigned long)arg6;                    \
3544       _argvec[2+7] = (unsigned long)arg7;                    \
3545       _argvec[2+8] = (unsigned long)arg8;                    \
3546       _argvec[2+9] = (unsigned long)arg9;                    \
3547       __asm__ volatile(                                      \
3548          VALGRIND_ALIGN_STACK                                \
3549          "mr 12,%1\n\t"                                      \
3550          "std 2,-16(12)\n\t"  /* save tocptr */              \
3551          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */      \
3552          "addi 1,1,-128\n\t"  /* expand stack frame */       \
3553          /* arg9 */                                          \
3554          "ld  3,72(12)\n\t"                                  \
3555          "std 3,96(1)\n\t"                                   \
3556          /* args1-8 */                                       \
3557          "ld   3, 8(12)\n\t"  /* arg1->r3 */                 \
3558          "ld   4, 16(12)\n\t" /* arg2->r4 */                 \
3559          "ld   5, 24(12)\n\t" /* arg3->r5 */                 \
3560          "ld   6, 32(12)\n\t" /* arg4->r6 */                 \
3561          "ld   7, 40(12)\n\t" /* arg5->r7 */                 \
3562          "ld   8, 48(12)\n\t" /* arg6->r8 */                 \
3563          "ld   9, 56(12)\n\t" /* arg7->r9 */                 \
3564          "ld  10, 64(12)\n\t" /* arg8->r10 */                \
3565          "ld  12, 0(12)\n\t"  /* target->r12 */              \
3566          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12             \
3567          "mr 12,%1\n\t"                                      \
3568          "mr %0,3\n\t"                                       \
3569          "ld 2,-16(12)\n\t" /* restore tocptr */             \
3570          VALGRIND_RESTORE_STACK                              \
3571          : /*out*/   "=r" (_res)                             \
3572          : /*in*/    "r" (&_argvec[2])                       \
3573          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3574       );                                                     \
3575       lval = (__typeof__(lval)) _res;                        \
3576    } while (0)
3577
/* ppc64le: 10 word args.  As the le CALL_FN_W_9W: frame grown by
   128 bytes; arg10/arg9 spilled to 104(1)/96(1) (r3 scratch), then
   arg1..arg8 -> r3..r10. */
3578 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3579                                   arg7,arg8,arg9,arg10)       \
3580    do {                                                      \
3581       volatile OrigFn        _orig = (orig);                 \
3582       volatile unsigned long _argvec[3+10];                  \
3583       volatile unsigned long _res;                           \
3584       /* _argvec[0] holds current r2 across the call */      \
3585       _argvec[1] = (unsigned long)_orig.r2;                  \
3586       _argvec[2] = (unsigned long)_orig.nraddr;              \
3587       _argvec[2+1] = (unsigned long)arg1;                    \
3588       _argvec[2+2] = (unsigned long)arg2;                    \
3589       _argvec[2+3] = (unsigned long)arg3;                    \
3590       _argvec[2+4] = (unsigned long)arg4;                    \
3591       _argvec[2+5] = (unsigned long)arg5;                    \
3592       _argvec[2+6] = (unsigned long)arg6;                    \
3593       _argvec[2+7] = (unsigned long)arg7;                    \
3594       _argvec[2+8] = (unsigned long)arg8;                    \
3595       _argvec[2+9] = (unsigned long)arg9;                    \
3596       _argvec[2+10] = (unsigned long)arg10;                  \
3597       __asm__ volatile(                                      \
3598          VALGRIND_ALIGN_STACK                                \
3599          "mr 12,%1\n\t"                                      \
3600          "std 2,-16(12)\n\t"  /* save tocptr */              \
3601          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */      \
3602          "addi 1,1,-128\n\t"  /* expand stack frame */       \
3603          /* arg10 */                                         \
3604          "ld  3,80(12)\n\t"                                  \
3605          "std 3,104(1)\n\t"                                  \
3606          /* arg9 */                                          \
3607          "ld  3,72(12)\n\t"                                  \
3608          "std 3,96(1)\n\t"                                   \
3609          /* args1-8 */                                       \
3610          "ld   3, 8(12)\n\t"  /* arg1->r3 */                 \
3611          "ld   4, 16(12)\n\t" /* arg2->r4 */                 \
3612          "ld   5, 24(12)\n\t" /* arg3->r5 */                 \
3613          "ld   6, 32(12)\n\t" /* arg4->r6 */                 \
3614          "ld   7, 40(12)\n\t" /* arg5->r7 */                 \
3615          "ld   8, 48(12)\n\t" /* arg6->r8 */                 \
3616          "ld   9, 56(12)\n\t" /* arg7->r9 */                 \
3617          "ld  10, 64(12)\n\t" /* arg8->r10 */                \
3618          "ld  12, 0(12)\n\t"  /* target->r12 */              \
3619          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12             \
3620          "mr 12,%1\n\t"                                      \
3621          "mr %0,3\n\t"                                       \
3622          "ld 2,-16(12)\n\t" /* restore tocptr */             \
3623          VALGRIND_RESTORE_STACK                              \
3624          : /*out*/   "=r" (_res)                             \
3625          : /*in*/    "r" (&_argvec[2])                       \
3626          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3627       );                                                     \
3628       lval = (__typeof__(lval)) _res;                        \
3629    } while (0)
3630
/* ppc64le: 11 word args.  Frame grown by 144 bytes; arg11/arg10/arg9
   spilled to 112/104/96(1) (highest first, r3 scratch), then
   arg1..arg8 -> r3..r10 and the branch via r12. */
3631 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3632                                   arg7,arg8,arg9,arg10,arg11) \
3633    do {                                                      \
3634       volatile OrigFn        _orig = (orig);                 \
3635       volatile unsigned long _argvec[3+11];                  \
3636       volatile unsigned long _res;                           \
3637       /* _argvec[0] holds current r2 across the call */      \
3638       _argvec[1] = (unsigned long)_orig.r2;                  \
3639       _argvec[2] = (unsigned long)_orig.nraddr;              \
3640       _argvec[2+1] = (unsigned long)arg1;                    \
3641       _argvec[2+2] = (unsigned long)arg2;                    \
3642       _argvec[2+3] = (unsigned long)arg3;                    \
3643       _argvec[2+4] = (unsigned long)arg4;                    \
3644       _argvec[2+5] = (unsigned long)arg5;                    \
3645       _argvec[2+6] = (unsigned long)arg6;                    \
3646       _argvec[2+7] = (unsigned long)arg7;                    \
3647       _argvec[2+8] = (unsigned long)arg8;                    \
3648       _argvec[2+9] = (unsigned long)arg9;                    \
3649       _argvec[2+10] = (unsigned long)arg10;                  \
3650       _argvec[2+11] = (unsigned long)arg11;                  \
3651       __asm__ volatile(                                      \
3652          VALGRIND_ALIGN_STACK                                \
3653          "mr 12,%1\n\t"                                      \
3654          "std 2,-16(12)\n\t"  /* save tocptr */              \
3655          "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */      \
3656          "addi 1,1,-144\n\t"  /* expand stack frame */       \
3657          /* arg11 */                                         \
3658          "ld  3,88(12)\n\t"                                  \
3659          "std 3,112(1)\n\t"                                  \
3660          /* arg10 */                                         \
3661          "ld  3,80(12)\n\t"                                  \
3662          "std 3,104(1)\n\t"                                  \
3663          /* arg9 */                                          \
3664          "ld  3,72(12)\n\t"                                  \
3665          "std 3,96(1)\n\t"                                   \
3666          /* args1-8 */                                       \
3667          "ld   3, 8(12)\n\t"  /* arg1->r3 */                 \
3668          "ld   4, 16(12)\n\t" /* arg2->r4 */                 \
3669          "ld   5, 24(12)\n\t" /* arg3->r5 */                 \
3670          "ld   6, 32(12)\n\t" /* arg4->r6 */                 \
3671          "ld   7, 40(12)\n\t" /* arg5->r7 */                 \
3672          "ld   8, 48(12)\n\t" /* arg6->r8 */                 \
3673          "ld   9, 56(12)\n\t" /* arg7->r9 */                 \
3674          "ld  10, 64(12)\n\t" /* arg8->r10 */                \
3675          "ld  12, 0(12)\n\t"  /* target->r12 */              \
3676          VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12             \
3677          "mr 12,%1\n\t"                                      \
3678          "mr %0,3\n\t"                                       \
3679          "ld 2,-16(12)\n\t" /* restore tocptr */             \
3680          VALGRIND_RESTORE_STACK                              \
3681          : /*out*/   "=r" (_res)                             \
3682          : /*in*/    "r" (&_argvec[2])                       \
3683          : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3684       );                                                     \
3685       lval = (__typeof__(lval)) _res;                        \
3686    } while (0)
3687
/* ppc64le (ELFv2): call the original (non-redirected) function with
   12 word args.  Same scheme as CALL_FN_W_11W: TOC saved in
   _argvec[0]/[1], args 1-8 in r3-r10, args 9-12 spilled at offsets
   96..120 of the 144-byte expanded frame. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+12];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      _argvec[2+12] = (unsigned long)arg12;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"      /* r12 = &_argvec[2] */              \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t"   /* use nraddr's tocptr */            \
         "addi 1,1,-144\n\t" /* expand stack frame */             \
         /* arg12 */                                              \
         "ld 3,96(12)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg11 */                                              \
         "ld 3,88(12)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(12)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(12)\n\t"  /* arg7->r9 */                       \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"      /* re-point r12 at _argvec */        \
         "mr %0,3\n\t"       /* result in r3 */                   \
         "ld 2,-16(12)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3748
3749 #endif /* PLAT_ppc64le_linux */
3750
3751 /* ------------------------- arm-linux ------------------------- */
3752
3753 #if defined(PLAT_arm_linux)
3754
/* These regs are trashed by the hidden call: the arg/scratch
   registers r0-r3, plus r12 (ip) and r14 (lr).  r4 is included
   because the CALL_FN_ macros below marshal the target address
   through it. */
#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"
3757
3758 /* Macros to save and align the stack before making a function
3759 call and restore it afterwards as gcc may not keep the stack
3760 pointer aligned if it doesn't realise calls are being made
3761 to other functions. */
3762
3763 /* This is a bit tricky. We store the original stack pointer in r10
3764 as it is callee-saves. gcc doesn't allow the use of r11 for some
3765 reason. Also, we can't directly "bic" the stack pointer in thumb
3766 mode since r13 isn't an allowed register number in that context.
3767 So use r4 as a temporary, since that is about to get trashed
3768 anyway, just after each use of this macro. Side effect is we need
3769 to be very careful about any future changes, since
3770 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
#define VALGRIND_ALIGN_STACK \
   "mov r10, sp\n\t" /* save original sp in callee-saved r10 */ \
   "mov r4, sp\n\t" /* r4 as scratch: sp not directly usable in bic (thumb) */ \
   "bic r4, r4, #7\n\t" /* round sp down to an 8-byte boundary */ \
   "mov sp, r4\n\t"
#define VALGRIND_RESTORE_STACK \
   "mov sp, r10\n\t" /* undo the alignment: restore saved sp */
3778
3779 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
3780 long) == 4. */
3781
/* arm: call the original (non-redirected) zero-arg function.  The
   target address goes in r4 and is invoked through the special
   branch-and-link sequence; the word result is taken from r0.
   r10 is clobbered (holds the saved sp across the aligned call). */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3800
/* arm: call an original 1-arg function; arg1 in r0, target in r4,
   result from r0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3821
/* arm: call an original 2-arg function; args in r0-r1. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3844
/* arm: call an original 3-arg function; args in r0-r2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3869
/* arm: call an original 4-arg function; args fill the register
   quota r0-r3, so nothing goes on the stack. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3896
/* arm: call an original 5-arg function; args 1-4 in r0-r3, arg5
   pushed on the stack.  The extra "sub sp, sp, #4" keeps the total
   stack adjustment a multiple of 8 (sub 4 + push 4 = 8 bytes). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3927
/* arm: call an original 6-arg function; args 1-4 in r0-r3, args
   5-6 pushed (8 bytes, already stack-alignment friendly, so no
   "sub sp" padding needed). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3959
/* arm: call an original 7-arg function; args 1-4 in r0-r3, args
   5-7 pushed.  "sub sp, sp, #4" pads the odd (12-byte) push up to
   a multiple of 8. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "push {r0, r1, r2} \n\t"                                 \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3995
/* arm: call an original 8-arg function; args 1-4 in r0-r3, args
   5-8 pushed (16 bytes, no padding needed). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "push {r0, r1, r2, r3} \n\t"                             \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4032
/* arm: call an original 9-arg function; args 1-4 in r0-r3, args
   5-9 pushed.  "sub sp, sp, #4" pads the 20-byte push to 24. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4072
/* arm: call an original 10-arg function; args 1-4 in r0-r3, args
   5-10 pushed in two groups (arg10 first so it ends up highest on
   the stack).  Total push = 24 bytes, no padding needed. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4114
/* arm: call an original 11-arg function; args 1-4 in r0-r3, args
   5-11 pushed highest-first.  "sub sp, sp, #4" pads the 28-byte
   push to 32. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4160
/* arm: call an original 12-arg function; args 1-4 in r0-r3, args
   5-12 pushed highest-first (32 bytes, no padding needed). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "ldr r2, [%1, #48] \n\t"                                 \
         "push {r0, r1, r2} \n\t"                                 \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4207
4208 #endif /* PLAT_arm_linux */
4209
4210 /* ------------------------ arm64-linux ------------------------ */
4211
4212 #if defined(PLAT_arm64_linux)
4213
/* These regs are trashed by the hidden call: x0-x18 plus x30 (lr)
   and the entire SIMD/FP register file v0-v31.  x19/x20 are also
   listed here even though AAPCS64 treats them as callee-saved. */
#define __CALLER_SAVED_REGS \
     "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9",   \
     "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17",      \
     "x18", "x19", "x20", "x30",                                  \
     "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",  \
     "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17",      \
     "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25",      \
     "v26", "v27", "v28", "v29", "v30", "v31"
4223
/* x21 is callee-saved, so we can use it to save and restore SP around
   the hidden call. */
#define VALGRIND_ALIGN_STACK \
   "mov x21, sp\n\t" /* stash original sp */ \
   "bic sp, x21, #15\n\t" /* round sp down to a 16-byte boundary */
#define VALGRIND_RESTORE_STACK \
   "mov sp, x21\n\t" /* restore the stashed sp */
4231
4232 /* These CALL_FN_ macros assume that on arm64-linux,
4233 sizeof(unsigned long) == 8. */
4234
/* arm64: call the original (non-redirected) zero-arg function.  The
   target goes in x8 and is invoked via the special branch-and-link
   sequence; the result comes back in x0.  x21 is clobbered (holds
   the saved sp). */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4253
/* arm64: call an original 1-arg function; arg1 in x0, target in
   x8, result from x0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4274
/* arm64: call an original 2-arg function; args in x0-x1. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4297
/* arm64: call an original 3-arg function; args in x0-x2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4322
/* arm64: call an original 4-arg function; args in x0-x3. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4349
/* arm64: call an original 5-arg function; args in x0-x4, all in
   registers (x0-x7 available, so no stack args yet). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4378
/* arm64: call an original 6-arg function; args in x0-x5. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4409
/* arm64: call an original 7-arg function; args in x0-x6. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4443
/* arm64: call an original 8-arg function; args fill the full
   register quota x0-x7. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4479
/* arm64: call an original 9-arg function; args 1-8 in x0-x7,
   arg9 at [sp, #0].  0x20 (not just 8) is reserved so sp stays
   16-byte aligned. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x20 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"  /* arg9, via x8 as scratch */  \
         "str x8, [sp, #0] \n\t"                                  \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4519
/* arm64: call an original 10-arg function; args 1-8 in x0-x7,
   args 9-10 at [sp, #0] and [sp, #8] in the 0x20-byte reserved
   area (16-byte aligned). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x20 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"  /* arg9 */                     \
         "str x8, [sp, #0] \n\t"                                  \
         "ldr x8, [%1, #80] \n\t"  /* arg10 */                    \
         "str x8, [sp, #8] \n\t"                                  \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4562
/* Call an 11-argument word function via a non-redirected branch.
   Args 1-8 in x0-x7; args 9-11 at [sp,#0], [sp,#8], [sp,#16] in a
   0x30-byte stack block (kept 16-byte sized); target in x8; result
   from x0. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x30 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"  /* arg9 -> stack slot 0 */     \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"  /* arg10 -> stack slot 1 */    \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1, #88] \n\t"  /* arg11 -> stack slot 2 */    \
         "str x8, [sp, #16] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4608
/* Call a 12-argument word function via a non-redirected branch.
   Args 1-8 in x0-x7; args 9-12 at [sp,#0]..[sp,#24] in a 0x30-byte
   stack block; target in x8; result from x0. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,                 \
                      arg12)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x30 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"  /* arg9  -> stack slot 0 */    \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"  /* arg10 -> stack slot 1 */    \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1, #88] \n\t"  /* arg11 -> stack slot 2 */    \
         "str x8, [sp, #16] \n\t"                                 \
         "ldr x8, [%1, #96] \n\t"  /* arg12 -> stack slot 3 */    \
         "str x8, [sp, #24] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4658
4659 #endif /* PLAT_arm64_linux */
4660
4661 /* ------------------------- s390x-linux ------------------------- */
4662
4663 #if defined(PLAT_s390x_linux)
4664
4665 /* Similar workaround as amd64 (see above), but we use r11 as frame
4666 pointer and save the old r11 in r7. r11 might be used for
4667 argvec, therefore we copy argvec in r1 since r1 is clobbered
4668 after the call anyway. */
/* Two variants: when the compiler emits DWARF CFI assembly we set up
   r11 as a frame pointer (saving the old r11 in r7) and emit matching
   .cfi directives so unwinding through the hidden call works; without
   CFI support we only copy the argvec pointer into r1. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
#  define __FRAME_POINTER                                         \
      ,"d"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      ".cfi_remember_state\n\t"                                   \
      "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */          \
      "lgr 7,11\n\t" /* save old frame pointer (r11) in r7 */     \
      "lgr 11,%2\n\t" /* r11 := CFA passed via __FRAME_POINTER */ \
      ".cfi_def_cfa r11, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "lgr 11, 7\n\t" /* restore frame pointer */                 \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE                                   \
      "lgr 1,%1\n\t"
#  define VALGRIND_CFI_EPILOGUE
#endif
4687
4688 /* Nb: On s390 the stack pointer is properly aligned *at all times*
4689 according to the s390 GCC maintainer. (The ABI specification is not
4690 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4691 VALGRIND_RESTORE_STACK are not defined here. */
4692
4693 /* These regs are trashed by the hidden call. Note that we overwrite
4694 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4695 function a proper return address. All others are ABI defined call
4696 clobbers. */
/* NOTE(review): r6 is deliberately absent here; only CALL_FN_W_5W and
   larger use r6 (for arg5) and those macros add "6" to their clobber
   lists individually.  r7 is likewise added per-macro because it holds
   the saved frame pointer (see VALGRIND_CFI_PROLOGUE). */
#define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
                           "f0","f1","f2","f3","f4","f5","f6","f7"
4699
4700 /* Nb: Although r11 is modified in the asm snippets below (inside
4701 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
4702 two reasons:
4703 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
4704 modified
4705 (2) GCC will complain that r11 cannot appear inside a clobber section,
4706 when compiled with -O -fno-omit-frame-pointer
4707 */
4708
/* Zero-argument call: allocate the 160-byte register save area
   required below r15, load the target into r1, call it unredirected,
   and read the result from r2. */
#define CALL_FN_W_v(lval, orig)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[1];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "d" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4729
4730 /* The call abi has the arguments in r2-r6 and stack */
/* One-argument call: arg1 goes in r2 (first argument register per the
   s390x ABI, see comment above); r1 holds the target; result in r2. */
#define CALL_FN_W_W(lval, orig, arg1)                            \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[2];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"  /* arg1->r2 */                        \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4753
/* Two-argument call: args in r2-r3; target in r1; result in r2. */
#define CALL_FN_W_WW(lval, orig, arg1, arg2)                     \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[3];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4778
/* Three-argument call: args in r2-r4; target in r1; result in r2. */
#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3)              \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[4];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4805
/* Four-argument call: args in r2-r5; target in r1; result in r2. */
#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4)       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[5];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4834
/* Five-argument call: args in r2-r6 (last register argument); r6 now
   joins the clobber list; target in r1; result in r2. */
#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5)   \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[6];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4865
/* Six-argument call: args 1-5 in r2-r6; arg6 is copied with mvc into
   the stack parameter area at 160(r15).  Frame is 168 bytes: the
   160-byte register save area plus one 8-byte stack argument slot. */
#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6)                                       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[7];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-168\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"  /* arg6 -> stack */         \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,168\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4899
/* Seven-argument call: args 1-5 in r2-r6; args 6-7 at 160(r15) and
   168(r15); 176-byte frame (160 + 2*8). */
#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7)                                 \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[8];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-176\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,176\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4935
/* Eight-argument call: args 1-5 in r2-r6; args 6-8 at 160..176(r15);
   184-byte frame (160 + 3*8). */
#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7 ,arg8)                           \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[9];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-184\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,184\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4973
/* Nine-argument call: args 1-5 in r2-r6; args 6-9 at 160..184(r15);
   192-byte frame (160 + 4*8). */
#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7 ,arg8, arg9)                     \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[10];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-192\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,192\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5013
/* Ten-argument call: args 1-5 in r2-r6; args 6-10 at 160..192(r15);
   200-byte frame (160 + 5*8). */
#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10)             \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[11];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-200\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "mvc 192(8,15), 80(1)\n\t"                              \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,200\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5055
/* Eleven-argument call: args 1-5 in r2-r6; args 6-11 at
   160..200(r15); 208-byte frame (160 + 6*8). */
#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10, arg11)      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[12];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      _argvec[11] = (unsigned long)arg11;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-208\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "mvc 192(8,15), 80(1)\n\t"                              \
         "mvc 200(8,15), 88(1)\n\t"                              \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,208\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5099
/* Twelve-argument call: args 1-5 in r2-r6; args 6-12 at
   160..208(r15); 216-byte frame (160 + 7*8). */
#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[13];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      _argvec[11] = (unsigned long)arg11;                        \
      _argvec[12] = (unsigned long)arg12;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-216\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "mvc 192(8,15), 80(1)\n\t"                              \
         "mvc 200(8,15), 88(1)\n\t"                              \
         "mvc 208(8,15), 96(1)\n\t"                              \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,216\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5145
5146
5147 #endif /* PLAT_s390x_linux */
5148
5149 /* ------------------------- mips32-linux ----------------------- */
5150
5151 #if defined(PLAT_mips32_linux)
5152
5153 /* These regs are trashed by the hidden call. */
/* $2-$15/$24 are the o32 temporaries and argument/result registers
   used below ($2 = v0 result, $4-$7 = a0-a3, $25 = t9 call target);
   $31 (ra) is trashed because the call overwrites it. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6",       \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
5157
5158 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5159 long) == 4. */
5160
/* Zero-argument call: save $28 (gp) and $31 (ra) in an 8-byte block,
   reserve the 16-byte o32 outgoing-argument area, load the target into
   $25 (t9) and call it unredirected; result is read from $2 (v0). */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16\n\t"                                  \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5185
/* One-argument o32 call: arg1 in $4 (a0); gp/ra saved around the
   call; target in $25; result from $2. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"   /* arg1*/                          \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5212
/* Two-argument o32 call: args in $4-$5; target in $25; result $2. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5241
/* Three-argument o32 call: args in $4-$6; target in $25; result $2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5272
/* Four-argument o32 call: args fill all four argument registers
   $4-$7; target in $25; result $2. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5305
/* Five-argument o32 call: args 1-4 in $4-$7, arg5 stored at 16($29)
   (first stack slot above the 16-byte register home area).  Note arg5
   is staged through $4 before $4 is reloaded with arg1. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"  /* arg5, staged via $4 */          \
         "subu $29, $29, 24\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 24 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Six-argument o32 call: args 1-4 in $4-$7, args 5-6 at
   16($29)/20($29) in a 32-byte frame; $4 is used as staging for the
   stack args before being reloaded with arg1. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"  /* arg5 */                         \
         "subu $29, $29, 32\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"  /* arg6 */                         \
         "nop\n\t"                                                \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 32 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5380
/* Seven-argument o32 call: args 1-4 in $4-$7, args 5-7 at
   16..24($29) in a 32-byte frame. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"  /* arg5 */                         \
         "subu $29, $29, 32\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"  /* arg6 */                         \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"  /* arg7 */                         \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 32 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5423
/* Eight-argument o32 call: args 1-4 in $4-$7, args 5-8 at
   16..28($29) in a 40-byte frame. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"  /* arg5 */                         \
         "subu $29, $29, 40\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"  /* arg6 */                         \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"  /* arg7 */                         \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"  /* arg8 */                         \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 40 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5469
/* Nine-argument o32 call: args 1-4 in $4-$7, args 5-9 at
   16..32($29) in a 40-byte frame. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"  /* arg5 */                         \
         "subu $29, $29, 40\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"  /* arg6 */                         \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"  /* arg7 */                         \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"  /* arg8 */                         \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"  /* arg9 */                         \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 40 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5518
/* Ten-argument o32 call: args 1-4 in $4-$7, args 5-10 at
   16..36($29) in a 48-byte frame. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"  /* arg5 */                         \
         "subu $29, $29, 48\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"  /* arg6 */                         \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"  /* arg7 */                         \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"  /* arg8 */                         \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"  /* arg9 */                         \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 40(%1) \n\t"  /* arg10 */                        \
         "sw $4, 36($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 48 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5570
/* As CALL_FN_W_9W but for 11 arguments: args 5..11 go to stack
   slots 16..40($sp) in a 48-byte argument frame. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"  /* save $gp/$ra */              \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"     /* args 5..11 -> stack */       \
         "subu $29, $29, 48\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 40(%1) \n\t"                                     \
         "sw $4, 36($29) \n\t"                                    \
         "lw $4, 44(%1) \n\t"                                     \
         "sw $4, 40($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"      /* args 1..4 -> $a0..$a3 */     \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 48 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5626
/* As CALL_FN_W_9W but for 12 arguments: args 5..12 go to stack
   slots 16..44($sp) in a 56-byte argument frame.  NOTE(review):
   the input constraint here is "r" while the sibling macros use
   "0" -- both are valid; presumably historical inconsistency. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"  /* save $gp/$ra */              \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"     /* args 5..12 -> stack */       \
         "subu $29, $29, 56\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 40(%1) \n\t"                                     \
         "sw $4, 36($29) \n\t"                                    \
         "lw $4, 44(%1) \n\t"                                     \
         "sw $4, 40($29) \n\t"                                    \
         "lw $4, 48(%1) \n\t"                                     \
         "sw $4, 44($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"      /* args 1..4 -> $a0..$a3 */     \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 56 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5685
5686 #endif /* PLAT_mips32_linux */
5687
5688 /* ------------------------- mips64-linux ------------------------- */
5689
5690 #if defined(PLAT_mips64_linux)
5691
/* These regs are trashed by the hidden call. */
/* Listed as clobbers in every CALL_FN_ asm below: $2-$15 ($v0/$v1,
   $a0-$a7, $t0-$t3), $24 ($t8), $25 ($t9) and $31 ($ra). */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6",       \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
5696
/* These CALL_FN_ macros assume that on mips64-linux,
   sizeof(long long) == 8. */

/* Widen a word-sized value to the 64-bit register image stored in
   _argvec[].  The argument is parenthesised so that an expression
   argument (e.g. `a + b`) is cast as a whole; without the parens the
   casts would bind only to the expression's first operand. */
#define MIPS64_LONG2REG_CAST(x) ((long long)(long)(x))
5701
/* Call a 0-argument function through the non-redirected address in
   _orig.nraddr (mips64 n64 ABI).  The target goes in $25 ($t9) as
   the ABI requires; the result is read from $2 ($v0).
   NOTE(review): this macro uses the "0" (tied-to-output) input
   constraint while the other mips64 macros use "r" -- both work;
   presumably historical inconsistency. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[1];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      __asm__ volatile(                                           \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5718
/* As CALL_FN_W_v but passing one argument, loaded from _argvec[1]
   into $4 ($a0). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[2];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"   /* arg1*/                           \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5737
/* As CALL_FN_W_W but passing two arguments in $4/$5 ($a0/$a1).
   Fix: _argvec[0] now goes through MIPS64_LONG2REG_CAST like every
   other mips64 macro (it was the only assignment storing nraddr
   without the widening cast). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[3];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5758
5759
/* As CALL_FN_W_W but passing three arguments in $4-$6 ($a0-$a2).
   Fix: _argvec[0] now goes through MIPS64_LONG2REG_CAST for
   consistency with the other mips64 macros (it previously stored
   nraddr without the widening cast). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[4];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5782
/* As CALL_FN_W_W but passing four arguments in $4-$7 ($a0-$a3). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[5];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5807
/* As CALL_FN_W_W but for 5 arguments: args 1..5 in $4-$8
   ($a0-$a4, n64 ABI register args). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[6];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5834
/* As CALL_FN_W_W but for 6 arguments: args 1..6 in $4-$9. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[7];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5863
/* As CALL_FN_W_W but for 7 arguments: args 1..7 in $4-$10. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[8];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5895
/* As CALL_FN_W_W but for 8 arguments: args 1..8 in $4-$11, the
   full n64 register-argument set. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[9];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5929
/* As CALL_FN_W_8W but for 9 arguments: args 1..8 in $4-$11,
   arg9 spilled to 0($sp) in a fresh 8-byte frame. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[10];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 8\n\t"  /* room for one stack arg */    \
         "ld $4, 72(%1)\n\t"      /* arg9 -> 0($sp) */            \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 8(%1)\n\t"       /* args 1..8 -> $4..$11 */      \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 8\n\t"  /* pop the frame */             \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
5968
/* As CALL_FN_W_9W but for 10 arguments: args 9..10 spilled to
   0($sp)/8($sp) in a fresh 16-byte frame. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[11];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                  \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 16\n\t" /* room for two stack args */   \
         "ld $4, 72(%1)\n\t"      /* arg9 -> 0($sp) */            \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 80(%1)\n\t"      /* arg10 -> 8($sp) */           \
         "sd $4, 8($29)\n\t"                                      \
         "ld $4, 8(%1)\n\t"       /* args 1..8 -> $4..$11 */      \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 16\n\t"                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6010
/* As CALL_FN_W_9W but for 11 arguments: args 9..11 spilled to
   0..16($sp) in a fresh 24-byte frame. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[12];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                  \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11);                  \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 24\n\t" /* room for three stack args */ \
         "ld $4, 72(%1)\n\t"      /* arg9 -> 0($sp) */            \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 80(%1)\n\t"      /* arg10 -> 8($sp) */           \
         "sd $4, 8($29)\n\t"                                      \
         "ld $4, 88(%1)\n\t"      /* arg11 -> 16($sp) */          \
         "sd $4, 16($29)\n\t"                                     \
         "ld $4, 8(%1)\n\t"       /* args 1..8 -> $4..$11 */      \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 24\n\t"                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6056
/* As CALL_FN_W_9W but for 12 arguments: args 9..12 spilled to
   0..24($sp) in a fresh 32-byte frame. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[13];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                  \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11);                  \
      _argvec[12] = MIPS64_LONG2REG_CAST(arg12);                  \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 32\n\t" /* room for four stack args */  \
         "ld $4, 72(%1)\n\t"      /* arg9 -> 0($sp) */            \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 80(%1)\n\t"      /* arg10 -> 8($sp) */           \
         "sd $4, 8($29)\n\t"                                      \
         "ld $4, 88(%1)\n\t"      /* arg11 -> 16($sp) */          \
         "sd $4, 16($29)\n\t"                                     \
         "ld $4, 96(%1)\n\t"      /* arg12 -> 24($sp) */          \
         "sd $4, 24($29)\n\t"                                     \
         "ld $4, 8(%1)\n\t"       /* args 1..8 -> $4..$11 */      \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 32\n\t"                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6105
6106 #endif /* PLAT_mips64_linux */
6107
6108 /* ------------------------------------------------------------------ */
6109 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
6110 /* */
6111 /* ------------------------------------------------------------------ */
6112
6113 /* Some request codes. There are many more of these, but most are not
6114 exposed to end-user view. These are the public ones, all of the
6115 form 0x1000 + small_number.
6116
6117 Core ones are in the range 0x00000000--0x0000ffff. The non-public
6118 ones start at 0x2000.
6119 */
6120
6121 /* These macros are used by tools -- they must be public, but don't
6122 embed them into other programs. */
/* Build the base request code for the tool identified by the two
   characters 'a','b': they are packed into the top 16 bits of the
   request word. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
/* True iff request code 'v' belongs to tool 'a','b' (compares only
   the top 16 bits of 'v'). */
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
6127
6128 /* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
6129 This enum comprises an ABI exported by Valgrind to programs
6130 which use client requests. DO NOT CHANGE THE NUMERIC VALUES OF THESE
6131 ENTRIES, NOR DELETE ANY -- add new ones at the end of the most
6132 relevant group. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a monitor
             command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
          /* 0x130b: out of numeric sequence because it was added
             after the mempool requests; the ABI forbids renumbering. */
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK   = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL   = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL  = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC    = 0x1305,
          VG_USERREQ__MEMPOOL_FREE     = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM     = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL     = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE   = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS   = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Some requests used for Valgrind internal, such as
             self-test or self-hosting. */
          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901,
          /* Used by Inner Valgrind to inform Outer Valgrind where to
             find the list of inner guest threads */
          VG_USERREQ__INNER_THREADS    = 0x1902
   } Vg_ClientRequest;
6210
/* Non-GNU compilers don't understand GCC's __extension__ keyword;
   make it vanish so expansions that use it still parse. */
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif
6214
6215
6216 /* Returns the number of Valgrinds this code is running under. That
6217 is, 0 if running natively, 1 if running under Valgrind, 2 if
6218 running under Valgrind which is running under another Valgrind,
6219 etc. */
/* Expression form: 0 natively, otherwise the Valgrind nesting depth.
   NOTE(review): the last line ends with a continuation backslash, so
   the macro silently absorbs the following blank line -- harmless
   today, but fragile if code is ever added directly below. */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)                    \
6224
6225
6226 /* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
6227 _qzz_len - 1]. Useful if you are debugging a JITter or some such,
6228 since it provides a way to make sure valgrind will retranslate the
6229 invalidated area. Returns no value. */
/* Statement form; forwards (addr, len) to the core, returns nothing. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS,  \
                                    _qzz_addr, _qzz_len, 0, 0, 0)
6233
/* Inner/outer setups only: tells the outer Valgrind where the inner
   one keeps its guest-thread list (see VG_USERREQ__INNER_THREADS). */
#define VALGRIND_INNER_THREADS(_qzz_addr)                               \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__INNER_THREADS,           \
                                   _qzz_addr, 0, 0, 0, 0)
6237
6238
6239 /* These requests are for getting Valgrind itself to print something.
6240 Possibly with a backtrace. This is a really ugly hack. The return value
6241 is the number of characters printed, excluding the "**<pid>** " part at the
6242 start and the backtrace (if present). */
6243
/* NOTE(review): '&&' binds tighter than '||' here, so this reads as
   __GNUC__ || (__INTEL_COMPILER && !_MSC_VER) -- i.e. any __GNUC__
   compiler gets the declaration even when _MSC_VER is also defined
   (e.g. clang-cl).  Presumably intended, since such compilers accept
   __attribute__; confirm before changing. */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
/* Modern GCC will optimize the static routine out if unused,
   and unused attribute will shut down warnings about it. */
static int VALGRIND_PRINTF(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
static int
#if defined(_MSC_VER)
__inline
#endif
/* Print a printf-style message to the Valgrind log via the
   PRINTF_VALIST_BY_REF client request.  Returns the number of
   characters printed; compiles to a constant 0 under NVALGRIND. */
VALGRIND_PRINTF(const char *format, ...)
{
#if defined(NVALGRIND)
   (void)format;   /* request machinery compiled out; swallow args */
   return 0;
#else /* NVALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
   /* The va_list is passed BY REFERENCE: passing it by value assumes
      it fits in a UWord, which generally isn't true (see the enum
      comments on the deprecated 0x1401/0x1402 requests). */
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}
6284
/* NOTE(review): same '||'/'&&' precedence shape as the guard above
   VALGRIND_PRINTF -- confirm it is intentional before changing. */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
static int
#if defined(_MSC_VER)
__inline
#endif
/* As VALGRIND_PRINTF, but Valgrind additionally appends a stack
   backtrace to the logged message (the return value counts only the
   formatted characters, not the backtrace). */
VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
{
#if defined(NVALGRIND)
   (void)format;   /* request machinery compiled out; swallow args */
   return 0;
#else /* NVALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
   /* va_list passed by reference -- see VALGRIND_PRINTF. */
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}
6323
6324
6325 /* These requests allow control to move from the simulated CPU to the
6326 real CPU, calling an arbitrary function.
6327
6328 Note that the current ThreadId is inserted as the first argument.
6329 So this call:
6330
6331 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
6332
6333 requires f to have this signature:
6334
6335 Word f(Word tid, Word arg1, Word arg2)
6336
6337 where "Word" is a word-sized type.
6338
6339 Note that these client requests are not entirely reliable. For example,
6340 if you call a function with them that subsequently calls printf(),
6341 there's a high chance Valgrind will crash. Generally, your prospects of
6342 these working are made higher if the called function does not refer to
6343 any global variables, and does not refer to any libc or other functions
6344 (printf et al). Any kind of entanglement with libc or dynamic linking is
6345 likely to have a bad outcome, for tricky reasons which we've grappled
6346 with a lot in the past.
6347 */
/* Run _qyy_fn(tid) on the real CPU; yields the function's result, or
   0 when not running under Valgrind. */
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)

/* Run _qyy_fn(tid, _qyy_arg1) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL1,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, 0, 0, 0)

/* Run _qyy_fn(tid, _qyy_arg1, _qyy_arg2) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL2,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2, 0, 0)

/* Run _qyy_fn(tid, _qyy_arg1, _qyy_arg2, _qyy_arg3) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,             \
                                    VG_USERREQ__CLIENT_CALL3,           \
                                    _qyy_fn,                            \
                                    _qyy_arg1, _qyy_arg2,               \
                                    _qyy_arg3, 0)
6372
6373
6374 /* Counts the number of errors that have been recorded by a tool. Nb:
6375 the tool must record the errors with VG_(maybe_record_error)() or
6376 VG_(unique_error)() for them to be counted. */
/* Expression form; yields 0 when not running under Valgrind. */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)
6382
6383 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
6384 when heap blocks are allocated in order to give accurate results. This
6385 happens automatically for the standard allocator functions such as
6386 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
6387 delete[], etc.
6388
6389 But if your program uses a custom allocator, this doesn't automatically
6390 happen, and Valgrind will not do as well. For example, if you allocate
   superblocks with mmap() and then allocate chunks of the superblocks, all
6392 Valgrind's observations will be at the mmap() level and it won't know that
6393 the chunks should be considered separate entities. In Memcheck's case,
6394 that means you probably won't get heap block overrun detection (because
6395 there won't be redzones marked as unaddressable) and you definitely won't
6396 get any leak detection.
6397
6398 The following client requests allow a custom allocator to be annotated so
6399 that it can be handled accurately by Valgrind.
6400
6401 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
6402 by a malloc()-like function. For Memcheck (an illustrative case), this
6403 does two things:
6404
6405 - It records that the block has been allocated. This means any addresses
6406 within the block mentioned in error messages will be
6407 identified as belonging to the block. It also means that if the block
6408 isn't freed it will be detected by the leak checker.
6409
6410 - It marks the block as being addressable and undefined (if 'is_zeroed' is
6411 not set), or addressable and defined (if 'is_zeroed' is set). This
6412 controls how accesses to the block by the program are handled.
6413
6414 'addr' is the start of the usable block (ie. after any
6415 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
6416 can apply redzones -- these are blocks of padding at the start and end of
6417 each block. Adding redzones is recommended as it makes it much more likely
6418 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
6419 zeroed (or filled with another predictable value), as is the case for
6420 calloc().
6421
6422 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
6423 heap block -- that will be used by the client program -- is allocated.
6424 It's best to put it at the outermost level of the allocator if possible;
6425 for example, if you have a function my_alloc() which calls
6426 internal_alloc(), and the client request is put inside internal_alloc(),
6427 stack traces relating to the heap block will contain entries for both
6428 my_alloc() and internal_alloc(), which is probably not what you want.
6429
6430 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
6431 custom blocks from within a heap block, B, that has been allocated with
6432 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
6433 -- the custom blocks will take precedence.
6434
6435 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
6436 Memcheck, it does two things:
6437
6438 - It records that the block has been deallocated. This assumes that the
6439 block was annotated as having been allocated via
6440 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6441
6442 - It marks the block as being unaddressable.
6443
6444 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
6445 heap block is deallocated.
6446
6447 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
6448 Memcheck, it does four things:
6449
6450 - It records that the size of a block has been changed. This assumes that
6451 the block was annotated as having been allocated via
6452 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6453
   - If the block shrank, it marks the freed memory as being unaddressable.
6455
6456 - If the block grew, it marks the new area as undefined and defines a red
6457 zone past the end of the new block.
6458
6459 - The V-bits of the overlap between the old and the new block are preserved.
6460
6461 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
6462 and before deallocation of the old block.
6463
6464 In many cases, these three client requests will not be enough to get your
6465 allocator working well with Memcheck. More specifically, if your allocator
6466 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
6467 will be necessary to mark the memory as addressable just before the zeroing
6468 occurs, otherwise you'll get a lot of invalid write errors. For example,
6469 you'll need to do this if your allocator recycles freed blocks, but it
6470 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
6471 Alternatively, if your allocator reuses freed blocks for allocator-internal
6472 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
6473
6474 Really, what's happening is a blurring of the lines between the client
6475 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
6476 memory should be considered unaddressable to the client program, but the
6477 allocator knows more than the rest of the client program and so may be able
6478 to safely access it. Extra client requests are necessary for Valgrind to
6479 understand the distinction between the allocator and the rest of the
6480 program.
6481
6482 Ignored if addr == 0.
6483 */
/* Annotate a custom allocation (full semantics in the long comment above).
   addr      = start of the usable block (after any leading redzone)
   sizeB     = usable size of the block in bytes
   rzB       = redzone size in bytes, if the allocator applies redzones
   is_zeroed = nonzero if the block is handed out pre-filled with a
               predictable value (e.g. zeroed, as calloc does)
   Expands to a statement.  Ignored if addr == 0. */
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)          \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK,       \
                                    addr, sizeB, rzB, is_zeroed, 0)
6487
6488 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
6489 Ignored if addr == 0.
6490 */
/* Annotate an in-place resize of a block previously announced with
   VALGRIND_MALLOCLIKE_BLOCK: its usable size changes from oldSizeB to
   newSizeB bytes; rzB is the redzone size.  See the comment for
   VALGRIND_MALLOCLIKE_BLOCK for details.  Ignored if addr == 0. */
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK,    \
                                    addr, oldSizeB, newSizeB, rzB, 0)
6494
6495 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
6496 Ignored if addr == 0.
6497 */
/* Annotate a custom deallocation of a block previously announced with
   VALGRIND_MALLOCLIKE_BLOCK; rzB is the redzone size.  See the comment
   for VALGRIND_MALLOCLIKE_BLOCK for details.  Ignored if addr == 0. */
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,         \
                                    addr, rzB, 0, 0, 0)
6501
6502 /* Create a memory pool. */
/* Create a memory pool anchored at 'pool'.  rzB is the redzone size for
   blocks subsequently associated with the pool; is_zeroed indicates
   whether those blocks are handed out pre-filled with a predictable
   value (same meaning as for VALGRIND_MALLOCLIKE_BLOCK). */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,         \
                                    pool, rzB, is_zeroed, 0, 0)
6506
6507 /* Create a memory pool with some flags specifying extended behaviour.
6508 When flags is zero, the behaviour is identical to VALGRIND_CREATE_MEMPOOL.
6509
6510 The flag VALGRIND_MEMPOOL_METAPOOL specifies that the pieces of memory
6511 associated with the pool using VALGRIND_MEMPOOL_ALLOC will be used
6512 by the application as superblocks to dole out MALLOC_LIKE blocks using
   VALGRIND_MALLOCLIKE_BLOCK.  In other words, a meta pool is a two-level
   pool: the first level is the blocks described by VALGRIND_MEMPOOL_ALLOC.
   The second level blocks are described using VALGRIND_MALLOCLIKE_BLOCK.
   Note that the association between the pool and the second level blocks
   is implicit: second level blocks will be located inside first level
   blocks.  It is necessary to use the VALGRIND_MEMPOOL_METAPOOL flag
   for such two-level pools, as otherwise valgrind will detect overlapping
   memory blocks, and will abort execution (e.g. during leak search).
6521
6522 Such a meta pool can also be marked as an 'auto free' pool using the flag
6523 VALGRIND_MEMPOOL_AUTO_FREE, which must be OR-ed together with the
6524 VALGRIND_MEMPOOL_METAPOOL. For an 'auto free' pool, VALGRIND_MEMPOOL_FREE
6525 will automatically free the second level blocks that are contained
6526 inside the first level block freed with VALGRIND_MEMPOOL_FREE.
6527 In other words, calling VALGRIND_MEMPOOL_FREE will cause implicit calls
6528 to VALGRIND_FREELIKE_BLOCK for all the second level blocks included
6529 in the first level block.
6530 Note: it is an error to use the VALGRIND_MEMPOOL_AUTO_FREE flag
6531 without the VALGRIND_MEMPOOL_METAPOOL flag.
6532 */
/* Flag bits for VALGRIND_CREATE_MEMPOOL_EXT; see the explanatory comment
   above.  VALGRIND_MEMPOOL_AUTO_FREE is only valid when OR-ed with
   VALGRIND_MEMPOOL_METAPOOL. */
#define VALGRIND_MEMPOOL_AUTO_FREE 1
#define VALGRIND_MEMPOOL_METAPOOL 2
/* As VALGRIND_CREATE_MEMPOOL, but with extended-behaviour 'flags'.
   Uses the same underlying request; flags == 0 is identical to
   VALGRIND_CREATE_MEMPOOL. */
#define VALGRIND_CREATE_MEMPOOL_EXT(pool, rzB, is_zeroed, flags)        \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,         \
                                    pool, rzB, is_zeroed, flags, 0)
6538
6539 /* Destroy a memory pool. */
/* Destroy the memory pool anchored at 'pool'.  (The trailing zero
   arguments are unused by this request.) */
#define VALGRIND_DESTROY_MEMPOOL(pool)                                  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,        \
                                    pool, 0, 0, 0, 0)
6543
6544 /* Associate a piece of memory with a memory pool. */
/* Associate the 'size' bytes starting at 'addr' with the pool anchored
   at 'pool' (i.e. record a pool-level allocation). */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                        \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,          \
                                    pool, addr, size, 0, 0)
6548
6549 /* Disassociate a piece of memory from a memory pool. */
/* Disassociate the piece starting at 'addr' from the pool anchored at
   'pool' (i.e. record a pool-level deallocation). */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                               \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,           \
                                    pool, addr, 0, 0, 0)
6553
6554 /* Disassociate any pieces outside a particular range. */
/* Disassociate from 'pool' any pieces lying outside the range
   [addr, addr+size). */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,           \
                                    pool, addr, size, 0, 0)
6558
/* Move a memory pool: tell the tool that the pool previously anchored
   at poolA is now anchored at poolB.  (The original comment here --
   "Resize and/or move a piece associated with a memory pool" -- was a
   copy-paste of the VALGRIND_MEMPOOL_CHANGE comment; this request takes
   two pool anchors, not a pool and a piece address.) */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,           \
                                    poolA, poolB, 0, 0, 0)
6563
/* Resize and/or move a piece associated with a memory pool: the piece
   previously at addrA is now at addrB with usable size 'size'. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)               \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,         \
                                    pool, addrA, addrB, size, 0)
6568
/* Return 1 if a mempool exists, else 0.  Expands to an (unsigned)
   expression; the leading 0 is the default result when the request is
   not intercepted (i.e. when not running under Valgrind). */
#define VALGRIND_MEMPOOL_EXISTS(pool)                                   \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                               VG_USERREQ__MEMPOOL_EXISTS,              \
                               pool, 0, 0, 0, 0)
6574
6575 /* Mark a piece of memory as being a stack. Returns a stack id.
6576 start is the lowest addressable stack byte, end is the highest
6577 addressable stack byte. */
/* Register [start, end] as a stack; both bounds are addressable stack
   bytes (start lowest, end highest -- see comment above).  Evaluates to
   the new stack id (unsigned); the default result when not running
   under Valgrind is 0. */
#define VALGRIND_STACK_REGISTER(start, end)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                               VG_USERREQ__STACK_REGISTER,              \
                               start, end, 0, 0, 0)
6582
6583 /* Unmark the piece of memory associated with a stack id as being a
6584 stack. */
/* Deregister the stack identified by 'id' (a value previously returned
   by VALGRIND_STACK_REGISTER). */
#define VALGRIND_STACK_DEREGISTER(id)                                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER,       \
                                    id, 0, 0, 0, 0)
6588
6589 /* Change the start and end address of the stack id.
6590 start is the new lowest addressable stack byte, end is the new highest
6591 addressable stack byte. */
/* Update the bounds of the stack identified by 'id': 'start' is the new
   lowest addressable stack byte, 'end' the new highest. */
#define VALGRIND_STACK_CHANGE(id, start, end)                           \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,           \
                                    id, start, end, 0, 0)
6595
6596 /* Load PDB debug info for Wine PE image_map. */
/* Load PDB debug info for a Wine PE image mapping.  'fd' refers to the
   image, 'ptr'/'total_size' describe the mapped region, and 'delta' is
   an address adjustment -- presumably the load bias; confirm against
   the Wine integration that issues this request. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO,     \
                                    fd, ptr, total_size, delta, 0)
6600
6601 /* Map a code address to a source file name and line number. buf64
6602 must point to a 64-byte buffer in the caller's address space. The
6603 result will be dumped in there and is guaranteed to be zero
6604 terminated. If no info is found, the first byte is set to zero. */
/* Map code address 'addr' to a source file name and line number,
   written into the caller-supplied 64-byte buffer 'buf64' (always
   zero-terminated; first byte set to zero if no info is found -- see
   comment above).  Evaluates to an (unsigned) expression. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                          \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,            \
                               addr, buf64, 0, 0, 0)
6609
6610 /* Disable error reporting for this thread. Behaves in a stack like
6611 way, so you can safely call this multiple times provided that
6612 VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
6613 to re-enable reporting. The first call of this macro disables
6614 reporting. Subsequent calls have no effect except to increase the
6615 number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
6616 reporting. Child threads do not inherit this setting from their
6617 parents -- they are always created with reporting enabled. */
/* Increment this thread's error-disablement count by 1 (the '1'
   argument).  Reporting stays disabled until a matching number of
   VALGRIND_ENABLE_ERROR_REPORTING calls -- see comment above. */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    1, 0, 0, 0, 0)
6621
6622 /* Re-enable error reporting, as per comments on
6623 VALGRIND_DISABLE_ERROR_REPORTING. */
/* Decrement this thread's error-disablement count by 1 (the '-1'
   argument), re-enabling reporting once it reaches zero -- see the
   comments on VALGRIND_DISABLE_ERROR_REPORTING. */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    -1, 0, 0, 0, 0)
6627
6628 /* Execute a monitor command from the client program.
6629 If a connection is opened with GDB, the output will be sent
6630 according to the output mode set for vgdb.
6631 If no connection is opened, output will go to the log output.
6632 Returns 1 if command not recognised, 0 otherwise. */
/* Execute the monitor command string 'command' (see comment above for
   output routing).  Expands to an expression: 1 if the command was not
   recognised, 0 otherwise; the default result when not running under
   Valgrind is 0. */
#define VALGRIND_MONITOR_COMMAND(command)                               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
                                    command, 0, 0, 0, 0)
6636
6637
/* Tidy up: remove the internal PLAT_* platform-selection macros so they
   do not leak into code that includes this header. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris

#endif /* __VALGRIND_H */
6655