/* SPDX-License-Identifier: GPL-2.0+ AND bzip2-1.0.6 */
/*
   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2000-2017 Julian Seward.  All rights reserved.
   Copyright (C) 2021 Sean Anderson <seanga2@gmail.com>
*/

/* This file is for inclusion into client (your!) code.

   You can use these macros to manipulate and query Valgrind's
   execution inside your own programs.

   The resulting executables will still run without Valgrind, just a
   little bit more slowly than they otherwise would, but otherwise
   unchanged.  When not running on valgrind, each client request
   consumes very few (eg. 7) instructions, so the resulting performance
   loss is negligible unless you plan to execute client requests
   millions of times per second.  Nevertheless, if that is still a
   problem, you can compile with the NVALGRIND symbol defined (gcc
   -DNVALGRIND) so that client requests are not even compiled in. */
23
24#ifndef __VALGRIND_H
25#define __VALGRIND_H
26
27
/* ------------------------------------------------------------------ */
/* VERSION NUMBER OF VALGRIND                                         */
/* ------------------------------------------------------------------ */

/* Specify Valgrind's version number, so that user code can
   conditionally compile based on our version number.  Note that these
   were introduced at version 3.6 and so do not exist in version 3.5
   or earlier.  The recommended way to use them to check for "version
   X.Y or later" is (eg)

#if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__)   \
    && (__VALGRIND_MAJOR__ > 3                                   \
        || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
*/
/* Version of Valgrind these macros were taken from (see comment above
   for the recommended version-check idiom). */
#define __VALGRIND_MAJOR__    3
#define __VALGRIND_MINOR__    16
44
45
#include <stdarg.h>
47
/* Nb: this file might be included in a file compiled with -ansi.  So
   we can't use C++ style "//" comments nor the "asm" keyword (instead
   use "__asm__"). */

/* Derive some tags indicating what the target platform is.  Note
   that in this file we're using the compiler's CPP symbols for
   identifying architectures, which are different to the ones we use
   within the rest of Valgrind.  Note, __powerpc__ is active for both
   32 and 64-bit PPC, whereas __powerpc64__ is only active for the
   latter (on Linux, that is).

   Misc note: how to find out what's predefined in gcc by default:
   gcc -Wp,-dM somefile.c
*/
/* Start from a clean slate: exactly one PLAT_* tag is defined by the
   detection chain below, or none (in which case CONFIG_VALGRIND is
   undefined and no inline asm is generated). */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_nanomips_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris
79
80
/* Map the compiler's predefined CPP symbols onto exactly one PLAT_*
   tag.  The chain is ordered so that more specific combinations
   (e.g. MinGW on x86) are matched before generic ones. */
#if defined(__APPLE__) && defined(__i386__)
#  define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#  define PLAT_amd64_darwin 1
#elif (defined(__MINGW32__) && defined(__i386__)) \
      || defined(__CYGWIN32__) \
      || (defined(_WIN32) && defined(_M_IX86))
#  define PLAT_x86_win32 1
#elif (defined(__MINGW32__) && defined(__x86_64__)) \
      || (defined(_WIN32) && defined(_M_X64))
/* __MINGW32__ and _WIN32 are defined in 64 bit mode as well. */
#  define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
#  define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
#  define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#  define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
/* Big Endian uses ELF version 1 */
#  define PLAT_ppc64be_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
/* Little Endian uses ELF version 2 */
#  define PLAT_ppc64le_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
#  define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#  define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
#  define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==32)
#  define PLAT_mips32_linux 1
#elif defined(__linux__) && defined(__nanomips__)
#  define PLAT_nanomips_linux 1
#elif defined(__sun) && defined(__i386__)
#  define PLAT_x86_solaris 1
#elif defined(__sun) && defined(__x86_64__)
#  define PLAT_amd64_solaris 1
#else
/* If we're not compiling for our target platform, don't generate
   any inline asms. */
#  undef CONFIG_VALGRIND
#endif
126
127
/* ------------------------------------------------------------------ */
/* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS.  There is nothing */
/* in here of use to end-users -- skip to the next section.           */
/* ------------------------------------------------------------------ */

/*
 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
 * request.  Accepts both pointers and integers as arguments.
 *
 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
 * client request that does not return a value.
 *
 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
 * client request and whose value equals the client request result.  Accepts
 * both pointers and integers as arguments.  Note that such calls are not
 * necessarily pure functions -- they may have side effects.
 */
145
/* Statement form: store the request result into _zzq_rlval.  Thin
   wrapper over VALGRIND_DO_CLIENT_REQUEST_EXPR, which is defined
   per-platform below. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

/* Statement form for requests whose result is not needed: result is
   discarded and the default is 0. */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                                        _zzq_arg2,  _zzq_arg3,          \
                                        _zzq_arg4, _zzq_arg5)           \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                    (_zzq_request), (_zzq_arg1), (_zzq_arg2),           \
                    (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
158
159#if !IS_ENABLED(CONFIG_VALGRIND)
160
/* Define NVALGRIND to completely remove the Valgrind magic sequence
   from the compiled code (analogous to NDEBUG's effects on
   assert()).  The stub simply yields the default value; the request
   code and arguments are not evaluated at all. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
      (_zzq_default)
168
169#else /* ! CONFIG_VALGRIND */

/* The following defines the magic code sequences which the JITter
   spots and handles magically.  Don't look too closely at them as
   they will rot your brain.

   The assembly code sequences for all architectures is in this one
   file.  This is because this file must be stand-alone, and we don't
   want to have multiple files.

   For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
   value gets put in the return slot, so that everything works when
   this is executed not under Valgrind.  Args are passed in a memory
   block, and so there's no intrinsic limit to the number that could
   be passed, but it's currently five.

   The macro args are:
      _zzq_rlval    result lvalue
      _zzq_default  default value (result returned when running on real CPU)
      _zzq_request  request code
      _zzq_arg1..5  request params

   The other two macros are used to support function wrapping, and are
   a lot simpler.  VALGRIND_GET_NR_CONTEXT returns the value of the
   guest's NRADDR pseudo-register and whatever other information is
   needed to safely run the call original from the wrapper: on
   ppc64-linux, the R2 value at the divert point is also needed.  This
   information is abstracted into a user-visible type, OrigFn.

   VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
   guest, but guarantees that the branch instruction will not be
   redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
   branch-and-link-to-r11.  VALGRIND_CALL_NOREDIR is just text, not a
   complete inline asm, since it needs to be combined with more magic
   inline asm stuff to be useful.
*/
205
/* ----------------- x86-{linux,darwin,solaris} ---------------- */

#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin)  \
    ||  (defined(PLAT_x86_win32) && defined(__GNUC__))     \
    ||  defined(PLAT_x86_solaris)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* The preamble rotates %edi by 3+13+29+19 = 64 bits in total, i.e. two
   full 32-bit rotations, so on a real CPU it leaves %edi unchanged. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3, %%edi ; roll $13, %%edi\n\t"       \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm-fragment only: must be embedded in a larger inline asm. */
#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgl %%edi,%%edi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__)
          || PLAT_x86_solaris */
272
/* ------------------------- x86-Win32 ------------------------- */

#if defined(PLAT_x86_win32) && !defined(__GNUC__)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#if defined(_MSC_VER)

/* MSVC inline-assembler form of the same rotate-%edi-by-64-bits
   preamble used by the GCC x86 variant above. */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
   __asm rol edi, 3  __asm rol edi, 13 \
   __asm rol edi, 29 __asm rol edi, 19

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    valgrind_do_client_request_expr((uintptr_t)(_zzq_default),    \
        (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1),        \
        (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3),           \
        (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

/* Out-of-line helper: MSVC's __asm blocks cannot appear inside an
   expression, so the request is issued from a real function. */
static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
                                uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                uintptr_t _zzq_arg5)
{
    volatile uintptr_t _zzq_args[6];
    volatile unsigned int _zzq_result;
    _zzq_args[0] = (uintptr_t)(_zzq_request);
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
            __SPECIAL_INSTRUCTION_PREAMBLE
            /* %EDX = client_request ( %EAX ) */
            __asm xchg ebx,ebx
            __asm mov _zzq_result, edx
    }
    return _zzq_result;
}

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
      volatile unsigned int __addr;                               \
      __asm { __SPECIAL_INSTRUCTION_PREAMBLE                      \
              /* %EAX = guest_NRADDR */                           \
              __asm xchg ecx,ecx                                  \
              __asm mov __addr, eax                               \
      }                                                           \
      _zzq_orig->nraddr = __addr;                                 \
    }

/* Deliberately unusable: expands to the undefined identifier ERROR so
   any use fails to compile on this platform. */
#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm { __SPECIAL_INSTRUCTION_PREAMBLE                        \
            __asm xchg edi,edi                                    \
    }                                                             \
 } while (0)

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 */
345
/* ----------------- amd64-{linux,darwin,solaris} --------------- */

#if defined(PLAT_amd64_linux)  ||  defined(PLAT_amd64_darwin) \
    ||  defined(PLAT_amd64_solaris) \
    ||  (defined(PLAT_amd64_win64) && defined(__GNUC__))

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* The preamble rotates %rdi by 3+13+61+51 = 128 bits in total, i.e. two
   full 64-bit rotations, so on a real CPU it leaves %rdi unchanged. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3, %%rdi ; rolq $13, %%rdi\n\t"       \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    __extension__                                                 \
    ({ volatile unsigned long int _zzq_args[6];                   \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RDX = client_request ( %RAX ) */         \
                     "xchgq %%rbx,%%rbx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long int __addr;                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RAX = guest_NRADDR */                    \
                     "xchgq %%rcx,%%rcx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm-fragment only: must be embedded in a larger inline asm. */
#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgq %%rdi,%%rdi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
411
/* ------------------------- amd64-Win64 ------------------------- */

#if defined(PLAT_amd64_win64) && !defined(__GNUC__)

#error Unsupported compiler.

#endif /* PLAT_amd64_win64 */
419
/* ------------------------ ppc32-linux ------------------------ */

#if defined(PLAT_ppc32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* The preamble rotates r0 by 3+13+29+19 = 64 bits in total, i.e. two
   full 32-bit rotations, so on a real CPU it leaves r0 unchanged. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                    "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t"  \
                    "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({         unsigned int  _zzq_args[6];                          \
             unsigned int  _zzq_result;                           \
             unsigned int* _zzq_ptr;                              \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm-fragment only: must be embedded in a larger inline asm. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc32_linux */
488
/* ------------------------ ppc64-linux ------------------------ */

#if defined(PLAT_ppc64be_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;     /* what tocptr do we need? */
   }
   OrigFn;

/* The preamble rotates r0 by 3+13+61+51 = 128 bits in total, i.e. two
   full 64-bit rotations, so on a real CPU it leaves r0 unchanged. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({         unsigned long int  _zzq_args[6];                     \
             unsigned long int  _zzq_result;                      \
             unsigned long int* _zzq_ptr;                         \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
  })

/* Fetches both the NRADDR pseudo-register and the R2 (TOC pointer)
   value at the divert point -- both are needed on ppc64. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

/* Asm-fragment only: must be embedded in a larger inline asm. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64be_linux */
567
#if defined(PLAT_ppc64le_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;     /* what tocptr do we need? */
   }
   OrigFn;

/* The preamble rotates r0 by 3+13+61+51 = 128 bits in total, i.e. two
   full 64-bit rotations, so on a real CPU it leaves r0 unchanged. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({         unsigned long int  _zzq_args[6];                     \
             unsigned long int  _zzq_result;                      \
             unsigned long int* _zzq_ptr;                         \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
  })

/* Fetches both the NRADDR pseudo-register and the R2 (TOC pointer)
   value at the divert point -- both are needed on ppc64. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

/* Asm-fragment only; little-endian ELFv2 uses R12, not R11. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R12 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64le_linux */
644
/* ------------------------- arm-linux ------------------------- */

#if defined(PLAT_arm_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* The preamble rotates r12 by 3+13+29+19 = 64 bits in total, i.e. two
   full 32-bit rotations, so on a real CPU it leaves r12 unchanged. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t"   \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("mov r3, %1\n\t" /*default*/                 \
                     "mov r4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = client_request ( R4 ) */             \
                     "orr r10, r10, r10\n\t"                      \
                     "mov %0, r3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "r3", "r4");                \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = guest_NRADDR */                      \
                     "orr r11, r11, r11\n\t"                      \
                     "mov %0, r3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm-fragment only: must be embedded in a larger inline asm. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R4 */        \
                     "orr r12, r12, r12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr r9, r9, r9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm_linux */
712
/* ------------------------ arm64-linux ------------------------- */

#if defined(PLAT_arm64_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* The preamble rotates x12 by 3+13+51+61 = 128 bits in total, i.e. two
   full 64-bit rotations, so on a real CPU it leaves x12 unchanged. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "ror x12, x12, #3 ; ror x12, x12, #13 \n\t"           \
            "ror x12, x12, #51 ; ror x12, x12, #61 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned long int _zzq_args[6];                      \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile("mov x3, %1\n\t" /*default*/                 \
                     "mov x4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = client_request ( X4 ) */             \
                     "orr x10, x10, x10\n\t"                      \
                     "mov %0, x3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" ((unsigned long int)(_zzq_default)),   \
                       "r" (&_zzq_args[0])                        \
                     : "cc","memory", "x3", "x4");                \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = guest_NRADDR */                      \
                     "orr x11, x11, x11\n\t"                      \
                     "mov %0, x3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "x3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm-fragment only: must be embedded in a larger inline asm. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir X8 */          \
                     "orr x12, x12, x12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr x9, x9, x9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm64_linux */
781
/* ------------------------ s390x-linux ------------------------ */

#if defined(PLAT_s390x_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
 * code.  This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_decoder.c).
 * Each "lr x,x" copies a register onto itself, so on a real CPU the
 * whole sequence is a no-op. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "lr 15,15\n\t"                               \
                     "lr 1,1\n\t"                                 \
                     "lr 2,2\n\t"                                 \
                     "lr 3,3\n\t"

/* Trailing no-op selecting which request the preamble introduces. */
#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE  "lr 5,5\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
 ({volatile unsigned long int _zzq_args[6];                       \
   volatile unsigned long int _zzq_result;                        \
   _zzq_args[0] = (unsigned long int)(_zzq_request);              \
   _zzq_args[1] = (unsigned long int)(_zzq_arg1);                 \
   _zzq_args[2] = (unsigned long int)(_zzq_arg2);                 \
   _zzq_args[3] = (unsigned long int)(_zzq_arg3);                 \
   _zzq_args[4] = (unsigned long int)(_zzq_arg4);                 \
   _zzq_args[5] = (unsigned long int)(_zzq_arg5);                 \
   __asm__ volatile(/* r2 = args */                               \
                    "lgr 2,%1\n\t"                                \
                    /* r3 = default */                            \
                    "lgr 3,%2\n\t"                                \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    __CLIENT_REQUEST_CODE                         \
                    /* results = r3 */                            \
                    "lgr %0, 3\n\t"                               \
                    : "=d" (_zzq_result)                          \
                    : "a" (&_zzq_args[0]), "0" (_zzq_default)     \
                    : "cc", "2", "3", "memory"                    \
                   );                                             \
   _zzq_result;                                                   \
 })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                    \
   volatile unsigned long int __addr;                             \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                    __GET_NR_CONTEXT_CODE                         \
                    "lgr %0, 3\n\t"                               \
                    : "=a" (__addr)                               \
                    :                                             \
                    : "cc", "3", "memory"                         \
                   );                                             \
   _zzq_orig->nraddr = __addr;                                    \
 }

/* Asm-fragment only: must be embedded in a larger inline asm. */
#define VALGRIND_CALL_NOREDIR_R1                                  \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     __VEX_INJECT_IR_CODE);                       \
 } while (0)

#endif /* PLAT_s390x_linux */
858
/* ------------------------- mips32-linux ---------------- */

#if defined(PLAT_mips32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* .word 0x342
 * .word 0x742
 * .word 0xC2
 * .word 0x4C2
 * Each srl targets $0 (the hard-wired zero register), so on a real
 * CPU the sequence is a no-op. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "srl $0, $0, 13\n\t"                         \
                     "srl $0, $0, 29\n\t"                         \
                     "srl $0, $0, 3\n\t"                          \
                     "srl $0, $0, 19\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("move $11, %1\n\t" /*default*/               \
                     "move $12, %2\n\t" /*ptr*/                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* T3 = client_request ( T4 ) */             \
                     "or $13, $13, $13\n\t"                       \
                     "move %0, $11\n\t" /*result*/                \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "$11", "$12", "memory");                   \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %t9 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11"                                      \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm-fragment only: must be embedded in a larger inline asm. */
#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%t9 */                      \
                     "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or $11, $11, $11\n\t"                       \
                    );                                            \
 } while (0)


#endif /* PLAT_mips32_linux */
931
/* ------------------------- mips64-linux ---------------- */

#if defined(PLAT_mips64_linux)

typedef
   struct {
      unsigned long nraddr; /* where's the code? */
   }
   OrigFn;

/* dsll $0,$0, 3
 * dsll $0,$0, 13
 * dsll $0,$0, 29
 * dsll $0,$0, 19
 * Each dsll targets $0 (the hard-wired zero register), so on a real
 * CPU the sequence is a no-op. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "dsll $0,$0, 3 ; dsll $0,$0,13\n\t"          \
                     "dsll $0,$0,29 ; dsll $0,$0,19\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned long int _zzq_args[6];                     \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile("move $11, %1\n\t" /*default*/               \
                     "move $12, %2\n\t" /*ptr*/                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $11 = client_request ( $12 ) */           \
                     "or $13, $13, $13\n\t"                       \
                     "move %0, $11\n\t" /*result*/                \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "$11", "$12", "memory");                   \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long int __addr;                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $11 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11");                                    \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm-fragment only: must be embedded in a larger inline asm. */
#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir $25 */                       \
                     "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or $11, $11, $11\n\t"                       \
                    );                                            \
 } while (0)

#endif /* PLAT_mips64_linux */
1000
#if defined(PLAT_nanomips_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Special-instruction preamble recognised by Valgrind's translator.
   A no-op when executed natively:
   8000 c04d  srl  zero, zero, 13
   8000 c05d  srl  zero, zero, 29
   8000 c043  srl  zero, zero,  3
   8000 c053  srl  zero, zero, 19
*/

#define __SPECIAL_INSTRUCTION_PREAMBLE "srl[32] $zero, $zero, 13 \n\t" \
                                       "srl[32] $zero, $zero, 29 \n\t" \
                                       "srl[32] $zero, $zero, 3  \n\t" \
                                       "srl[32] $zero, $zero, 19 \n\t"

/* Pass a client request (_zzq_request plus five args) to Valgrind.
   Evaluates to _zzq_default when not running under Valgrind. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                            \
       _zzq_default, _zzq_request,                                  \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)       \
  __extension__                                                     \
  ({ volatile unsigned int _zzq_args[6];                            \
     volatile unsigned int _zzq_result;                             \
     _zzq_args[0] = (unsigned int)(_zzq_request);                   \
     _zzq_args[1] = (unsigned int)(_zzq_arg1);                      \
     _zzq_args[2] = (unsigned int)(_zzq_arg2);                      \
     _zzq_args[3] = (unsigned int)(_zzq_arg3);                      \
     _zzq_args[4] = (unsigned int)(_zzq_arg4);                      \
     _zzq_args[5] = (unsigned int)(_zzq_arg5);                      \
     __asm__ volatile("move $a7, %1\n\t" /* default */              \
                      "move $t0, %2\n\t" /* ptr */                  \
                      __SPECIAL_INSTRUCTION_PREAMBLE                \
                      /* $a7 = client_request( $t0 ) */             \
                      "or[32] $t0, $t0, $t0\n\t"                    \
                      "move %0, $a7\n\t" /* result */               \
                      : "=r" (_zzq_result)                          \
                      : "r" (_zzq_default), "r" (&_zzq_args[0])     \
                      : "$a7", "$t0", "memory");                    \
     _zzq_result;                                                   \
  })

/* Fetch the non-redirected address of the wrapped function into
   _zzq_rlval.nraddr (zero when not running under Valgrind). */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                         \
   { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                    \
     volatile unsigned long int __addr;                             \
     __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                      /* $a7 = guest_NRADDR */                      \
                      "or[32] $t1, $t1, $t1\n\t"                    \
                      "move %0, $a7"     /*result*/                 \
                      : "=r" (__addr)                               \
                      :                                             \
                      : "$a7");                                     \
     _zzq_orig->nraddr = __addr;                                    \
   }

/* Emit a call through $t9 that Valgrind will not redirect. */
#define VALGRIND_CALL_NOREDIR_T9                                    \
                    __SPECIAL_INSTRUCTION_PREAMBLE                  \
                    /* call-noredir $25 */                          \
                    "or[32] $t2, $t2, $t2\n\t"

/* Ask Valgrind to inject IR at this point (VEX testing hook). */
#define VALGRIND_VEX_INJECT_IR()                                    \
 do {                                                               \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                     "or[32] $t3, $t3, $t3\n\t"                     \
                    );                                              \
 } while (0)

#endif
1070/* Insert assembly code for other platforms here... */
1071
1072#endif /* CONFIG_VALGRIND */
1073
1074
/* ------------------------------------------------------------------ */
/* PLATFORM SPECIFICS for FUNCTION WRAPPING.  This is all very        */
/* ugly.  It's the least-worst tradeoff I can think of.               */
/* ------------------------------------------------------------------ */

/* This section defines magic (a.k.a appalling-hack) macros for doing
   guaranteed-no-redirection macros, so as to get from function
   wrappers to the functions they are wrapping.  The whole point is to
   construct standard call sequences, but to do the call itself with a
   special no-redirect call pseudo-instruction that the JIT
   understands and handles specially.  This section is long and
   repetitious, and I can't see a way to make it shorter.

   The naming scheme is as follows:

      CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}

   'W' stands for "word" and 'v' for "void".  Hence there are
   different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
   and for each, the possibility of returning a word-typed result, or
   no result.
*/
1097
/* Use these to write the name of your wrapper.  NOTE: duplicates
   VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h.  NOTE also: inserts
   the default behaviour equivalence class tag "0000" into the name.
   See pub_tool_redir.h for details -- normally you don't need to
   think about this, though. */

/* Use an extra level of macroisation so as to ensure the soname/fnname
   args are fully macro-expanded before pasting them together. */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd

#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)

/* Use this macro from within a wrapper function to collect the
   context (address and possibly other info) of the original function.
   Once you have that you can then use it in one of the CALL_FN_
   macros.  The type of the argument _lval is OrigFn. */
#define VALGRIND_GET_ORIG_FN(_lval)  VALGRIND_GET_NR_CONTEXT(_lval)

/* Also provide end-user facilities for function replacement, rather
   than wrapping.  A replacement function differs from a wrapper in
   that it has no way to get hold of the original function being
   called, and hence no way to call onwards to it.  In a replacement
   function, VALGRIND_GET_ORIG_FN always returns zero. */

#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1131
/* Derivatives of the main macros below, for calling functions
   returning void.  Each simply discards the word result of the
   corresponding CALL_FN_W_* macro into a junk local. */

#define CALL_FN_v_v(fnptr)                                        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_v(_junk,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_W(_junk,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1166
1167/* ----------------- x86-{linux,darwin,solaris} ---------------- */
1168
#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin) \
    ||  defined(PLAT_x86_solaris)

/* These regs are trashed by the hidden call.  No need to mention eax
   as gcc can already see that, plus causes gcc to bomb. */
#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

#define VALGRIND_ALIGN_STACK            \
      "movl %%esp,%%edi\n\t"            \
      "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK          \
      "movl %%edi,%%esp\n\t"

/* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
   long) == 4.

   Each macro loads the target address and the arguments out of
   _argvec[], pushes the arguments right-to-left (with an extra
   "subl" where needed so the stack stays 16-byte aligned at the
   call), and makes the call via the no-redirect sequence
   VALGRIND_CALL_NOREDIR_EAX. */

#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11,arg12)                    \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 48(%%eax)\n\t"                                    \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || PLAT_x86_solaris */
1598
1599/* ---------------- amd64-{linux,darwin,solaris} --------------- */
1600
1601#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
1602 || defined(PLAT_amd64_solaris)
1603
1604/* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1605
1606/* These regs are trashed by the hidden call. */
1607#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \
1608 "rdi", "r8", "r9", "r10", "r11"
1609
1610/* This is all pretty complex. It's so as to make stack unwinding
1611 work reliably. See bug 243270. The basic problem is the sub and
1612 add of 128 of %rsp in all of the following macros. If gcc believes
1613 the CFA is in %rsp, then unwinding may fail, because what's at the
1614 CFA is not what gcc "expected" when it constructs the CFIs for the
1615 places where the macros are instantiated.
1616
1617 But we can't just add a CFI annotation to increase the CFA offset
1618 by 128, to match the sub of 128 from %rsp, because we don't know
1619 whether gcc has chosen %rsp as the CFA at that point, or whether it
1620 has chosen some other register (eg, %rbp). In the latter case,
1621 adding a CFI annotation to change the CFA offset is simply wrong.
1622
1623 So the solution is to get hold of the CFA using
1624 __builtin_dwarf_cfa(), put it in a known register, and add a
1625 CFI annotation to say what the register is. We choose %rbp for
1626 this (perhaps perversely), because:
1627
1628 (1) %rbp is already subject to unwinding. If a new register was
1629 chosen then the unwinder would have to unwind it in all stack
1630 traces, which is expensive, and
1631
1632 (2) %rbp is already subject to precise exception updates in the
1633 JIT. If a new register was chosen, we'd have to have precise
1634 exceptions for it too, which reduces performance of the
1635 generated code.
1636
1637 However .. one extra complication. We can't just whack the result
1638 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1639 list of trashed registers at the end of the inline assembly
1640 fragments; gcc won't allow %rbp to appear in that list. Hence
1641 instead we need to stash %rbp in %r15 for the duration of the asm,
1642 and say that %r15 is trashed instead. gcc seems happy to go with
1643 that.
1644
1645 Oh .. and this all needs to be conditionalised so that it is
1646 unchanged from before this commit, when compiled with older gccs
1647 that don't support __builtin_dwarf_cfa. Furthermore, since
1648 this header file is freestanding, it has to be independent of
1649 config.h, and so the following conditionalisation cannot depend on
1650 configure time checks.
1651
1652 Although it's not clear from
1653 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1654 this expression excludes Darwin.
1655 .cfi directives in Darwin assembly appear to be completely
1656 different and I haven't investigated how they work.
1657
1658 For even more entertainment value, note we have to use the
1659 completely undocumented __builtin_dwarf_cfa(), which appears to
1660 really compute the CFA, whereas __builtin_frame_address(0) claims
1661 to but actually doesn't. See
1662 https://bugs.kde.org/show_bug.cgi?id=243270#c47
1663*/
1664#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
1665# define __FRAME_POINTER \
1666 ,"r"(__builtin_dwarf_cfa())
1667# define VALGRIND_CFI_PROLOGUE \
1668 "movq %%rbp, %%r15\n\t" \
1669 "movq %2, %%rbp\n\t" \
1670 ".cfi_remember_state\n\t" \
1671 ".cfi_def_cfa rbp, 0\n\t"
1672# define VALGRIND_CFI_EPILOGUE \
1673 "movq %%r15, %%rbp\n\t" \
1674 ".cfi_restore_state\n\t"
1675#else
1676# define __FRAME_POINTER
1677# define VALGRIND_CFI_PROLOGUE
1678# define VALGRIND_CFI_EPILOGUE
1679#endif
1680
1681/* Macros to save and align the stack before making a function
1682 call and restore it afterwards as gcc may not keep the stack
1683 pointer aligned if it doesn't realise calls are being made
1684 to other functions. */
1685
1686#define VALGRIND_ALIGN_STACK \
1687 "movq %%rsp,%%r14\n\t" \
1688 "andq $0xfffffffffffffff0,%%rsp\n\t"
1689#define VALGRIND_RESTORE_STACK \
1690 "movq %%r14,%%rsp\n\t"
1691
1692/* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1693 long) == 8. */
1694
1695/* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1696 macros. In order not to trash the stack redzone, we need to drop
1697 %rsp by 128 before the hidden call, and restore afterwards. The
1698 nastyness is that it is only by luck that the stack still appears
1699 to be unwindable during the hidden call - since then the behaviour
1700 of any routine using this macro does not match what the CFI data
1701 says. Sigh.
1702
1703 Why is this important? Imagine that a wrapper has a stack
1704 allocated local, and passes to the hidden call, a pointer to it.
1705 Because gcc does not know about the hidden call, it may allocate
1706 that local in the redzone. Unfortunately the hidden call may then
1707 trash it before it comes to use it. So we must step clear of the
1708 redzone, for the duration of the hidden call, to make it safe.
1709
1710 Probably the same problem afflicts the other redzone-style ABIs too
1711 (ppc64-linux); but for those, the stack is
1712 self describing (none of this CFI nonsense) so at least messing
1713 with the stack pointer doesn't give a danger of non-unwindable
1714 stack. */
1715
1716#define CALL_FN_W_v(lval, orig) \
1717 do { \
1718 volatile OrigFn _orig = (orig); \
1719 volatile unsigned long _argvec[1]; \
1720 volatile unsigned long _res; \
1721 _argvec[0] = (unsigned long)_orig.nraddr; \
1722 __asm__ volatile( \
1723 VALGRIND_CFI_PROLOGUE \
1724 VALGRIND_ALIGN_STACK \
1725 "subq $128,%%rsp\n\t" \
1726 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1727 VALGRIND_CALL_NOREDIR_RAX \
1728 VALGRIND_RESTORE_STACK \
1729 VALGRIND_CFI_EPILOGUE \
1730 : /*out*/ "=a" (_res) \
1731 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1732 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1733 ); \
1734 lval = (__typeof__(lval)) _res; \
1735 } while (0)
1736
1737#define CALL_FN_W_W(lval, orig, arg1) \
1738 do { \
1739 volatile OrigFn _orig = (orig); \
1740 volatile unsigned long _argvec[2]; \
1741 volatile unsigned long _res; \
1742 _argvec[0] = (unsigned long)_orig.nraddr; \
1743 _argvec[1] = (unsigned long)(arg1); \
1744 __asm__ volatile( \
1745 VALGRIND_CFI_PROLOGUE \
1746 VALGRIND_ALIGN_STACK \
1747 "subq $128,%%rsp\n\t" \
1748 "movq 8(%%rax), %%rdi\n\t" \
1749 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1750 VALGRIND_CALL_NOREDIR_RAX \
1751 VALGRIND_RESTORE_STACK \
1752 VALGRIND_CFI_EPILOGUE \
1753 : /*out*/ "=a" (_res) \
1754 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1755 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1756 ); \
1757 lval = (__typeof__(lval)) _res; \
1758 } while (0)
1759
1760#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1761 do { \
1762 volatile OrigFn _orig = (orig); \
1763 volatile unsigned long _argvec[3]; \
1764 volatile unsigned long _res; \
1765 _argvec[0] = (unsigned long)_orig.nraddr; \
1766 _argvec[1] = (unsigned long)(arg1); \
1767 _argvec[2] = (unsigned long)(arg2); \
1768 __asm__ volatile( \
1769 VALGRIND_CFI_PROLOGUE \
1770 VALGRIND_ALIGN_STACK \
1771 "subq $128,%%rsp\n\t" \
1772 "movq 16(%%rax), %%rsi\n\t" \
1773 "movq 8(%%rax), %%rdi\n\t" \
1774 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1775 VALGRIND_CALL_NOREDIR_RAX \
1776 VALGRIND_RESTORE_STACK \
1777 VALGRIND_CFI_EPILOGUE \
1778 : /*out*/ "=a" (_res) \
1779 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1780 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1781 ); \
1782 lval = (__typeof__(lval)) _res; \
1783 } while (0)
1784
1785#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1786 do { \
1787 volatile OrigFn _orig = (orig); \
1788 volatile unsigned long _argvec[4]; \
1789 volatile unsigned long _res; \
1790 _argvec[0] = (unsigned long)_orig.nraddr; \
1791 _argvec[1] = (unsigned long)(arg1); \
1792 _argvec[2] = (unsigned long)(arg2); \
1793 _argvec[3] = (unsigned long)(arg3); \
1794 __asm__ volatile( \
1795 VALGRIND_CFI_PROLOGUE \
1796 VALGRIND_ALIGN_STACK \
1797 "subq $128,%%rsp\n\t" \
1798 "movq 24(%%rax), %%rdx\n\t" \
1799 "movq 16(%%rax), %%rsi\n\t" \
1800 "movq 8(%%rax), %%rdi\n\t" \
1801 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1802 VALGRIND_CALL_NOREDIR_RAX \
1803 VALGRIND_RESTORE_STACK \
1804 VALGRIND_CFI_EPILOGUE \
1805 : /*out*/ "=a" (_res) \
1806 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1807 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1808 ); \
1809 lval = (__typeof__(lval)) _res; \
1810 } while (0)
1811
1812#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1813 do { \
1814 volatile OrigFn _orig = (orig); \
1815 volatile unsigned long _argvec[5]; \
1816 volatile unsigned long _res; \
1817 _argvec[0] = (unsigned long)_orig.nraddr; \
1818 _argvec[1] = (unsigned long)(arg1); \
1819 _argvec[2] = (unsigned long)(arg2); \
1820 _argvec[3] = (unsigned long)(arg3); \
1821 _argvec[4] = (unsigned long)(arg4); \
1822 __asm__ volatile( \
1823 VALGRIND_CFI_PROLOGUE \
1824 VALGRIND_ALIGN_STACK \
1825 "subq $128,%%rsp\n\t" \
1826 "movq 32(%%rax), %%rcx\n\t" \
1827 "movq 24(%%rax), %%rdx\n\t" \
1828 "movq 16(%%rax), %%rsi\n\t" \
1829 "movq 8(%%rax), %%rdi\n\t" \
1830 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1831 VALGRIND_CALL_NOREDIR_RAX \
1832 VALGRIND_RESTORE_STACK \
1833 VALGRIND_CFI_EPILOGUE \
1834 : /*out*/ "=a" (_res) \
1835 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1836 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1837 ); \
1838 lval = (__typeof__(lval)) _res; \
1839 } while (0)
1840
/* As CALL_FN_W_WWWW, for 5 args: arg5 additionally goes in %r8. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
1871
/* As CALL_FN_W_WWWW, for 6 args: all six in registers
   (%rdi,%rsi,%rdx,%rcx,%r8,%r9); nothing on the stack. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
1904
/* 7 args: six in registers, arg7 pushed on the stack.  The offset
   here is $136 rather than $128: one extra 8-byte slot so that after
   the single odd pushq, %rsp is again 16-byte aligned at the call. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $136,%%rsp\n\t" \
         "pushq 56(%%rax)\n\t" \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
1940
/* 8 args: six in registers, arg7/arg8 on the stack (pushed in
   reverse order).  Two pushes -> even, so the plain $128 red-zone
   skip keeps %rsp 16-byte aligned at the call. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "pushq 64(%%rax)\n\t" \
         "pushq 56(%%rax)\n\t" \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
1978
/* 9 args: six in registers, arg7..arg9 on the stack; $136 because
   three (odd) pushes would otherwise break 16-byte alignment. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $136,%%rsp\n\t" \
         "pushq 72(%%rax)\n\t" \
         "pushq 64(%%rax)\n\t" \
         "pushq 56(%%rax)\n\t" \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2018
/* 10 args: six in registers, arg7..arg10 on the stack (four pushes,
   even, so plain $128 red-zone skip suffices). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "pushq 80(%%rax)\n\t" \
         "pushq 72(%%rax)\n\t" \
         "pushq 64(%%rax)\n\t" \
         "pushq 56(%%rax)\n\t" \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2060
/* 11 args: six in registers, arg7..arg11 on the stack ($136 for the
   odd number of pushes, preserving 16-byte alignment). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $136,%%rsp\n\t" \
         "pushq 88(%%rax)\n\t" \
         "pushq 80(%%rax)\n\t" \
         "pushq 72(%%rax)\n\t" \
         "pushq 64(%%rax)\n\t" \
         "pushq 56(%%rax)\n\t" \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2104
/* 12 args: six in registers, arg7..arg12 on the stack (six pushes,
   even, so plain $128 red-zone skip suffices). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "pushq 96(%%rax)\n\t" \
         "pushq 88(%%rax)\n\t" \
         "pushq 80(%%rax)\n\t" \
         "pushq 72(%%rax)\n\t" \
         "pushq 64(%%rax)\n\t" \
         "pushq 56(%%rax)\n\t" \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2150
2151#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
2152
2153/* ------------------------ ppc32-linux ------------------------ */
2154
2155#if defined(PLAT_ppc32_linux)
2156
2157/* This is useful for finding out about the on-stack stuff:
2158
2159 extern int f9 ( int,int,int,int,int,int,int,int,int );
2160 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2161 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2162 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2163
2164 int g9 ( void ) {
2165 return f9(11,22,33,44,55,66,77,88,99);
2166 }
2167 int g10 ( void ) {
2168 return f10(11,22,33,44,55,66,77,88,99,110);
2169 }
2170 int g11 ( void ) {
2171 return f11(11,22,33,44,55,66,77,88,99,110,121);
2172 }
2173 int g12 ( void ) {
2174 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2175 }
2176*/
2177
2178/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2179
/* These regs are trashed by the hidden call: the volatile (caller-
   saved) register set of the ppc32 ELF ABI, listed as clobbers so
   gcc does not keep live values in them across the call.
   NOTE(review): r13 is listed although it is the small-data base
   register -- matches upstream valgrind.h; confirm before changing. */
#define __CALLER_SAVED_REGS \
   "lr", "ctr", "xer", \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
   "r11", "r12", "r13"
2186
2187/* Macros to save and align the stack before making a function
2188 call and restore it afterwards as gcc may not keep the stack
2189 pointer aligned if it doesn't realise calls are being made
2190 to other functions. */
2191
#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t" /* save r1 (sp) in r28, restored below */ \
   "rlwinm 1,1,0,0,27\n\t" /* clear low 4 bits: 16-byte align sp */
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t" /* put back the sp saved by VALGRIND_ALIGN_STACK */
2197
2198/* These CALL_FN_ macros assume that on ppc32-linux,
2199 sizeof(unsigned long) == 4. */
2200
/* ppc32: call the 0-argument function at _orig.nraddr with
   Valgrind's redirection suppressed; result taken from r3. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2220
/* ppc32: call the 1-argument function at _orig.nraddr with
   redirection suppressed; arg1 -> r3, result <- r3.  The cast
   operand is parenthesized so conditional-expression arguments
   (e.g. `a ? b : c`) convert as a whole, matching the amd64
   variants of these macros. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2242
/* ppc32: 2-argument hidden call; args -> r3,r4.  Macro arguments
   are parenthesized in the casts (macro hygiene, consistent with
   the amd64 variants). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2266
/* ppc32: 3-argument hidden call; args -> r3..r5.  Macro arguments
   parenthesized in the casts (hygiene, matches amd64 variants). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2292
/* ppc32: 4-argument hidden call; args -> r3..r6.  Macro arguments
   parenthesized in the casts (hygiene, matches amd64 variants). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2320
/* ppc32: 5-argument hidden call; args -> r3..r7.  Macro arguments
   parenthesized in the casts (hygiene, matches amd64 variants). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2350
/* ppc32: 6-argument hidden call; args -> r3..r8.  Macro arguments
   parenthesized in the casts (hygiene, matches amd64 variants). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2382
/* ppc32: 7-argument hidden call; args -> r3..r9.  Macro arguments
   parenthesized in the casts (hygiene, matches amd64 variants). */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2417
/* ppc32: 8-argument hidden call; args -> r3..r10 (all register
   argument slots used, none on the stack).  Macro arguments
   parenthesized in the casts (hygiene, matches amd64 variants). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2454
/* ppc32: 9-argument hidden call; args 1-8 -> r3..r10, arg9 stored
   in the stack parameter area at 8(r1) after making 16 bytes of
   room.  Macro arguments parenthesized in the casts (hygiene,
   matches amd64 variants). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-16\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2497
/* ppc32: 10-argument hidden call; args 1-8 -> r3..r10, arg9/arg10
   stored at 8(r1)/12(r1).  Macro arguments parenthesized in the
   casts (hygiene, matches amd64 variants). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-16\n\t" \
         /* arg10 */ \
         "lwz 3,40(11)\n\t" \
         "stw 3,12(1)\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2544
/* ppc32: 11-argument hidden call; args 1-8 -> r3..r10, arg9..arg11
   stored at 8..16(r1) after making 32 bytes of room.  Macro
   arguments parenthesized in the casts (hygiene, matches amd64
   variants). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-32\n\t" \
         /* arg11 */ \
         "lwz 3,44(11)\n\t" \
         "stw 3,16(1)\n\t" \
         /* arg10 */ \
         "lwz 3,40(11)\n\t" \
         "stw 3,12(1)\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2595
/* ppc32: 12-argument hidden call; args 1-8 -> r3..r10, arg9..arg12
   stored at 8..20(r1) after making 32 bytes of room.  Macro
   arguments parenthesized in the casts (hygiene, matches amd64
   variants). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-32\n\t" \
         /* arg12 */ \
         "lwz 3,48(11)\n\t" \
         "stw 3,20(1)\n\t" \
         /* arg11 */ \
         "lwz 3,44(11)\n\t" \
         "stw 3,16(1)\n\t" \
         /* arg10 */ \
         "lwz 3,40(11)\n\t" \
         "stw 3,12(1)\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2650
2651#endif /* PLAT_ppc32_linux */
2652
2653/* ------------------------ ppc64-linux ------------------------ */
2654
2655#if defined(PLAT_ppc64be_linux)
2656
2657/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2658
2659/* These regs are trashed by the hidden call. */
/* Volatile (caller-saved) registers of the ppc64 ELF ABI, listed as
   asm clobbers.  Unlike the ppc32 list above, r2 (TOC pointer) is
   not clobbered here -- it is saved/restored explicitly by each
   CALL_FN_ macro below. */
#define __CALLER_SAVED_REGS \
   "lr", "ctr", "xer", \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
   "r11", "r12", "r13"
2665
2666/* Macros to save and align the stack before making a function
2667 call and restore it afterwards as gcc may not keep the stack
2668 pointer aligned if it doesn't realise calls are being made
2669 to other functions. */
2670
#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t" /* save r1 (sp) in r28, restored below */ \
   "rldicr 1,1,0,59\n\t" /* clear low 4 bits: 16-byte align sp */
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t" /* put back the sp saved by VALGRIND_ALIGN_STACK */
2676
2677/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2678 long) == 8. */
2679
/* ppc64be: call the 0-argument function at _orig.nraddr with
   redirection suppressed.  The caller's TOC pointer (r2) is saved
   at -16(11) and the callee's TOC (from _orig.r2, at -8(11)) is
   installed for the call, then the caller's TOC is restored.
   %1 points at _argvec[2], so _argvec[1]/_argvec[0] sit at
   negative offsets from r11. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+0]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2705
/* ppc64be: 1-argument hidden call; arg1 -> r3; caller's TOC (r2)
   saved/restored around the call as in CALL_FN_W_v.  The cast
   operand is parenthesized so conditional-expression arguments
   convert as a whole (matches the amd64 variants). */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+1]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)(arg1); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2733
/* ppc64be: 2-argument hidden call; args -> r3,r4; TOC handled as in
   CALL_FN_W_v.  Macro arguments parenthesized in the casts
   (hygiene, matches amd64 variants). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+2]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)(arg1); \
      _argvec[2+2] = (unsigned long)(arg2); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2763
/* ppc64be: 3-argument hidden call; args -> r3..r5; TOC handled as
   in CALL_FN_W_v.  Macro arguments parenthesized in the casts
   (hygiene, matches amd64 variants). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+3]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)(arg1); \
      _argvec[2+2] = (unsigned long)(arg2); \
      _argvec[2+3] = (unsigned long)(arg3); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2795
/* ppc64be: 4-argument hidden call; args -> r3..r6; TOC handled as
   in CALL_FN_W_v.  Macro arguments parenthesized in the casts
   (hygiene, matches amd64 variants). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+4]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)(arg1); \
      _argvec[2+2] = (unsigned long)(arg2); \
      _argvec[2+3] = (unsigned long)(arg3); \
      _argvec[2+4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 6, 32(11)\n\t" /* arg4->r6 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2829
/* ppc64be: call a word-returning function taking 5 word-sized args
   (args go to r3..r7).  Layout identical to the other CALL_FN_W_*
   ppc64be macros: r11 = &_argvec[2]; -16(r11) saves the caller's
   TOC pointer, -8(r11) holds the callee's. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+5];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2865
/* ppc64be: call a word-returning function taking 6 word-sized args
   (args go to r3..r8).  Same TOC save/restore protocol around the
   redirected branch through r11 as the other ppc64be macros. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+6];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2903
/* ppc64be: call a word-returning function taking 7 word-sized args
   (args go to r3..r9) -- still all in registers, no stack spill
   needed yet. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+7];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                        \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2944
/* ppc64be: call a word-returning function taking 8 word-sized args
   (args go to r3..r10) -- the last arity that fits entirely in the
   argument registers. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+8];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2987
/* ppc64be: call a word-returning function taking 9 word-sized args.
   Args 1-8 go to r3..r10; arg9 no longer fits in registers, so the
   frame is grown by 128 bytes and arg9 is stored to the stack
   parameter area at 112(r1) before the call.  VALGRIND_RESTORE_STACK
   undoes the frame expansion (r28 holds the original sp). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+9];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-128\n\t" /* expand stack frame */             \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3036
/* ppc64be: call a word-returning function taking 10 word-sized
   args.  Args 1-8 go to r3..r10; args 9-10 are spilled to the
   expanded (128-byte) stack frame at 112(r1) and 120(r1).  Stack
   args are written highest-first so r3 can be reused as scratch
   before it is finally loaded with arg1. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+10];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-128\n\t" /* expand stack frame */             \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3089
/* ppc64be: call a word-returning function taking 11 word-sized
   args.  Args 1-8 go to r3..r10; args 9-11 are spilled to the
   stack at 112/120/128(r1), so the frame is grown by 144 bytes
   (keeping 16-byte alignment) instead of 128. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+11];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-144\n\t" /* expand stack frame */             \
         /* arg11 */                                              \
         "ld 3,88(11)\n\t"                                        \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3146
/* ppc64be: call a word-returning function taking 12 word-sized
   args.  Args 1-8 go to r3..r10; args 9-12 are spilled to the
   stack at 112..136(r1) inside a 144-byte frame expansion. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,arg12)           \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+12];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      _argvec[2+12] = (unsigned long)arg12;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-144\n\t" /* expand stack frame */             \
         /* arg12 */                                              \
         "ld 3,96(11)\n\t"                                        \
         "std 3,136(1)\n\t"                                       \
         /* arg11 */                                              \
         "ld 3,88(11)\n\t"                                        \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3207
3208#endif /* PLAT_ppc64be_linux */
3209
3210/* ------------------------- ppc64le-linux ----------------------- */
3211#if defined(PLAT_ppc64le_linux)
3212
3213/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
3214
/* These regs are trashed by the hidden call (ppc64le).  Note that
   r2 (the TOC pointer) is not listed: the CALL_FN_ macros below
   save and restore it explicitly around the call. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
   "r11", "r12", "r13"
3221
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save the current sp (r1) in r28, then clear the low four bits of
   r1 (rldicr with mask bits 0..59) to force 16-byte alignment.
   RESTORE copies r28 back; r28 must therefore appear in the clobber
   list of every asm block that uses these. */
#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t" \
   "rldicr 1,1,0,59\n\t"
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t"
3232
3233/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
3234 long) == 8. */
3235
/* ppc64le: call a word-returning function taking no args.  The
   little-endian variant uses r12 (not r11) as the target/argvec
   register and branches via the R12 no-redirect macro.  As on be:
   r12 = &_argvec[2]; 0(r12) = target; the caller's TOC (r2) is
   saved at -16(r12) (= _argvec[0]) and the callee's TOC is loaded
   from -8(r12) (= _argvec[1]). */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3261
/* ppc64le: call a word-returning function taking 1 word-sized arg
   (arg1 -> r3).  Same r12/_argvec layout and TOC save/restore as
   CALL_FN_W_v. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3289
/* ppc64le: call a word-returning function taking 2 word-sized args
   (args -> r3, r4). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3319
/* ppc64le: call a word-returning function taking 3 word-sized args
   (args -> r3..r5). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3351
/* ppc64le: call a word-returning function taking 4 word-sized args
   (args -> r3..r6). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3385
/* ppc64le: call a word-returning function taking 5 word-sized args
   (args -> r3..r7). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+5];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3421
/* ppc64le: call a word-returning function taking 6 word-sized args
   (args -> r3..r8). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+6];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3459
/* ppc64le: call a word-returning function taking 7 word-sized args
   (args -> r3..r9); all still in registers. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+7];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3500
/* ppc64le: call a word-returning function taking 8 word-sized args
   (args -> r3..r10); the last arity fitting entirely in registers. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+8];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3543
/* ppc64le: call a word-returning function taking 9 word-sized args.
   Args 1-8 go to r3..r10; arg9 is spilled to the expanded (128-byte)
   stack frame at 96(r1).  (The le stack-arg offsets, 96 onward,
   differ from the be variant's 112 onward.) */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+9];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-128\n\t" /* expand stack frame */             \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3592
/* ppc64le: call a word-returning function taking 10 word-sized
   args.  Args 1-8 go to r3..r10; args 9-10 are spilled to the
   expanded (128-byte) frame at 96(r1) and 104(r1), written
   highest-first so r3 can serve as scratch before receiving arg1. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+10];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-128\n\t" /* expand stack frame */             \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3645
/* ppc64le: call a word-returning function taking 11 word-sized
   args.  Args 1-8 go to r3..r10; args 9-11 are spilled to the
   stack at 96/104/112(r1), so the frame is grown by 144 bytes
   (keeping 16-byte alignment) instead of 128. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+11];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-144\n\t" /* expand stack frame */             \
         /* arg11 */                                              \
         "ld 3,88(12)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3702
/* ppc64le: call a 12-argument word function.  Args 1-8 travel in
   r3-r10; args 9-12 are copied into the 144-byte expanded stack frame
   at offsets 96..120.  r2 (TOC) is saved/restored around the call and
   the target address travels in r12. */
3703#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
3704                                arg7,arg8,arg9,arg10,arg11,arg12) \
3705   do {                                                           \
3706      volatile OrigFn        _orig = (orig);                      \
3707      volatile unsigned long _argvec[3+12];                       \
3708      volatile unsigned long _res;                                \
3709      /* _argvec[0] holds current r2 across the call */           \
3710      _argvec[1] = (unsigned long)_orig.r2;                       \
3711      _argvec[2] = (unsigned long)_orig.nraddr;                   \
3712      _argvec[2+1] = (unsigned long)arg1;                         \
3713      _argvec[2+2] = (unsigned long)arg2;                         \
3714      _argvec[2+3] = (unsigned long)arg3;                         \
3715      _argvec[2+4] = (unsigned long)arg4;                         \
3716      _argvec[2+5] = (unsigned long)arg5;                         \
3717      _argvec[2+6] = (unsigned long)arg6;                         \
3718      _argvec[2+7] = (unsigned long)arg7;                         \
3719      _argvec[2+8] = (unsigned long)arg8;                         \
3720      _argvec[2+9] = (unsigned long)arg9;                         \
3721      _argvec[2+10] = (unsigned long)arg10;                       \
3722      _argvec[2+11] = (unsigned long)arg11;                       \
3723      _argvec[2+12] = (unsigned long)arg12;                       \
3724      __asm__ volatile(                                           \
3725         VALGRIND_ALIGN_STACK                                     \
3726         "mr 12,%1\n\t"                                           \
3727         "std 2,-16(12)\n\t"  /* save tocptr */                   \
3728         "ld 2,-8(12)\n\t"  /* use nraddr's tocptr */             \
3729         "addi 1,1,-144\n\t"  /* expand stack frame */            \
3730         /* arg12 */                                              \
3731         "ld 3,96(12)\n\t"                                        \
3732         "std 3,120(1)\n\t"                                       \
3733         /* arg11 */                                              \
3734         "ld 3,88(12)\n\t"                                        \
3735         "std 3,112(1)\n\t"                                       \
3736         /* arg10 */                                              \
3737         "ld 3,80(12)\n\t"                                        \
3738         "std 3,104(1)\n\t"                                       \
3739         /* arg9 */                                               \
3740         "ld 3,72(12)\n\t"                                        \
3741         "std 3,96(1)\n\t"                                        \
3742         /* args1-8 */                                            \
3743         "ld 3, 8(12)\n\t"  /* arg1->r3 */                        \
3744         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
3745         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
3746         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
3747         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
3748         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
3749         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
3750         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
3751         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
3752         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
3753         "mr 12,%1\n\t"                                           \
3754         "mr %0,3\n\t"                                            \
3755         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
3756         VALGRIND_RESTORE_STACK                                   \
3757         : /*out*/ "=r" (_res)                                    \
3758         : /*in*/ "r" (&_argvec[2])                               \
3759         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
3760      );                                                          \
3761      lval = (__typeof__(lval)) _res;                             \
3762   } while (0)
3763
3764#endif /* PLAT_ppc64le_linux */
3765
3766/* ------------------------- arm-linux ------------------------- */
3767
3768#if defined(PLAT_arm_linux)
3769
3770/* These regs are trashed by the hidden call. */
/* r0-r3, r12 (ip) and r14 (lr) are caller-saved in the ARM calling
   convention; r4 is listed too because the macros below use it as a
   scratch register for the target address and stack alignment. */
3771#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"
3772
3773/* Macros to save and align the stack before making a function
3774   call and restore it afterwards as gcc may not keep the stack
3775   pointer aligned if it doesn't realise calls are being made
3776   to other functions. */
3777
3778/* This is a bit tricky.  We store the original stack pointer in r10
3779   as it is callee-saves.  gcc doesn't allow the use of r11 for some
3780   reason.  Also, we can't directly "bic" the stack pointer in thumb
3781   mode since r13 isn't an allowed register number in that context.
3782   So use r4 as a temporary, since that is about to get trashed
3783   anyway, just after each use of this macro.  Side effect is we need
3784   to be very careful about any future changes, since
3785   VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
/* Rounds sp down to an 8-byte boundary, keeping the original sp in r10. */
3786#define VALGRIND_ALIGN_STACK \
3787   "mov r10, sp\n\t" \
3788   "mov r4, sp\n\t" \
3789   "bic r4, r4, #7\n\t" \
3790   "mov sp, r4\n\t"
3791#define VALGRIND_RESTORE_STACK \
3792   "mov sp, r10\n\t"
3793
3794/* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
3795 long) == 4. */
3796
/* arm: call a 0-argument word function.  The target address is loaded
   into r4 and invoked via the no-redirect branch; the result comes
   back in r0. */
3797#define CALL_FN_W_v(lval, orig)                                   \
3798   do {                                                           \
3799      volatile OrigFn        _orig = (orig);                      \
3800      volatile unsigned long _argvec[1];                          \
3801      volatile unsigned long _res;                                \
3802      _argvec[0] = (unsigned long)_orig.nraddr;                   \
3803      __asm__ volatile(                                           \
3804         VALGRIND_ALIGN_STACK                                     \
3805         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
3806         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
3807         VALGRIND_RESTORE_STACK                                   \
3808         "mov %0, r0\n"                                           \
3809         : /*out*/ "=r" (_res)                                    \
3810         : /*in*/ "0" (&_argvec[0])                               \
3811         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
3812      );                                                          \
3813      lval = (__typeof__(lval)) _res;                             \
3814   } while (0)
3815
/* arm: call a 1-argument word function; arg1 goes in r0. */
3816#define CALL_FN_W_W(lval, orig, arg1)                             \
3817   do {                                                           \
3818      volatile OrigFn        _orig = (orig);                      \
3819      volatile unsigned long _argvec[2];                          \
3820      volatile unsigned long _res;                                \
3821      _argvec[0] = (unsigned long)_orig.nraddr;                   \
3822      _argvec[1] = (unsigned long)(arg1);                         \
3823      __asm__ volatile(                                           \
3824         VALGRIND_ALIGN_STACK                                     \
3825         "ldr r0, [%1, #4] \n\t"                                  \
3826         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
3827         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
3828         VALGRIND_RESTORE_STACK                                   \
3829         "mov %0, r0\n"                                           \
3830         : /*out*/ "=r" (_res)                                    \
3831         : /*in*/ "0" (&_argvec[0])                               \
3832         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
3833      );                                                          \
3834      lval = (__typeof__(lval)) _res;                             \
3835   } while (0)
3836
/* arm: call a 2-argument word function; args go in r0-r1. */
3837#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
3838   do {                                                           \
3839      volatile OrigFn        _orig = (orig);                      \
3840      volatile unsigned long _argvec[3];                          \
3841      volatile unsigned long _res;                                \
3842      _argvec[0] = (unsigned long)_orig.nraddr;                   \
3843      _argvec[1] = (unsigned long)(arg1);                         \
3844      _argvec[2] = (unsigned long)(arg2);                         \
3845      __asm__ volatile(                                           \
3846         VALGRIND_ALIGN_STACK                                     \
3847         "ldr r0, [%1, #4] \n\t"                                  \
3848         "ldr r1, [%1, #8] \n\t"                                  \
3849         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
3850         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
3851         VALGRIND_RESTORE_STACK                                   \
3852         "mov %0, r0\n"                                           \
3853         : /*out*/ "=r" (_res)                                    \
3854         : /*in*/ "0" (&_argvec[0])                               \
3855         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
3856      );                                                          \
3857      lval = (__typeof__(lval)) _res;                             \
3858   } while (0)
3859
/* arm: call a 3-argument word function; args go in r0-r2. */
3860#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
3861   do {                                                           \
3862      volatile OrigFn        _orig = (orig);                      \
3863      volatile unsigned long _argvec[4];                          \
3864      volatile unsigned long _res;                                \
3865      _argvec[0] = (unsigned long)_orig.nraddr;                   \
3866      _argvec[1] = (unsigned long)(arg1);                         \
3867      _argvec[2] = (unsigned long)(arg2);                         \
3868      _argvec[3] = (unsigned long)(arg3);                         \
3869      __asm__ volatile(                                           \
3870         VALGRIND_ALIGN_STACK                                     \
3871         "ldr r0, [%1, #4] \n\t"                                  \
3872         "ldr r1, [%1, #8] \n\t"                                  \
3873         "ldr r2, [%1, #12] \n\t"                                 \
3874         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
3875         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
3876         VALGRIND_RESTORE_STACK                                   \
3877         "mov %0, r0\n"                                           \
3878         : /*out*/ "=r" (_res)                                    \
3879         : /*in*/ "0" (&_argvec[0])                               \
3880         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
3881      );                                                          \
3882      lval = (__typeof__(lval)) _res;                             \
3883   } while (0)
3884
/* arm: call a 4-argument word function; args fill r0-r3 exactly,
   so nothing is passed on the stack. */
3885#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
3886   do {                                                           \
3887      volatile OrigFn        _orig = (orig);                      \
3888      volatile unsigned long _argvec[5];                          \
3889      volatile unsigned long _res;                                \
3890      _argvec[0] = (unsigned long)_orig.nraddr;                   \
3891      _argvec[1] = (unsigned long)(arg1);                         \
3892      _argvec[2] = (unsigned long)(arg2);                         \
3893      _argvec[3] = (unsigned long)(arg3);                         \
3894      _argvec[4] = (unsigned long)(arg4);                         \
3895      __asm__ volatile(                                           \
3896         VALGRIND_ALIGN_STACK                                     \
3897         "ldr r0, [%1, #4] \n\t"                                  \
3898         "ldr r1, [%1, #8] \n\t"                                  \
3899         "ldr r2, [%1, #12] \n\t"                                 \
3900         "ldr r3, [%1, #16] \n\t"                                 \
3901         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
3902         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
3903         VALGRIND_RESTORE_STACK                                   \
3904         "mov %0, r0"                                             \
3905         : /*out*/ "=r" (_res)                                    \
3906         : /*in*/ "0" (&_argvec[0])                               \
3907         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
3908      );                                                          \
3909      lval = (__typeof__(lval)) _res;                             \
3910   } while (0)
3911
/* arm: call a 5-argument word function.  Args 1-4 go in r0-r3; arg5 is
   pushed on the stack, with an extra 4-byte adjustment first so the
   total stack change stays 8-byte aligned. */
3912#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
3913   do {                                                           \
3914      volatile OrigFn        _orig = (orig);                      \
3915      volatile unsigned long _argvec[6];                          \
3916      volatile unsigned long _res;                                \
3917      _argvec[0] = (unsigned long)_orig.nraddr;                   \
3918      _argvec[1] = (unsigned long)(arg1);                         \
3919      _argvec[2] = (unsigned long)(arg2);                         \
3920      _argvec[3] = (unsigned long)(arg3);                         \
3921      _argvec[4] = (unsigned long)(arg4);                         \
3922      _argvec[5] = (unsigned long)(arg5);                         \
3923      __asm__ volatile(                                           \
3924         VALGRIND_ALIGN_STACK                                     \
3925         "sub sp, sp, #4 \n\t"                                    \
3926         "ldr r0, [%1, #20] \n\t"                                 \
3927         "push {r0} \n\t"                                         \
3928         "ldr r0, [%1, #4] \n\t"                                  \
3929         "ldr r1, [%1, #8] \n\t"                                  \
3930         "ldr r2, [%1, #12] \n\t"                                 \
3931         "ldr r3, [%1, #16] \n\t"                                 \
3932         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
3933         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
3934         VALGRIND_RESTORE_STACK                                   \
3935         "mov %0, r0"                                             \
3936         : /*out*/ "=r" (_res)                                    \
3937         : /*in*/ "0" (&_argvec[0])                               \
3938         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
3939      );                                                          \
3940      lval = (__typeof__(lval)) _res;                             \
3941   } while (0)
3942
/* arm: call a 6-argument word function.  Args 1-4 go in r0-r3; args
   5-6 are pushed on the stack (8 bytes, already aligned). */
3943#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
3944   do {                                                           \
3945      volatile OrigFn        _orig = (orig);                      \
3946      volatile unsigned long _argvec[7];                          \
3947      volatile unsigned long _res;                                \
3948      _argvec[0] = (unsigned long)_orig.nraddr;                   \
3949      _argvec[1] = (unsigned long)(arg1);                         \
3950      _argvec[2] = (unsigned long)(arg2);                         \
3951      _argvec[3] = (unsigned long)(arg3);                         \
3952      _argvec[4] = (unsigned long)(arg4);                         \
3953      _argvec[5] = (unsigned long)(arg5);                         \
3954      _argvec[6] = (unsigned long)(arg6);                         \
3955      __asm__ volatile(                                           \
3956         VALGRIND_ALIGN_STACK                                     \
3957         "ldr r0, [%1, #20] \n\t"                                 \
3958         "ldr r1, [%1, #24] \n\t"                                 \
3959         "push {r0, r1} \n\t"                                     \
3960         "ldr r0, [%1, #4] \n\t"                                  \
3961         "ldr r1, [%1, #8] \n\t"                                  \
3962         "ldr r2, [%1, #12] \n\t"                                 \
3963         "ldr r3, [%1, #16] \n\t"                                 \
3964         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
3965         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
3966         VALGRIND_RESTORE_STACK                                   \
3967         "mov %0, r0"                                             \
3968         : /*out*/ "=r" (_res)                                    \
3969         : /*in*/ "0" (&_argvec[0])                               \
3970         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
3971      );                                                          \
3972      lval = (__typeof__(lval)) _res;                             \
3973   } while (0)
3974
/* arm: call a 7-argument word function.  Args 1-4 in r0-r3; args 5-7
   pushed (12 bytes plus a 4-byte pad for 8-byte alignment). */
3975#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
3976                     arg7)                                        \
3977   do {                                                           \
3978      volatile OrigFn        _orig = (orig);                      \
3979      volatile unsigned long _argvec[8];                          \
3980      volatile unsigned long _res;                                \
3981      _argvec[0] = (unsigned long)_orig.nraddr;                   \
3982      _argvec[1] = (unsigned long)(arg1);                         \
3983      _argvec[2] = (unsigned long)(arg2);                         \
3984      _argvec[3] = (unsigned long)(arg3);                         \
3985      _argvec[4] = (unsigned long)(arg4);                         \
3986      _argvec[5] = (unsigned long)(arg5);                         \
3987      _argvec[6] = (unsigned long)(arg6);                         \
3988      _argvec[7] = (unsigned long)(arg7);                         \
3989      __asm__ volatile(                                           \
3990         VALGRIND_ALIGN_STACK                                     \
3991         "sub sp, sp, #4 \n\t"                                    \
3992         "ldr r0, [%1, #20] \n\t"                                 \
3993         "ldr r1, [%1, #24] \n\t"                                 \
3994         "ldr r2, [%1, #28] \n\t"                                 \
3995         "push {r0, r1, r2} \n\t"                                 \
3996         "ldr r0, [%1, #4] \n\t"                                  \
3997         "ldr r1, [%1, #8] \n\t"                                  \
3998         "ldr r2, [%1, #12] \n\t"                                 \
3999         "ldr r3, [%1, #16] \n\t"                                 \
4000         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
4001         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
4002         VALGRIND_RESTORE_STACK                                   \
4003         "mov %0, r0"                                             \
4004         : /*out*/ "=r" (_res)                                    \
4005         : /*in*/ "0" (&_argvec[0])                               \
4006         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
4007      );                                                          \
4008      lval = (__typeof__(lval)) _res;                             \
4009   } while (0)
4010
/* arm: call an 8-argument word function.  Args 1-4 in r0-r3; args 5-8
   pushed on the stack (16 bytes, aligned). */
4011#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
4012                     arg7,arg8)                                   \
4013   do {                                                           \
4014      volatile OrigFn        _orig = (orig);                      \
4015      volatile unsigned long _argvec[9];                          \
4016      volatile unsigned long _res;                                \
4017      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4018      _argvec[1] = (unsigned long)(arg1);                         \
4019      _argvec[2] = (unsigned long)(arg2);                         \
4020      _argvec[3] = (unsigned long)(arg3);                         \
4021      _argvec[4] = (unsigned long)(arg4);                         \
4022      _argvec[5] = (unsigned long)(arg5);                         \
4023      _argvec[6] = (unsigned long)(arg6);                         \
4024      _argvec[7] = (unsigned long)(arg7);                         \
4025      _argvec[8] = (unsigned long)(arg8);                         \
4026      __asm__ volatile(                                           \
4027         VALGRIND_ALIGN_STACK                                     \
4028         "ldr r0, [%1, #20] \n\t"                                 \
4029         "ldr r1, [%1, #24] \n\t"                                 \
4030         "ldr r2, [%1, #28] \n\t"                                 \
4031         "ldr r3, [%1, #32] \n\t"                                 \
4032         "push {r0, r1, r2, r3} \n\t"                             \
4033         "ldr r0, [%1, #4] \n\t"                                  \
4034         "ldr r1, [%1, #8] \n\t"                                  \
4035         "ldr r2, [%1, #12] \n\t"                                 \
4036         "ldr r3, [%1, #16] \n\t"                                 \
4037         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
4038         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
4039         VALGRIND_RESTORE_STACK                                   \
4040         "mov %0, r0"                                             \
4041         : /*out*/ "=r" (_res)                                    \
4042         : /*in*/ "0" (&_argvec[0])                               \
4043         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
4044      );                                                          \
4045      lval = (__typeof__(lval)) _res;                             \
4046   } while (0)
4047
/* arm: call a 9-argument word function.  Args 1-4 in r0-r3; args 5-9
   pushed (20 bytes plus a 4-byte pad for 8-byte alignment). */
4048#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
4049                     arg7,arg8,arg9)                              \
4050   do {                                                           \
4051      volatile OrigFn        _orig = (orig);                      \
4052      volatile unsigned long _argvec[10];                         \
4053      volatile unsigned long _res;                                \
4054      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4055      _argvec[1] = (unsigned long)(arg1);                         \
4056      _argvec[2] = (unsigned long)(arg2);                         \
4057      _argvec[3] = (unsigned long)(arg3);                         \
4058      _argvec[4] = (unsigned long)(arg4);                         \
4059      _argvec[5] = (unsigned long)(arg5);                         \
4060      _argvec[6] = (unsigned long)(arg6);                         \
4061      _argvec[7] = (unsigned long)(arg7);                         \
4062      _argvec[8] = (unsigned long)(arg8);                         \
4063      _argvec[9] = (unsigned long)(arg9);                         \
4064      __asm__ volatile(                                           \
4065         VALGRIND_ALIGN_STACK                                     \
4066         "sub sp, sp, #4 \n\t"                                    \
4067         "ldr r0, [%1, #20] \n\t"                                 \
4068         "ldr r1, [%1, #24] \n\t"                                 \
4069         "ldr r2, [%1, #28] \n\t"                                 \
4070         "ldr r3, [%1, #32] \n\t"                                 \
4071         "ldr r4, [%1, #36] \n\t"                                 \
4072         "push {r0, r1, r2, r3, r4} \n\t"                         \
4073         "ldr r0, [%1, #4] \n\t"                                  \
4074         "ldr r1, [%1, #8] \n\t"                                  \
4075         "ldr r2, [%1, #12] \n\t"                                 \
4076         "ldr r3, [%1, #16] \n\t"                                 \
4077         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
4078         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
4079         VALGRIND_RESTORE_STACK                                   \
4080         "mov %0, r0"                                             \
4081         : /*out*/ "=r" (_res)                                    \
4082         : /*in*/ "0" (&_argvec[0])                               \
4083         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
4084      );                                                          \
4085      lval = (__typeof__(lval)) _res;                             \
4086   } while (0)
4087
/* arm: call a 10-argument word function.  Args 1-4 in r0-r3; arg10 is
   pushed first, then args 5-9, so they land in ascending order on the
   stack (24 bytes total, aligned). */
4088#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
4089                      arg7,arg8,arg9,arg10)                       \
4090   do {                                                           \
4091      volatile OrigFn        _orig = (orig);                      \
4092      volatile unsigned long _argvec[11];                         \
4093      volatile unsigned long _res;                                \
4094      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4095      _argvec[1] = (unsigned long)(arg1);                         \
4096      _argvec[2] = (unsigned long)(arg2);                         \
4097      _argvec[3] = (unsigned long)(arg3);                         \
4098      _argvec[4] = (unsigned long)(arg4);                         \
4099      _argvec[5] = (unsigned long)(arg5);                         \
4100      _argvec[6] = (unsigned long)(arg6);                         \
4101      _argvec[7] = (unsigned long)(arg7);                         \
4102      _argvec[8] = (unsigned long)(arg8);                         \
4103      _argvec[9] = (unsigned long)(arg9);                         \
4104      _argvec[10] = (unsigned long)(arg10);                       \
4105      __asm__ volatile(                                           \
4106         VALGRIND_ALIGN_STACK                                     \
4107         "ldr r0, [%1, #40] \n\t"                                 \
4108         "push {r0} \n\t"                                         \
4109         "ldr r0, [%1, #20] \n\t"                                 \
4110         "ldr r1, [%1, #24] \n\t"                                 \
4111         "ldr r2, [%1, #28] \n\t"                                 \
4112         "ldr r3, [%1, #32] \n\t"                                 \
4113         "ldr r4, [%1, #36] \n\t"                                 \
4114         "push {r0, r1, r2, r3, r4} \n\t"                         \
4115         "ldr r0, [%1, #4] \n\t"                                  \
4116         "ldr r1, [%1, #8] \n\t"                                  \
4117         "ldr r2, [%1, #12] \n\t"                                 \
4118         "ldr r3, [%1, #16] \n\t"                                 \
4119         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
4120         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
4121         VALGRIND_RESTORE_STACK                                   \
4122         "mov %0, r0"                                             \
4123         : /*out*/ "=r" (_res)                                    \
4124         : /*in*/ "0" (&_argvec[0])                               \
4125         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
4126      );                                                          \
4127      lval = (__typeof__(lval)) _res;                             \
4128   } while (0)
4129
/* arm: call an 11-argument word function.  Args 1-4 in r0-r3; args
   10-11 pushed first (after a 4-byte alignment pad), then args 5-9. */
4130#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
4131                      arg6,arg7,arg8,arg9,arg10,                  \
4132                      arg11)                                      \
4133   do {                                                           \
4134      volatile OrigFn        _orig = (orig);                      \
4135      volatile unsigned long _argvec[12];                         \
4136      volatile unsigned long _res;                                \
4137      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4138      _argvec[1] = (unsigned long)(arg1);                         \
4139      _argvec[2] = (unsigned long)(arg2);                         \
4140      _argvec[3] = (unsigned long)(arg3);                         \
4141      _argvec[4] = (unsigned long)(arg4);                         \
4142      _argvec[5] = (unsigned long)(arg5);                         \
4143      _argvec[6] = (unsigned long)(arg6);                         \
4144      _argvec[7] = (unsigned long)(arg7);                         \
4145      _argvec[8] = (unsigned long)(arg8);                         \
4146      _argvec[9] = (unsigned long)(arg9);                         \
4147      _argvec[10] = (unsigned long)(arg10);                       \
4148      _argvec[11] = (unsigned long)(arg11);                       \
4149      __asm__ volatile(                                           \
4150         VALGRIND_ALIGN_STACK                                     \
4151         "sub sp, sp, #4 \n\t"                                    \
4152         "ldr r0, [%1, #40] \n\t"                                 \
4153         "ldr r1, [%1, #44] \n\t"                                 \
4154         "push {r0, r1} \n\t"                                     \
4155         "ldr r0, [%1, #20] \n\t"                                 \
4156         "ldr r1, [%1, #24] \n\t"                                 \
4157         "ldr r2, [%1, #28] \n\t"                                 \
4158         "ldr r3, [%1, #32] \n\t"                                 \
4159         "ldr r4, [%1, #36] \n\t"                                 \
4160         "push {r0, r1, r2, r3, r4} \n\t"                         \
4161         "ldr r0, [%1, #4] \n\t"                                  \
4162         "ldr r1, [%1, #8] \n\t"                                  \
4163         "ldr r2, [%1, #12] \n\t"                                 \
4164         "ldr r3, [%1, #16] \n\t"                                 \
4165         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
4166         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
4167         VALGRIND_RESTORE_STACK                                   \
4168         "mov %0, r0"                                             \
4169         : /*out*/ "=r" (_res)                                    \
4170         : /*in*/ "0" (&_argvec[0])                               \
4171         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
4172      );                                                          \
4173      lval = (__typeof__(lval)) _res;                             \
4174   } while (0)
4175
/* arm: call a 12-argument word function.  Args 1-4 in r0-r3; args
   10-12 pushed first, then args 5-9 (32 bytes total, aligned). */
4176#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
4177                      arg6,arg7,arg8,arg9,arg10,                  \
4178                      arg11,arg12)                                \
4179   do {                                                           \
4180      volatile OrigFn        _orig = (orig);                      \
4181      volatile unsigned long _argvec[13];                         \
4182      volatile unsigned long _res;                                \
4183      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4184      _argvec[1] = (unsigned long)(arg1);                         \
4185      _argvec[2] = (unsigned long)(arg2);                         \
4186      _argvec[3] = (unsigned long)(arg3);                         \
4187      _argvec[4] = (unsigned long)(arg4);                         \
4188      _argvec[5] = (unsigned long)(arg5);                         \
4189      _argvec[6] = (unsigned long)(arg6);                         \
4190      _argvec[7] = (unsigned long)(arg7);                         \
4191      _argvec[8] = (unsigned long)(arg8);                         \
4192      _argvec[9] = (unsigned long)(arg9);                         \
4193      _argvec[10] = (unsigned long)(arg10);                       \
4194      _argvec[11] = (unsigned long)(arg11);                       \
4195      _argvec[12] = (unsigned long)(arg12);                       \
4196      __asm__ volatile(                                           \
4197         VALGRIND_ALIGN_STACK                                     \
4198         "ldr r0, [%1, #40] \n\t"                                 \
4199         "ldr r1, [%1, #44] \n\t"                                 \
4200         "ldr r2, [%1, #48] \n\t"                                 \
4201         "push {r0, r1, r2} \n\t"                                 \
4202         "ldr r0, [%1, #20] \n\t"                                 \
4203         "ldr r1, [%1, #24] \n\t"                                 \
4204         "ldr r2, [%1, #28] \n\t"                                 \
4205         "ldr r3, [%1, #32] \n\t"                                 \
4206         "ldr r4, [%1, #36] \n\t"                                 \
4207         "push {r0, r1, r2, r3, r4} \n\t"                         \
4208         "ldr r0, [%1, #4] \n\t"                                  \
4209         "ldr r1, [%1, #8] \n\t"                                  \
4210         "ldr r2, [%1, #12] \n\t"                                 \
4211         "ldr r3, [%1, #16] \n\t"                                 \
4212         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
4213         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
4214         VALGRIND_RESTORE_STACK                                   \
4215         "mov %0, r0"                                             \
4216         : /*out*/ "=r" (_res)                                    \
4217         : /*in*/ "0" (&_argvec[0])                               \
4218         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
4219      );                                                          \
4220      lval = (__typeof__(lval)) _res;                             \
4221   } while (0)
4222
4223#endif /* PLAT_arm_linux */
4224
4225/* ------------------------ arm64-linux ------------------------ */
4226
4227#if defined(PLAT_arm64_linux)
4228
4229/* These regs are trashed by the hidden call. */
/* x0-x18 plus x30 (lr) and all SIMD registers are listed; x19/x20 are
   included as well because the surrounding machinery may use them --
   NOTE(review): x19/x20 are callee-saved in AAPCS64, their presence
   here is deliberately conservative. */
4230#define __CALLER_SAVED_REGS \
4231     "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9", \
4232     "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", \
4233     "x18", "x19", "x20", "x30", \
4234     "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", \
4235     "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", \
4236     "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", \
4237     "v26", "v27", "v28", "v29", "v30", "v31"
4238
4239/* x21 is callee-saved, so we can use it to save and restore SP around
4240   the hidden call. */
/* Rounds sp down to a 16-byte boundary, keeping the original sp in x21. */
4241#define VALGRIND_ALIGN_STACK \
4242   "mov x21, sp\n\t" \
4243   "bic sp, x21, #15\n\t"
4244#define VALGRIND_RESTORE_STACK \
4245   "mov sp, x21\n\t"
4246
4247/* These CALL_FN_ macros assume that on arm64-linux,
4248 sizeof(unsigned long) == 8. */
4249
/* arm64: call a 0-argument word function.  The target address travels
   in x8; the result comes back in x0. */
4250#define CALL_FN_W_v(lval, orig)                                   \
4251   do {                                                           \
4252      volatile OrigFn        _orig = (orig);                      \
4253      volatile unsigned long _argvec[1];                          \
4254      volatile unsigned long _res;                                \
4255      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4256      __asm__ volatile(                                           \
4257         VALGRIND_ALIGN_STACK                                     \
4258         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4259         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4260         VALGRIND_RESTORE_STACK                                   \
4261         "mov %0, x0\n"                                           \
4262         : /*out*/ "=r" (_res)                                    \
4263         : /*in*/ "0" (&_argvec[0])                               \
4264         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4265      );                                                          \
4266      lval = (__typeof__(lval)) _res;                             \
4267   } while (0)
4268
/* arm64: call a 1-argument word function; arg1 goes in x0. */
4269#define CALL_FN_W_W(lval, orig, arg1)                             \
4270   do {                                                           \
4271      volatile OrigFn        _orig = (orig);                      \
4272      volatile unsigned long _argvec[2];                          \
4273      volatile unsigned long _res;                                \
4274      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4275      _argvec[1] = (unsigned long)(arg1);                         \
4276      __asm__ volatile(                                           \
4277         VALGRIND_ALIGN_STACK                                     \
4278         "ldr x0, [%1, #8] \n\t"                                  \
4279         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4280         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4281         VALGRIND_RESTORE_STACK                                   \
4282         "mov %0, x0\n"                                           \
4283         : /*out*/ "=r" (_res)                                    \
4284         : /*in*/ "0" (&_argvec[0])                               \
4285         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4286      );                                                          \
4287      lval = (__typeof__(lval)) _res;                             \
4288   } while (0)
4289
/* arm64: call a 2-argument word function; args go in x0-x1. */
4290#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
4291   do {                                                           \
4292      volatile OrigFn        _orig = (orig);                      \
4293      volatile unsigned long _argvec[3];                          \
4294      volatile unsigned long _res;                                \
4295      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4296      _argvec[1] = (unsigned long)(arg1);                         \
4297      _argvec[2] = (unsigned long)(arg2);                         \
4298      __asm__ volatile(                                           \
4299         VALGRIND_ALIGN_STACK                                     \
4300         "ldr x0, [%1, #8] \n\t"                                  \
4301         "ldr x1, [%1, #16] \n\t"                                 \
4302         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4303         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4304         VALGRIND_RESTORE_STACK                                   \
4305         "mov %0, x0\n"                                           \
4306         : /*out*/ "=r" (_res)                                    \
4307         : /*in*/ "0" (&_argvec[0])                               \
4308         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4309      );                                                          \
4310      lval = (__typeof__(lval)) _res;                             \
4311   } while (0)
4312
/* arm64: call a 3-argument word function; args go in x0-x2. */
4313#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
4314   do {                                                           \
4315      volatile OrigFn        _orig = (orig);                      \
4316      volatile unsigned long _argvec[4];                          \
4317      volatile unsigned long _res;                                \
4318      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4319      _argvec[1] = (unsigned long)(arg1);                         \
4320      _argvec[2] = (unsigned long)(arg2);                         \
4321      _argvec[3] = (unsigned long)(arg3);                         \
4322      __asm__ volatile(                                           \
4323         VALGRIND_ALIGN_STACK                                     \
4324         "ldr x0, [%1, #8] \n\t"                                  \
4325         "ldr x1, [%1, #16] \n\t"                                 \
4326         "ldr x2, [%1, #24] \n\t"                                 \
4327         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4328         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4329         VALGRIND_RESTORE_STACK                                   \
4330         "mov %0, x0\n"                                           \
4331         : /*out*/ "=r" (_res)                                    \
4332         : /*in*/ "0" (&_argvec[0])                               \
4333         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4334      );                                                          \
4335      lval = (__typeof__(lval)) _res;                             \
4336   } while (0)
4337
/* arm64: call a 4-argument word function; args go in x0-x3. */
4338#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
4339   do {                                                           \
4340      volatile OrigFn        _orig = (orig);                      \
4341      volatile unsigned long _argvec[5];                          \
4342      volatile unsigned long _res;                                \
4343      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4344      _argvec[1] = (unsigned long)(arg1);                         \
4345      _argvec[2] = (unsigned long)(arg2);                         \
4346      _argvec[3] = (unsigned long)(arg3);                         \
4347      _argvec[4] = (unsigned long)(arg4);                         \
4348      __asm__ volatile(                                           \
4349         VALGRIND_ALIGN_STACK                                     \
4350         "ldr x0, [%1, #8] \n\t"                                  \
4351         "ldr x1, [%1, #16] \n\t"                                 \
4352         "ldr x2, [%1, #24] \n\t"                                 \
4353         "ldr x3, [%1, #32] \n\t"                                 \
4354         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4355         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4356         VALGRIND_RESTORE_STACK                                   \
4357         "mov %0, x0"                                             \
4358         : /*out*/ "=r" (_res)                                    \
4359         : /*in*/ "0" (&_argvec[0])                               \
4360         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4361      );                                                          \
4362      lval = (__typeof__(lval)) _res;                             \
4363   } while (0)
4364
/* arm64: call a 5-argument word function; args go in x0-x4. */
4365#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
4366   do {                                                           \
4367      volatile OrigFn        _orig = (orig);                      \
4368      volatile unsigned long _argvec[6];                          \
4369      volatile unsigned long _res;                                \
4370      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4371      _argvec[1] = (unsigned long)(arg1);                         \
4372      _argvec[2] = (unsigned long)(arg2);                         \
4373      _argvec[3] = (unsigned long)(arg3);                         \
4374      _argvec[4] = (unsigned long)(arg4);                         \
4375      _argvec[5] = (unsigned long)(arg5);                         \
4376      __asm__ volatile(                                           \
4377         VALGRIND_ALIGN_STACK                                     \
4378         "ldr x0, [%1, #8] \n\t"                                  \
4379         "ldr x1, [%1, #16] \n\t"                                 \
4380         "ldr x2, [%1, #24] \n\t"                                 \
4381         "ldr x3, [%1, #32] \n\t"                                 \
4382         "ldr x4, [%1, #40] \n\t"                                 \
4383         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4384         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4385         VALGRIND_RESTORE_STACK                                   \
4386         "mov %0, x0"                                             \
4387         : /*out*/ "=r" (_res)                                    \
4388         : /*in*/ "0" (&_argvec[0])                               \
4389         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4390      );                                                          \
4391      lval = (__typeof__(lval)) _res;                             \
4392   } while (0)
4393
/* arm64: call a 6-argument word function; args go in x0-x5. */
4394#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
4395   do {                                                           \
4396      volatile OrigFn        _orig = (orig);                      \
4397      volatile unsigned long _argvec[7];                          \
4398      volatile unsigned long _res;                                \
4399      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4400      _argvec[1] = (unsigned long)(arg1);                         \
4401      _argvec[2] = (unsigned long)(arg2);                         \
4402      _argvec[3] = (unsigned long)(arg3);                         \
4403      _argvec[4] = (unsigned long)(arg4);                         \
4404      _argvec[5] = (unsigned long)(arg5);                         \
4405      _argvec[6] = (unsigned long)(arg6);                         \
4406      __asm__ volatile(                                           \
4407         VALGRIND_ALIGN_STACK                                     \
4408         "ldr x0, [%1, #8] \n\t"                                  \
4409         "ldr x1, [%1, #16] \n\t"                                 \
4410         "ldr x2, [%1, #24] \n\t"                                 \
4411         "ldr x3, [%1, #32] \n\t"                                 \
4412         "ldr x4, [%1, #40] \n\t"                                 \
4413         "ldr x5, [%1, #48] \n\t"                                 \
4414         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4415         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4416         VALGRIND_RESTORE_STACK                                   \
4417         "mov %0, x0"                                             \
4418         : /*out*/ "=r" (_res)                                    \
4419         : /*in*/ "0" (&_argvec[0])                               \
4420         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4421      );                                                          \
4422      lval = (__typeof__(lval)) _res;                             \
4423   } while (0)
4424
/* arm64: call a 7-argument word function; args go in x0-x6. */
4425#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
4426                     arg7)                                        \
4427   do {                                                           \
4428      volatile OrigFn        _orig = (orig);                      \
4429      volatile unsigned long _argvec[8];                          \
4430      volatile unsigned long _res;                                \
4431      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4432      _argvec[1] = (unsigned long)(arg1);                         \
4433      _argvec[2] = (unsigned long)(arg2);                         \
4434      _argvec[3] = (unsigned long)(arg3);                         \
4435      _argvec[4] = (unsigned long)(arg4);                         \
4436      _argvec[5] = (unsigned long)(arg5);                         \
4437      _argvec[6] = (unsigned long)(arg6);                         \
4438      _argvec[7] = (unsigned long)(arg7);                         \
4439      __asm__ volatile(                                           \
4440         VALGRIND_ALIGN_STACK                                     \
4441         "ldr x0, [%1, #8] \n\t"                                  \
4442         "ldr x1, [%1, #16] \n\t"                                 \
4443         "ldr x2, [%1, #24] \n\t"                                 \
4444         "ldr x3, [%1, #32] \n\t"                                 \
4445         "ldr x4, [%1, #40] \n\t"                                 \
4446         "ldr x5, [%1, #48] \n\t"                                 \
4447         "ldr x6, [%1, #56] \n\t"                                 \
4448         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4449         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4450         VALGRIND_RESTORE_STACK                                   \
4451         "mov %0, x0"                                             \
4452         : /*out*/ "=r" (_res)                                    \
4453         : /*in*/ "0" (&_argvec[0])                               \
4454         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4455      );                                                          \
4456      lval = (__typeof__(lval)) _res;                             \
4457   } while (0)
4458
/* arm64: call an 8-argument word function; args fill x0-x7 exactly,
   so nothing is passed on the stack. */
4459#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
4460                     arg7,arg8)                                   \
4461   do {                                                           \
4462      volatile OrigFn        _orig = (orig);                      \
4463      volatile unsigned long _argvec[9];                          \
4464      volatile unsigned long _res;                                \
4465      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4466      _argvec[1] = (unsigned long)(arg1);                         \
4467      _argvec[2] = (unsigned long)(arg2);                         \
4468      _argvec[3] = (unsigned long)(arg3);                         \
4469      _argvec[4] = (unsigned long)(arg4);                         \
4470      _argvec[5] = (unsigned long)(arg5);                         \
4471      _argvec[6] = (unsigned long)(arg6);                         \
4472      _argvec[7] = (unsigned long)(arg7);                         \
4473      _argvec[8] = (unsigned long)(arg8);                         \
4474      __asm__ volatile(                                           \
4475         VALGRIND_ALIGN_STACK                                     \
4476         "ldr x0, [%1, #8] \n\t"                                  \
4477         "ldr x1, [%1, #16] \n\t"                                 \
4478         "ldr x2, [%1, #24] \n\t"                                 \
4479         "ldr x3, [%1, #32] \n\t"                                 \
4480         "ldr x4, [%1, #40] \n\t"                                 \
4481         "ldr x5, [%1, #48] \n\t"                                 \
4482         "ldr x6, [%1, #56] \n\t"                                 \
4483         "ldr x7, [%1, #64] \n\t"                                 \
4484         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4485         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4486         VALGRIND_RESTORE_STACK                                   \
4487         "mov %0, x0"                                             \
4488         : /*out*/ "=r" (_res)                                    \
4489         : /*in*/ "0" (&_argvec[0])                               \
4490         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4491      );                                                          \
4492      lval = (__typeof__(lval)) _res;                             \
4493   } while (0)
4494
/* arm64: call a 9-argument word function.  Args 1-8 in x0-x7; arg9 is
   stored at [sp] inside a 32-byte (16-aligned) stack reservation. */
4495#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
4496                     arg7,arg8,arg9)                              \
4497   do {                                                           \
4498      volatile OrigFn        _orig = (orig);                      \
4499      volatile unsigned long _argvec[10];                         \
4500      volatile unsigned long _res;                                \
4501      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4502      _argvec[1] = (unsigned long)(arg1);                         \
4503      _argvec[2] = (unsigned long)(arg2);                         \
4504      _argvec[3] = (unsigned long)(arg3);                         \
4505      _argvec[4] = (unsigned long)(arg4);                         \
4506      _argvec[5] = (unsigned long)(arg5);                         \
4507      _argvec[6] = (unsigned long)(arg6);                         \
4508      _argvec[7] = (unsigned long)(arg7);                         \
4509      _argvec[8] = (unsigned long)(arg8);                         \
4510      _argvec[9] = (unsigned long)(arg9);                         \
4511      __asm__ volatile(                                           \
4512         VALGRIND_ALIGN_STACK                                     \
4513         "sub sp, sp, #0x20 \n\t"                                 \
4514         "ldr x0, [%1, #8] \n\t"                                  \
4515         "ldr x1, [%1, #16] \n\t"                                 \
4516         "ldr x2, [%1, #24] \n\t"                                 \
4517         "ldr x3, [%1, #32] \n\t"                                 \
4518         "ldr x4, [%1, #40] \n\t"                                 \
4519         "ldr x5, [%1, #48] \n\t"                                 \
4520         "ldr x6, [%1, #56] \n\t"                                 \
4521         "ldr x7, [%1, #64] \n\t"                                 \
4522         "ldr x8, [%1, #72] \n\t"                                 \
4523         "str x8, [sp, #0]  \n\t"                                 \
4524         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4525         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4526         VALGRIND_RESTORE_STACK                                   \
4527         "mov %0, x0"                                             \
4528         : /*out*/ "=r" (_res)                                    \
4529         : /*in*/ "0" (&_argvec[0])                               \
4530         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4531      );                                                          \
4532      lval = (__typeof__(lval)) _res;                             \
4533   } while (0)
4534
/* arm64: call a 10-argument word function.  Args 1-8 in x0-x7; args
   9-10 are stored at [sp] and [sp,#8] in a 32-byte reservation. */
4535#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
4536                      arg7,arg8,arg9,arg10)                       \
4537   do {                                                           \
4538      volatile OrigFn        _orig = (orig);                      \
4539      volatile unsigned long _argvec[11];                         \
4540      volatile unsigned long _res;                                \
4541      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4542      _argvec[1] = (unsigned long)(arg1);                         \
4543      _argvec[2] = (unsigned long)(arg2);                         \
4544      _argvec[3] = (unsigned long)(arg3);                         \
4545      _argvec[4] = (unsigned long)(arg4);                         \
4546      _argvec[5] = (unsigned long)(arg5);                         \
4547      _argvec[6] = (unsigned long)(arg6);                         \
4548      _argvec[7] = (unsigned long)(arg7);                         \
4549      _argvec[8] = (unsigned long)(arg8);                         \
4550      _argvec[9] = (unsigned long)(arg9);                         \
4551      _argvec[10] = (unsigned long)(arg10);                       \
4552      __asm__ volatile(                                           \
4553         VALGRIND_ALIGN_STACK                                     \
4554         "sub sp, sp, #0x20 \n\t"                                 \
4555         "ldr x0, [%1, #8] \n\t"                                  \
4556         "ldr x1, [%1, #16] \n\t"                                 \
4557         "ldr x2, [%1, #24] \n\t"                                 \
4558         "ldr x3, [%1, #32] \n\t"                                 \
4559         "ldr x4, [%1, #40] \n\t"                                 \
4560         "ldr x5, [%1, #48] \n\t"                                 \
4561         "ldr x6, [%1, #56] \n\t"                                 \
4562         "ldr x7, [%1, #64] \n\t"                                 \
4563         "ldr x8, [%1, #72] \n\t"                                 \
4564         "str x8, [sp, #0]  \n\t"                                 \
4565         "ldr x8, [%1, #80] \n\t"                                 \
4566         "str x8, [sp, #8]  \n\t"                                 \
4567         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4568         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4569         VALGRIND_RESTORE_STACK                                   \
4570         "mov %0, x0"                                             \
4571         : /*out*/ "=r" (_res)                                    \
4572         : /*in*/ "0" (&_argvec[0])                               \
4573         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4574      );                                                          \
4575      lval = (__typeof__(lval)) _res;                             \
4576   } while (0)
4577
/* arm64: call an 11-argument word function.  Args 1-8 in x0-x7; args
   9-11 are stored at [sp], [sp,#8] and [sp,#16] in a 48-byte
   (16-aligned) stack reservation. */
4578#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
4579                      arg7,arg8,arg9,arg10,arg11)                 \
4580   do {                                                           \
4581      volatile OrigFn        _orig = (orig);                      \
4582      volatile unsigned long _argvec[12];                         \
4583      volatile unsigned long _res;                                \
4584      _argvec[0] = (unsigned long)_orig.nraddr;                   \
4585      _argvec[1] = (unsigned long)(arg1);                         \
4586      _argvec[2] = (unsigned long)(arg2);                         \
4587      _argvec[3] = (unsigned long)(arg3);                         \
4588      _argvec[4] = (unsigned long)(arg4);                         \
4589      _argvec[5] = (unsigned long)(arg5);                         \
4590      _argvec[6] = (unsigned long)(arg6);                         \
4591      _argvec[7] = (unsigned long)(arg7);                         \
4592      _argvec[8] = (unsigned long)(arg8);                         \
4593      _argvec[9] = (unsigned long)(arg9);                         \
4594      _argvec[10] = (unsigned long)(arg10);                       \
4595      _argvec[11] = (unsigned long)(arg11);                       \
4596      __asm__ volatile(                                           \
4597         VALGRIND_ALIGN_STACK                                     \
4598         "sub sp, sp, #0x30 \n\t"                                 \
4599         "ldr x0, [%1, #8] \n\t"                                  \
4600         "ldr x1, [%1, #16] \n\t"                                 \
4601         "ldr x2, [%1, #24] \n\t"                                 \
4602         "ldr x3, [%1, #32] \n\t"                                 \
4603         "ldr x4, [%1, #40] \n\t"                                 \
4604         "ldr x5, [%1, #48] \n\t"                                 \
4605         "ldr x6, [%1, #56] \n\t"                                 \
4606         "ldr x7, [%1, #64] \n\t"                                 \
4607         "ldr x8, [%1, #72] \n\t"                                 \
4608         "str x8, [sp, #0]  \n\t"                                 \
4609         "ldr x8, [%1, #80] \n\t"                                 \
4610         "str x8, [sp, #8]  \n\t"                                 \
4611         "ldr x8, [%1, #88] \n\t"                                 \
4612         "str x8, [sp, #16] \n\t"                                 \
4613         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
4614         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
4615         VALGRIND_RESTORE_STACK                                   \
4616         "mov %0, x0"                                             \
4617         : /*out*/ "=r" (_res)                                    \
4618         : /*in*/ "0" (&_argvec[0])                               \
4619         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
4620      );                                                          \
4621      lval = (__typeof__(lval)) _res;                             \
4622   } while (0)
4623
/* CALL_FN_W_12W (arm64): as CALL_FN_W_11W but with a 12th argument; args
   9-12 are spilled to the stack at sp+0..24 (0x30 bytes reserved, keeping
   sp 16-byte aligned) before the no-redirect branch through x8.  Result
   is returned in x0 and copied into lval. */
4624#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
 4625 arg7,arg8,arg9,arg10,arg11, \
 4626 arg12) \
 4627 do { \
 4628 volatile OrigFn _orig = (orig); \
 4629 volatile unsigned long _argvec[13]; \
 4630 volatile unsigned long _res; \
 4631 _argvec[0] = (unsigned long)_orig.nraddr; \
 4632 _argvec[1] = (unsigned long)(arg1); \
 4633 _argvec[2] = (unsigned long)(arg2); \
 4634 _argvec[3] = (unsigned long)(arg3); \
 4635 _argvec[4] = (unsigned long)(arg4); \
 4636 _argvec[5] = (unsigned long)(arg5); \
 4637 _argvec[6] = (unsigned long)(arg6); \
 4638 _argvec[7] = (unsigned long)(arg7); \
 4639 _argvec[8] = (unsigned long)(arg8); \
 4640 _argvec[9] = (unsigned long)(arg9); \
 4641 _argvec[10] = (unsigned long)(arg10); \
 4642 _argvec[11] = (unsigned long)(arg11); \
 4643 _argvec[12] = (unsigned long)(arg12); \
 4644 __asm__ volatile( \
 4645 VALGRIND_ALIGN_STACK \
 4646 "sub sp, sp, #0x30 \n\t" \
 4647 "ldr x0, [%1, #8] \n\t" \
 4648 "ldr x1, [%1, #16] \n\t" \
 4649 "ldr x2, [%1, #24] \n\t" \
 4650 "ldr x3, [%1, #32] \n\t" \
 4651 "ldr x4, [%1, #40] \n\t" \
 4652 "ldr x5, [%1, #48] \n\t" \
 4653 "ldr x6, [%1, #56] \n\t" \
 4654 "ldr x7, [%1, #64] \n\t" \
 4655 "ldr x8, [%1, #72] \n\t" \
 4656 "str x8, [sp, #0] \n\t" \
 4657 "ldr x8, [%1, #80] \n\t" \
 4658 "str x8, [sp, #8] \n\t" \
 4659 "ldr x8, [%1, #88] \n\t" \
 4660 "str x8, [sp, #16] \n\t" \
 4661 "ldr x8, [%1, #96] \n\t" \
 4662 "str x8, [sp, #24] \n\t" \
 4663 "ldr x8, [%1] \n\t" /* target->x8 */ \
 4664 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
 4665 VALGRIND_RESTORE_STACK \
 4666 "mov %0, x0" \
 4667 : /*out*/ "=r" (_res) \
 4668 : /*in*/ "0" (&_argvec[0]) \
 4669 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
 4670 ); \
 4671 lval = (__typeof__(lval)) _res; \
 4672 } while (0)
4673
4674#endif /* PLAT_arm64_linux */
4675
4676/* ------------------------- s390x-linux ------------------------- */
4677
4678#if defined(PLAT_s390x_linux)
4679
4680/* Similar workaround as amd64 (see above), but we use r11 as frame
4681 pointer and save the old r11 in r7. r11 might be used for
4682 argvec, therefore we copy argvec in r1 since r1 is clobbered
4683 after the call anyway. */
/* s390x CFI helpers: when the compiler emits DWARF CFI for asm
   (__GCC_HAVE_DWARF2_CFI_ASM), the prologue copies the argvec pointer
   into r1, saves the old frame pointer r11 in r7 and points r11 at the
   caller's CFA so unwinding through the hidden call works; the epilogue
   restores r11.  Without CFI support only the r1 copy is done and
   __FRAME_POINTER expands to nothing. */
4684#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
4685# define __FRAME_POINTER \
 4686 ,"d"(__builtin_dwarf_cfa())
4687# define VALGRIND_CFI_PROLOGUE \
 4688 ".cfi_remember_state\n\t" \
 4689 "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
 4690 "lgr 7,11\n\t" \
 4691 "lgr 11,%2\n\t" \
 4692 ".cfi_def_cfa r11, 0\n\t"
4693# define VALGRIND_CFI_EPILOGUE \
 4694 "lgr 11, 7\n\t" \
 4695 ".cfi_restore_state\n\t"
4696#else
4697# define __FRAME_POINTER
4698# define VALGRIND_CFI_PROLOGUE \
 4699 "lgr 1,%1\n\t"
4700# define VALGRIND_CFI_EPILOGUE
4701#endif
4702
4703/* Nb: On s390 the stack pointer is properly aligned *at all times*
4704 according to the s390 GCC maintainer. (The ABI specification is not
4705 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4706 VALGRIND_RESTORE_STACK are not defined here. */
4707
4708/* These regs are trashed by the hidden call. Note that we overwrite
4709 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4710 function a proper return address. All others are ABI defined call
4711 clobbers. */
/* s390x caller-saved register clobber list: r0-r5 plus r14 (overwritten
   by Valgrind to provide a return address; see comment above), together
   with the vector registers v0-v31 when compiling with the vector
   facility, or the FP registers f0-f7 otherwise. */
4712#if defined(__VX__) || defined(__S390_VX__)
4713#define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
 4714 "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", \
 4715 "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15", \
 4716 "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", \
 4717 "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31"
4718#else
4719#define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
 4720 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7"
4721#endif
4722
4723/* Nb: Although r11 is modified in the asm snippets below (inside
4724 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
4725 two reasons:
4726 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
4727 modified
4728 (2) GCC will complain that r11 cannot appear inside a clobber section,
4729 when compiled with -O -fno-omit-frame-pointer
4730 */
4731
/* CALL_FN_W_v (s390x): zero-argument hidden call.  Reserves the 160-byte
   register save area, loads the target from argvec[0] into r1, performs
   the no-redirect call, and copies the result from r2 into lval. */
4732#define CALL_FN_W_v(lval, orig) \
 4733 do { \
 4734 volatile OrigFn _orig = (orig); \
 4735 volatile unsigned long _argvec[1]; \
 4736 volatile unsigned long _res; \
 4737 _argvec[0] = (unsigned long)_orig.nraddr; \
 4738 __asm__ volatile( \
 4739 VALGRIND_CFI_PROLOGUE \
 4740 "aghi 15,-160\n\t" \
 4741 "lg 1, 0(1)\n\t" /* target->r1 */ \
 4742 VALGRIND_CALL_NOREDIR_R1 \
 4743 "aghi 15,160\n\t" \
 4744 VALGRIND_CFI_EPILOGUE \
 4745 "lgr %0, 2\n\t" \
 4746 : /*out*/ "=d" (_res) \
 4747 : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
 4748 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
 4749 ); \
 4750 lval = (__typeof__(lval)) _res; \
 4751 } while (0)
4752
4753/* The call abi has the arguments in r2-r6 and stack */
/* CALL_FN_W_W (s390x): one-argument hidden call.  arg1 is loaded from
   argvec into r2 (first ABI argument register), target into r1, result
   comes back in r2. */
4754#define CALL_FN_W_W(lval, orig, arg1) \
 4755 do { \
 4756 volatile OrigFn _orig = (orig); \
 4757 volatile unsigned long _argvec[2]; \
 4758 volatile unsigned long _res; \
 4759 _argvec[0] = (unsigned long)_orig.nraddr; \
 4760 _argvec[1] = (unsigned long)arg1; \
 4761 __asm__ volatile( \
 4762 VALGRIND_CFI_PROLOGUE \
 4763 "aghi 15,-160\n\t" \
 4764 "lg 2, 8(1)\n\t" \
 4765 "lg 1, 0(1)\n\t" \
 4766 VALGRIND_CALL_NOREDIR_R1 \
 4767 "aghi 15,160\n\t" \
 4768 VALGRIND_CFI_EPILOGUE \
 4769 "lgr %0, 2\n\t" \
 4770 : /*out*/ "=d" (_res) \
 4771 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
 4772 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
 4773 ); \
 4774 lval = (__typeof__(lval)) _res; \
 4775 } while (0)
4776
/* CALL_FN_W_WW (s390x): two-argument hidden call; args go in r2-r3,
   target in r1, result in r2. */
4777#define CALL_FN_W_WW(lval, orig, arg1, arg2) \
 4778 do { \
 4779 volatile OrigFn _orig = (orig); \
 4780 volatile unsigned long _argvec[3]; \
 4781 volatile unsigned long _res; \
 4782 _argvec[0] = (unsigned long)_orig.nraddr; \
 4783 _argvec[1] = (unsigned long)arg1; \
 4784 _argvec[2] = (unsigned long)arg2; \
 4785 __asm__ volatile( \
 4786 VALGRIND_CFI_PROLOGUE \
 4787 "aghi 15,-160\n\t" \
 4788 "lg 2, 8(1)\n\t" \
 4789 "lg 3,16(1)\n\t" \
 4790 "lg 1, 0(1)\n\t" \
 4791 VALGRIND_CALL_NOREDIR_R1 \
 4792 "aghi 15,160\n\t" \
 4793 VALGRIND_CFI_EPILOGUE \
 4794 "lgr %0, 2\n\t" \
 4795 : /*out*/ "=d" (_res) \
 4796 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
 4797 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
 4798 ); \
 4799 lval = (__typeof__(lval)) _res; \
 4800 } while (0)
4801
/* CALL_FN_W_WWW (s390x): three-argument hidden call; args go in r2-r4,
   target in r1, result in r2. */
4802#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
 4803 do { \
 4804 volatile OrigFn _orig = (orig); \
 4805 volatile unsigned long _argvec[4]; \
 4806 volatile unsigned long _res; \
 4807 _argvec[0] = (unsigned long)_orig.nraddr; \
 4808 _argvec[1] = (unsigned long)arg1; \
 4809 _argvec[2] = (unsigned long)arg2; \
 4810 _argvec[3] = (unsigned long)arg3; \
 4811 __asm__ volatile( \
 4812 VALGRIND_CFI_PROLOGUE \
 4813 "aghi 15,-160\n\t" \
 4814 "lg 2, 8(1)\n\t" \
 4815 "lg 3,16(1)\n\t" \
 4816 "lg 4,24(1)\n\t" \
 4817 "lg 1, 0(1)\n\t" \
 4818 VALGRIND_CALL_NOREDIR_R1 \
 4819 "aghi 15,160\n\t" \
 4820 VALGRIND_CFI_EPILOGUE \
 4821 "lgr %0, 2\n\t" \
 4822 : /*out*/ "=d" (_res) \
 4823 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
 4824 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
 4825 ); \
 4826 lval = (__typeof__(lval)) _res; \
 4827 } while (0)
4828
/* CALL_FN_W_WWWW (s390x): four-argument hidden call; args go in r2-r5,
   target in r1, result in r2. */
4829#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
 4830 do { \
 4831 volatile OrigFn _orig = (orig); \
 4832 volatile unsigned long _argvec[5]; \
 4833 volatile unsigned long _res; \
 4834 _argvec[0] = (unsigned long)_orig.nraddr; \
 4835 _argvec[1] = (unsigned long)arg1; \
 4836 _argvec[2] = (unsigned long)arg2; \
 4837 _argvec[3] = (unsigned long)arg3; \
 4838 _argvec[4] = (unsigned long)arg4; \
 4839 __asm__ volatile( \
 4840 VALGRIND_CFI_PROLOGUE \
 4841 "aghi 15,-160\n\t" \
 4842 "lg 2, 8(1)\n\t" \
 4843 "lg 3,16(1)\n\t" \
 4844 "lg 4,24(1)\n\t" \
 4845 "lg 5,32(1)\n\t" \
 4846 "lg 1, 0(1)\n\t" \
 4847 VALGRIND_CALL_NOREDIR_R1 \
 4848 "aghi 15,160\n\t" \
 4849 VALGRIND_CFI_EPILOGUE \
 4850 "lgr %0, 2\n\t" \
 4851 : /*out*/ "=d" (_res) \
 4852 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
 4853 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
 4854 ); \
 4855 lval = (__typeof__(lval)) _res; \
 4856 } while (0)
4857
/* CALL_FN_W_5W (s390x): five-argument hidden call; args go in r2-r6
   (all register args per the ABI), target in r1, result in r2.  r6 is
   additionally clobbered, hence the extra "6" in the trash list. */
4858#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
 4859 do { \
 4860 volatile OrigFn _orig = (orig); \
 4861 volatile unsigned long _argvec[6]; \
 4862 volatile unsigned long _res; \
 4863 _argvec[0] = (unsigned long)_orig.nraddr; \
 4864 _argvec[1] = (unsigned long)arg1; \
 4865 _argvec[2] = (unsigned long)arg2; \
 4866 _argvec[3] = (unsigned long)arg3; \
 4867 _argvec[4] = (unsigned long)arg4; \
 4868 _argvec[5] = (unsigned long)arg5; \
 4869 __asm__ volatile( \
 4870 VALGRIND_CFI_PROLOGUE \
 4871 "aghi 15,-160\n\t" \
 4872 "lg 2, 8(1)\n\t" \
 4873 "lg 3,16(1)\n\t" \
 4874 "lg 4,24(1)\n\t" \
 4875 "lg 5,32(1)\n\t" \
 4876 "lg 6,40(1)\n\t" \
 4877 "lg 1, 0(1)\n\t" \
 4878 VALGRIND_CALL_NOREDIR_R1 \
 4879 "aghi 15,160\n\t" \
 4880 VALGRIND_CFI_EPILOGUE \
 4881 "lgr %0, 2\n\t" \
 4882 : /*out*/ "=d" (_res) \
 4883 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
 4884 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
 4885 ); \
 4886 lval = (__typeof__(lval)) _res; \
 4887 } while (0)
4888
/* CALL_FN_W_6W (s390x): six-argument hidden call; args 1-5 go in r2-r6,
   arg6 is copied (mvc) to the stack slot just above the 160-byte save
   area, so 168 bytes are reserved.  Target in r1, result in r2. */
4889#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
 4890 arg6) \
 4891 do { \
 4892 volatile OrigFn _orig = (orig); \
 4893 volatile unsigned long _argvec[7]; \
 4894 volatile unsigned long _res; \
 4895 _argvec[0] = (unsigned long)_orig.nraddr; \
 4896 _argvec[1] = (unsigned long)arg1; \
 4897 _argvec[2] = (unsigned long)arg2; \
 4898 _argvec[3] = (unsigned long)arg3; \
 4899 _argvec[4] = (unsigned long)arg4; \
 4900 _argvec[5] = (unsigned long)arg5; \
 4901 _argvec[6] = (unsigned long)arg6; \
 4902 __asm__ volatile( \
 4903 VALGRIND_CFI_PROLOGUE \
 4904 "aghi 15,-168\n\t" \
 4905 "lg 2, 8(1)\n\t" \
 4906 "lg 3,16(1)\n\t" \
 4907 "lg 4,24(1)\n\t" \
 4908 "lg 5,32(1)\n\t" \
 4909 "lg 6,40(1)\n\t" \
 4910 "mvc 160(8,15), 48(1)\n\t" \
 4911 "lg 1, 0(1)\n\t" \
 4912 VALGRIND_CALL_NOREDIR_R1 \
 4913 "aghi 15,168\n\t" \
 4914 VALGRIND_CFI_EPILOGUE \
 4915 "lgr %0, 2\n\t" \
 4916 : /*out*/ "=d" (_res) \
 4917 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
 4918 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
 4919 ); \
 4920 lval = (__typeof__(lval)) _res; \
 4921 } while (0)
4922
/* CALL_FN_W_7W (s390x): seven-argument hidden call; args 1-5 in r2-r6,
   args 6-7 copied to stack offsets 160/168 (176 bytes reserved). */
4923#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
 4924 arg6, arg7) \
 4925 do { \
 4926 volatile OrigFn _orig = (orig); \
 4927 volatile unsigned long _argvec[8]; \
 4928 volatile unsigned long _res; \
 4929 _argvec[0] = (unsigned long)_orig.nraddr; \
 4930 _argvec[1] = (unsigned long)arg1; \
 4931 _argvec[2] = (unsigned long)arg2; \
 4932 _argvec[3] = (unsigned long)arg3; \
 4933 _argvec[4] = (unsigned long)arg4; \
 4934 _argvec[5] = (unsigned long)arg5; \
 4935 _argvec[6] = (unsigned long)arg6; \
 4936 _argvec[7] = (unsigned long)arg7; \
 4937 __asm__ volatile( \
 4938 VALGRIND_CFI_PROLOGUE \
 4939 "aghi 15,-176\n\t" \
 4940 "lg 2, 8(1)\n\t" \
 4941 "lg 3,16(1)\n\t" \
 4942 "lg 4,24(1)\n\t" \
 4943 "lg 5,32(1)\n\t" \
 4944 "lg 6,40(1)\n\t" \
 4945 "mvc 160(8,15), 48(1)\n\t" \
 4946 "mvc 168(8,15), 56(1)\n\t" \
 4947 "lg 1, 0(1)\n\t" \
 4948 VALGRIND_CALL_NOREDIR_R1 \
 4949 "aghi 15,176\n\t" \
 4950 VALGRIND_CFI_EPILOGUE \
 4951 "lgr %0, 2\n\t" \
 4952 : /*out*/ "=d" (_res) \
 4953 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
 4954 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
 4955 ); \
 4956 lval = (__typeof__(lval)) _res; \
 4957 } while (0)
4958
/* CALL_FN_W_8W (s390x): eight-argument hidden call; args 1-5 in r2-r6,
   args 6-8 copied to stack offsets 160/168/176 (184 bytes reserved). */
4959#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
 4960 arg6, arg7 ,arg8) \
 4961 do { \
 4962 volatile OrigFn _orig = (orig); \
 4963 volatile unsigned long _argvec[9]; \
 4964 volatile unsigned long _res; \
 4965 _argvec[0] = (unsigned long)_orig.nraddr; \
 4966 _argvec[1] = (unsigned long)arg1; \
 4967 _argvec[2] = (unsigned long)arg2; \
 4968 _argvec[3] = (unsigned long)arg3; \
 4969 _argvec[4] = (unsigned long)arg4; \
 4970 _argvec[5] = (unsigned long)arg5; \
 4971 _argvec[6] = (unsigned long)arg6; \
 4972 _argvec[7] = (unsigned long)arg7; \
 4973 _argvec[8] = (unsigned long)arg8; \
 4974 __asm__ volatile( \
 4975 VALGRIND_CFI_PROLOGUE \
 4976 "aghi 15,-184\n\t" \
 4977 "lg 2, 8(1)\n\t" \
 4978 "lg 3,16(1)\n\t" \
 4979 "lg 4,24(1)\n\t" \
 4980 "lg 5,32(1)\n\t" \
 4981 "lg 6,40(1)\n\t" \
 4982 "mvc 160(8,15), 48(1)\n\t" \
 4983 "mvc 168(8,15), 56(1)\n\t" \
 4984 "mvc 176(8,15), 64(1)\n\t" \
 4985 "lg 1, 0(1)\n\t" \
 4986 VALGRIND_CALL_NOREDIR_R1 \
 4987 "aghi 15,184\n\t" \
 4988 VALGRIND_CFI_EPILOGUE \
 4989 "lgr %0, 2\n\t" \
 4990 : /*out*/ "=d" (_res) \
 4991 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
 4992 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
 4993 ); \
 4994 lval = (__typeof__(lval)) _res; \
 4995 } while (0)
4996
/* CALL_FN_W_9W (s390x): nine-argument hidden call; args 1-5 in r2-r6,
   args 6-9 copied to stack offsets 160..184 (192 bytes reserved). */
4997#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
 4998 arg6, arg7 ,arg8, arg9) \
 4999 do { \
 5000 volatile OrigFn _orig = (orig); \
 5001 volatile unsigned long _argvec[10]; \
 5002 volatile unsigned long _res; \
 5003 _argvec[0] = (unsigned long)_orig.nraddr; \
 5004 _argvec[1] = (unsigned long)arg1; \
 5005 _argvec[2] = (unsigned long)arg2; \
 5006 _argvec[3] = (unsigned long)arg3; \
 5007 _argvec[4] = (unsigned long)arg4; \
 5008 _argvec[5] = (unsigned long)arg5; \
 5009 _argvec[6] = (unsigned long)arg6; \
 5010 _argvec[7] = (unsigned long)arg7; \
 5011 _argvec[8] = (unsigned long)arg8; \
 5012 _argvec[9] = (unsigned long)arg9; \
 5013 __asm__ volatile( \
 5014 VALGRIND_CFI_PROLOGUE \
 5015 "aghi 15,-192\n\t" \
 5016 "lg 2, 8(1)\n\t" \
 5017 "lg 3,16(1)\n\t" \
 5018 "lg 4,24(1)\n\t" \
 5019 "lg 5,32(1)\n\t" \
 5020 "lg 6,40(1)\n\t" \
 5021 "mvc 160(8,15), 48(1)\n\t" \
 5022 "mvc 168(8,15), 56(1)\n\t" \
 5023 "mvc 176(8,15), 64(1)\n\t" \
 5024 "mvc 184(8,15), 72(1)\n\t" \
 5025 "lg 1, 0(1)\n\t" \
 5026 VALGRIND_CALL_NOREDIR_R1 \
 5027 "aghi 15,192\n\t" \
 5028 VALGRIND_CFI_EPILOGUE \
 5029 "lgr %0, 2\n\t" \
 5030 : /*out*/ "=d" (_res) \
 5031 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
 5032 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
 5033 ); \
 5034 lval = (__typeof__(lval)) _res; \
 5035 } while (0)
5036
/* CALL_FN_W_10W (s390x): ten-argument hidden call; args 1-5 in r2-r6,
   args 6-10 copied to stack offsets 160..192 (200 bytes reserved). */
5037#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
 5038 arg6, arg7 ,arg8, arg9, arg10) \
 5039 do { \
 5040 volatile OrigFn _orig = (orig); \
 5041 volatile unsigned long _argvec[11]; \
 5042 volatile unsigned long _res; \
 5043 _argvec[0] = (unsigned long)_orig.nraddr; \
 5044 _argvec[1] = (unsigned long)arg1; \
 5045 _argvec[2] = (unsigned long)arg2; \
 5046 _argvec[3] = (unsigned long)arg3; \
 5047 _argvec[4] = (unsigned long)arg4; \
 5048 _argvec[5] = (unsigned long)arg5; \
 5049 _argvec[6] = (unsigned long)arg6; \
 5050 _argvec[7] = (unsigned long)arg7; \
 5051 _argvec[8] = (unsigned long)arg8; \
 5052 _argvec[9] = (unsigned long)arg9; \
 5053 _argvec[10] = (unsigned long)arg10; \
 5054 __asm__ volatile( \
 5055 VALGRIND_CFI_PROLOGUE \
 5056 "aghi 15,-200\n\t" \
 5057 "lg 2, 8(1)\n\t" \
 5058 "lg 3,16(1)\n\t" \
 5059 "lg 4,24(1)\n\t" \
 5060 "lg 5,32(1)\n\t" \
 5061 "lg 6,40(1)\n\t" \
 5062 "mvc 160(8,15), 48(1)\n\t" \
 5063 "mvc 168(8,15), 56(1)\n\t" \
 5064 "mvc 176(8,15), 64(1)\n\t" \
 5065 "mvc 184(8,15), 72(1)\n\t" \
 5066 "mvc 192(8,15), 80(1)\n\t" \
 5067 "lg 1, 0(1)\n\t" \
 5068 VALGRIND_CALL_NOREDIR_R1 \
 5069 "aghi 15,200\n\t" \
 5070 VALGRIND_CFI_EPILOGUE \
 5071 "lgr %0, 2\n\t" \
 5072 : /*out*/ "=d" (_res) \
 5073 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
 5074 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
 5075 ); \
 5076 lval = (__typeof__(lval)) _res; \
 5077 } while (0)
5078
/* CALL_FN_W_11W (s390x): eleven-argument hidden call; args 1-5 in r2-r6,
   args 6-11 copied to stack offsets 160..200 (208 bytes reserved). */
5079#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
 5080 arg6, arg7 ,arg8, arg9, arg10, arg11) \
 5081 do { \
 5082 volatile OrigFn _orig = (orig); \
 5083 volatile unsigned long _argvec[12]; \
 5084 volatile unsigned long _res; \
 5085 _argvec[0] = (unsigned long)_orig.nraddr; \
 5086 _argvec[1] = (unsigned long)arg1; \
 5087 _argvec[2] = (unsigned long)arg2; \
 5088 _argvec[3] = (unsigned long)arg3; \
 5089 _argvec[4] = (unsigned long)arg4; \
 5090 _argvec[5] = (unsigned long)arg5; \
 5091 _argvec[6] = (unsigned long)arg6; \
 5092 _argvec[7] = (unsigned long)arg7; \
 5093 _argvec[8] = (unsigned long)arg8; \
 5094 _argvec[9] = (unsigned long)arg9; \
 5095 _argvec[10] = (unsigned long)arg10; \
 5096 _argvec[11] = (unsigned long)arg11; \
 5097 __asm__ volatile( \
 5098 VALGRIND_CFI_PROLOGUE \
 5099 "aghi 15,-208\n\t" \
 5100 "lg 2, 8(1)\n\t" \
 5101 "lg 3,16(1)\n\t" \
 5102 "lg 4,24(1)\n\t" \
 5103 "lg 5,32(1)\n\t" \
 5104 "lg 6,40(1)\n\t" \
 5105 "mvc 160(8,15), 48(1)\n\t" \
 5106 "mvc 168(8,15), 56(1)\n\t" \
 5107 "mvc 176(8,15), 64(1)\n\t" \
 5108 "mvc 184(8,15), 72(1)\n\t" \
 5109 "mvc 192(8,15), 80(1)\n\t" \
 5110 "mvc 200(8,15), 88(1)\n\t" \
 5111 "lg 1, 0(1)\n\t" \
 5112 VALGRIND_CALL_NOREDIR_R1 \
 5113 "aghi 15,208\n\t" \
 5114 VALGRIND_CFI_EPILOGUE \
 5115 "lgr %0, 2\n\t" \
 5116 : /*out*/ "=d" (_res) \
 5117 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
 5118 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
 5119 ); \
 5120 lval = (__typeof__(lval)) _res; \
 5121 } while (0)
5122
/* CALL_FN_W_12W (s390x): twelve-argument hidden call; args 1-5 in r2-r6,
   args 6-12 copied to stack offsets 160..208 (216 bytes reserved). */
5123#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
 5124 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
 5125 do { \
 5126 volatile OrigFn _orig = (orig); \
 5127 volatile unsigned long _argvec[13]; \
 5128 volatile unsigned long _res; \
 5129 _argvec[0] = (unsigned long)_orig.nraddr; \
 5130 _argvec[1] = (unsigned long)arg1; \
 5131 _argvec[2] = (unsigned long)arg2; \
 5132 _argvec[3] = (unsigned long)arg3; \
 5133 _argvec[4] = (unsigned long)arg4; \
 5134 _argvec[5] = (unsigned long)arg5; \
 5135 _argvec[6] = (unsigned long)arg6; \
 5136 _argvec[7] = (unsigned long)arg7; \
 5137 _argvec[8] = (unsigned long)arg8; \
 5138 _argvec[9] = (unsigned long)arg9; \
 5139 _argvec[10] = (unsigned long)arg10; \
 5140 _argvec[11] = (unsigned long)arg11; \
 5141 _argvec[12] = (unsigned long)arg12; \
 5142 __asm__ volatile( \
 5143 VALGRIND_CFI_PROLOGUE \
 5144 "aghi 15,-216\n\t" \
 5145 "lg 2, 8(1)\n\t" \
 5146 "lg 3,16(1)\n\t" \
 5147 "lg 4,24(1)\n\t" \
 5148 "lg 5,32(1)\n\t" \
 5149 "lg 6,40(1)\n\t" \
 5150 "mvc 160(8,15), 48(1)\n\t" \
 5151 "mvc 168(8,15), 56(1)\n\t" \
 5152 "mvc 176(8,15), 64(1)\n\t" \
 5153 "mvc 184(8,15), 72(1)\n\t" \
 5154 "mvc 192(8,15), 80(1)\n\t" \
 5155 "mvc 200(8,15), 88(1)\n\t" \
 5156 "mvc 208(8,15), 96(1)\n\t" \
 5157 "lg 1, 0(1)\n\t" \
 5158 VALGRIND_CALL_NOREDIR_R1 \
 5159 "aghi 15,216\n\t" \
 5160 VALGRIND_CFI_EPILOGUE \
 5161 "lgr %0, 2\n\t" \
 5162 : /*out*/ "=d" (_res) \
 5163 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
 5164 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
 5165 ); \
 5166 lval = (__typeof__(lval)) _res; \
 5167 } while (0)
5168
5169
5170#endif /* PLAT_s390x_linux */
5171
5172/* ------------------------- mips32-linux ----------------------- */
5173
5174#if defined(PLAT_mips32_linux)
5175
5176/* These regs are trashed by the hidden call. */
/* mips32 caller-saved register clobber list: v0-v1 ($2-$3), the argument
   registers a0-a3 ($4-$7), temporaries t0-t7 ($8-$15), t8-t9 ($24-$25)
   and the return-address register ra ($31). */
5177#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
5178"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
5179"$25", "$31"
5180
5181/* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5182 long) == 4. */
5183
/* CALL_FN_W_v (mips32): zero-argument hidden call.  Saves $28 (gp) and
   $31 (ra) on the stack, reserves a 16-byte outgoing-argument area,
   loads the target into $25 (t9, required by the MIPS PIC call
   convention), performs the no-redirect call, restores gp/ra and takes
   the result from $2 (v0). */
5184#define CALL_FN_W_v(lval, orig) \
 5185 do { \
 5186 volatile OrigFn _orig = (orig); \
 5187 volatile unsigned long _argvec[1]; \
 5188 volatile unsigned long _res; \
 5189 _argvec[0] = (unsigned long)_orig.nraddr; \
 5190 __asm__ volatile( \
 5191 "subu $29, $29, 8 \n\t" \
 5192 "sw $28, 0($29) \n\t" \
 5193 "sw $31, 4($29) \n\t" \
 5194 "subu $29, $29, 16 \n\t" \
 5195 "lw $25, 0(%1) \n\t" /* target->t9 */ \
 5196 VALGRIND_CALL_NOREDIR_T9 \
 5197 "addu $29, $29, 16\n\t" \
 5198 "lw $28, 0($29) \n\t" \
 5199 "lw $31, 4($29) \n\t" \
 5200 "addu $29, $29, 8 \n\t" \
 5201 "move %0, $2\n" \
 5202 : /*out*/ "=r" (_res) \
 5203 : /*in*/ "0" (&_argvec[0]) \
 5204 : /*trash*/ "memory", __CALLER_SAVED_REGS \
 5205 ); \
 5206 lval = (__typeof__(lval)) _res; \
 5207 } while (0)
5208
/* CALL_FN_W_W (mips32): one-argument hidden call; arg1 goes in $4 (a0),
   target in $25 (t9), result taken from $2 (v0). */
5209#define CALL_FN_W_W(lval, orig, arg1) \
 5210 do { \
 5211 volatile OrigFn _orig = (orig); \
 5212 volatile unsigned long _argvec[2]; \
 5213 volatile unsigned long _res; \
 5214 _argvec[0] = (unsigned long)_orig.nraddr; \
 5215 _argvec[1] = (unsigned long)(arg1); \
 5216 __asm__ volatile( \
 5217 "subu $29, $29, 8 \n\t" \
 5218 "sw $28, 0($29) \n\t" \
 5219 "sw $31, 4($29) \n\t" \
 5220 "subu $29, $29, 16 \n\t" \
 5221 "lw $4, 4(%1) \n\t" /* arg1*/ \
 5222 "lw $25, 0(%1) \n\t" /* target->t9 */ \
 5223 VALGRIND_CALL_NOREDIR_T9 \
 5224 "addu $29, $29, 16 \n\t" \
 5225 "lw $28, 0($29) \n\t" \
 5226 "lw $31, 4($29) \n\t" \
 5227 "addu $29, $29, 8 \n\t" \
 5228 "move %0, $2\n" \
 5229 : /*out*/ "=r" (_res) \
 5230 : /*in*/ "0" (&_argvec[0]) \
 5231 : /*trash*/ "memory", __CALLER_SAVED_REGS \
 5232 ); \
 5233 lval = (__typeof__(lval)) _res; \
 5234 } while (0)
5235
/* CALL_FN_W_WW (mips32): two-argument hidden call; args in $4-$5,
   target in $25, result from $2. */
5236#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
 5237 do { \
 5238 volatile OrigFn _orig = (orig); \
 5239 volatile unsigned long _argvec[3]; \
 5240 volatile unsigned long _res; \
 5241 _argvec[0] = (unsigned long)_orig.nraddr; \
 5242 _argvec[1] = (unsigned long)(arg1); \
 5243 _argvec[2] = (unsigned long)(arg2); \
 5244 __asm__ volatile( \
 5245 "subu $29, $29, 8 \n\t" \
 5246 "sw $28, 0($29) \n\t" \
 5247 "sw $31, 4($29) \n\t" \
 5248 "subu $29, $29, 16 \n\t" \
 5249 "lw $4, 4(%1) \n\t" \
 5250 "lw $5, 8(%1) \n\t" \
 5251 "lw $25, 0(%1) \n\t" /* target->t9 */ \
 5252 VALGRIND_CALL_NOREDIR_T9 \
 5253 "addu $29, $29, 16 \n\t" \
 5254 "lw $28, 0($29) \n\t" \
 5255 "lw $31, 4($29) \n\t" \
 5256 "addu $29, $29, 8 \n\t" \
 5257 "move %0, $2\n" \
 5258 : /*out*/ "=r" (_res) \
 5259 : /*in*/ "0" (&_argvec[0]) \
 5260 : /*trash*/ "memory", __CALLER_SAVED_REGS \
 5261 ); \
 5262 lval = (__typeof__(lval)) _res; \
 5263 } while (0)
5264
/* CALL_FN_W_WWW (mips32): three-argument hidden call; args in $4-$6,
   target in $25, result from $2. */
5265#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
 5266 do { \
 5267 volatile OrigFn _orig = (orig); \
 5268 volatile unsigned long _argvec[4]; \
 5269 volatile unsigned long _res; \
 5270 _argvec[0] = (unsigned long)_orig.nraddr; \
 5271 _argvec[1] = (unsigned long)(arg1); \
 5272 _argvec[2] = (unsigned long)(arg2); \
 5273 _argvec[3] = (unsigned long)(arg3); \
 5274 __asm__ volatile( \
 5275 "subu $29, $29, 8 \n\t" \
 5276 "sw $28, 0($29) \n\t" \
 5277 "sw $31, 4($29) \n\t" \
 5278 "subu $29, $29, 16 \n\t" \
 5279 "lw $4, 4(%1) \n\t" \
 5280 "lw $5, 8(%1) \n\t" \
 5281 "lw $6, 12(%1) \n\t" \
 5282 "lw $25, 0(%1) \n\t" /* target->t9 */ \
 5283 VALGRIND_CALL_NOREDIR_T9 \
 5284 "addu $29, $29, 16 \n\t" \
 5285 "lw $28, 0($29) \n\t" \
 5286 "lw $31, 4($29) \n\t" \
 5287 "addu $29, $29, 8 \n\t" \
 5288 "move %0, $2\n" \
 5289 : /*out*/ "=r" (_res) \
 5290 : /*in*/ "0" (&_argvec[0]) \
 5291 : /*trash*/ "memory", __CALLER_SAVED_REGS \
 5292 ); \
 5293 lval = (__typeof__(lval)) _res; \
 5294 } while (0)
5295
/* CALL_FN_W_WWWW (mips32): four-argument hidden call; args fill all four
   O32 argument registers $4-$7, target in $25, result from $2. */
5296#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
 5297 do { \
 5298 volatile OrigFn _orig = (orig); \
 5299 volatile unsigned long _argvec[5]; \
 5300 volatile unsigned long _res; \
 5301 _argvec[0] = (unsigned long)_orig.nraddr; \
 5302 _argvec[1] = (unsigned long)(arg1); \
 5303 _argvec[2] = (unsigned long)(arg2); \
 5304 _argvec[3] = (unsigned long)(arg3); \
 5305 _argvec[4] = (unsigned long)(arg4); \
 5306 __asm__ volatile( \
 5307 "subu $29, $29, 8 \n\t" \
 5308 "sw $28, 0($29) \n\t" \
 5309 "sw $31, 4($29) \n\t" \
 5310 "subu $29, $29, 16 \n\t" \
 5311 "lw $4, 4(%1) \n\t" \
 5312 "lw $5, 8(%1) \n\t" \
 5313 "lw $6, 12(%1) \n\t" \
 5314 "lw $7, 16(%1) \n\t" \
 5315 "lw $25, 0(%1) \n\t" /* target->t9 */ \
 5316 VALGRIND_CALL_NOREDIR_T9 \
 5317 "addu $29, $29, 16 \n\t" \
 5318 "lw $28, 0($29) \n\t" \
 5319 "lw $31, 4($29) \n\t" \
 5320 "addu $29, $29, 8 \n\t" \
 5321 "move %0, $2\n" \
 5322 : /*out*/ "=r" (_res) \
 5323 : /*in*/ "0" (&_argvec[0]) \
 5324 : /*trash*/ "memory", __CALLER_SAVED_REGS \
 5325 ); \
 5326 lval = (__typeof__(lval)) _res; \
 5327 } while (0)
5328
/* CALL_FN_W_5W (mips32): five-argument hidden call; args 1-4 in $4-$7,
   arg5 is staged through $4 onto the stack at 16($29) after reserving
   24 bytes (16-byte argument home area + the 5th slot, kept 8-aligned).
   Target in $25, result from $2. */
5329#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
 5330 do { \
 5331 volatile OrigFn _orig = (orig); \
 5332 volatile unsigned long _argvec[6]; \
 5333 volatile unsigned long _res; \
 5334 _argvec[0] = (unsigned long)_orig.nraddr; \
 5335 _argvec[1] = (unsigned long)(arg1); \
 5336 _argvec[2] = (unsigned long)(arg2); \
 5337 _argvec[3] = (unsigned long)(arg3); \
 5338 _argvec[4] = (unsigned long)(arg4); \
 5339 _argvec[5] = (unsigned long)(arg5); \
 5340 __asm__ volatile( \
 5341 "subu $29, $29, 8 \n\t" \
 5342 "sw $28, 0($29) \n\t" \
 5343 "sw $31, 4($29) \n\t" \
 5344 "lw $4, 20(%1) \n\t" \
 5345 "subu $29, $29, 24\n\t" \
 5346 "sw $4, 16($29) \n\t" \
 5347 "lw $4, 4(%1) \n\t" \
 5348 "lw $5, 8(%1) \n\t" \
 5349 "lw $6, 12(%1) \n\t" \
 5350 "lw $7, 16(%1) \n\t" \
 5351 "lw $25, 0(%1) \n\t" /* target->t9 */ \
 5352 VALGRIND_CALL_NOREDIR_T9 \
 5353 "addu $29, $29, 24 \n\t" \
 5354 "lw $28, 0($29) \n\t" \
 5355 "lw $31, 4($29) \n\t" \
 5356 "addu $29, $29, 8 \n\t" \
 5357 "move %0, $2\n" \
 5358 : /*out*/ "=r" (_res) \
 5359 : /*in*/ "0" (&_argvec[0]) \
 5360 : /*trash*/ "memory", __CALLER_SAVED_REGS \
 5361 ); \
 5362 lval = (__typeof__(lval)) _res; \
 5363 } while (0)
/* CALL_FN_W_6W (mips32): six-argument hidden call; args 1-4 in $4-$7,
   args 5-6 staged through $4 onto the stack at 16($29)/20($29) with a
   32-byte frame.  Target in $25, result from $2. */
5364#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
 5365 do { \
 5366 volatile OrigFn _orig = (orig); \
 5367 volatile unsigned long _argvec[7]; \
 5368 volatile unsigned long _res; \
 5369 _argvec[0] = (unsigned long)_orig.nraddr; \
 5370 _argvec[1] = (unsigned long)(arg1); \
 5371 _argvec[2] = (unsigned long)(arg2); \
 5372 _argvec[3] = (unsigned long)(arg3); \
 5373 _argvec[4] = (unsigned long)(arg4); \
 5374 _argvec[5] = (unsigned long)(arg5); \
 5375 _argvec[6] = (unsigned long)(arg6); \
 5376 __asm__ volatile( \
 5377 "subu $29, $29, 8 \n\t" \
 5378 "sw $28, 0($29) \n\t" \
 5379 "sw $31, 4($29) \n\t" \
 5380 "lw $4, 20(%1) \n\t" \
 5381 "subu $29, $29, 32\n\t" \
 5382 "sw $4, 16($29) \n\t" \
 5383 "lw $4, 24(%1) \n\t" \
 5384 "nop\n\t" \
 5385 "sw $4, 20($29) \n\t" \
 5386 "lw $4, 4(%1) \n\t" \
 5387 "lw $5, 8(%1) \n\t" \
 5388 "lw $6, 12(%1) \n\t" \
 5389 "lw $7, 16(%1) \n\t" \
 5390 "lw $25, 0(%1) \n\t" /* target->t9 */ \
 5391 VALGRIND_CALL_NOREDIR_T9 \
 5392 "addu $29, $29, 32 \n\t" \
 5393 "lw $28, 0($29) \n\t" \
 5394 "lw $31, 4($29) \n\t" \
 5395 "addu $29, $29, 8 \n\t" \
 5396 "move %0, $2\n" \
 5397 : /*out*/ "=r" (_res) \
 5398 : /*in*/ "0" (&_argvec[0]) \
 5399 : /*trash*/ "memory", __CALLER_SAVED_REGS \
 5400 ); \
 5401 lval = (__typeof__(lval)) _res; \
 5402 } while (0)
5403
/* CALL_FN_W_7W (mips32): seven-argument hidden call; args 1-4 in $4-$7,
   args 5-7 staged through $4 onto the stack at 16..24($29) with a
   32-byte frame.  Target in $25, result from $2. */
5404#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
 5405 arg7) \
 5406 do { \
 5407 volatile OrigFn _orig = (orig); \
 5408 volatile unsigned long _argvec[8]; \
 5409 volatile unsigned long _res; \
 5410 _argvec[0] = (unsigned long)_orig.nraddr; \
 5411 _argvec[1] = (unsigned long)(arg1); \
 5412 _argvec[2] = (unsigned long)(arg2); \
 5413 _argvec[3] = (unsigned long)(arg3); \
 5414 _argvec[4] = (unsigned long)(arg4); \
 5415 _argvec[5] = (unsigned long)(arg5); \
 5416 _argvec[6] = (unsigned long)(arg6); \
 5417 _argvec[7] = (unsigned long)(arg7); \
 5418 __asm__ volatile( \
 5419 "subu $29, $29, 8 \n\t" \
 5420 "sw $28, 0($29) \n\t" \
 5421 "sw $31, 4($29) \n\t" \
 5422 "lw $4, 20(%1) \n\t" \
 5423 "subu $29, $29, 32\n\t" \
 5424 "sw $4, 16($29) \n\t" \
 5425 "lw $4, 24(%1) \n\t" \
 5426 "sw $4, 20($29) \n\t" \
 5427 "lw $4, 28(%1) \n\t" \
 5428 "sw $4, 24($29) \n\t" \
 5429 "lw $4, 4(%1) \n\t" \
 5430 "lw $5, 8(%1) \n\t" \
 5431 "lw $6, 12(%1) \n\t" \
 5432 "lw $7, 16(%1) \n\t" \
 5433 "lw $25, 0(%1) \n\t" /* target->t9 */ \
 5434 VALGRIND_CALL_NOREDIR_T9 \
 5435 "addu $29, $29, 32 \n\t" \
 5436 "lw $28, 0($29) \n\t" \
 5437 "lw $31, 4($29) \n\t" \
 5438 "addu $29, $29, 8 \n\t" \
 5439 "move %0, $2\n" \
 5440 : /*out*/ "=r" (_res) \
 5441 : /*in*/ "0" (&_argvec[0]) \
 5442 : /*trash*/ "memory", __CALLER_SAVED_REGS \
 5443 ); \
 5444 lval = (__typeof__(lval)) _res; \
 5445 } while (0)
5446
/* CALL_FN_W_8W (mips32): eight-argument hidden call; args 1-4 in $4-$7,
   args 5-8 staged through $4 onto the stack at 16..28($29) with a
   40-byte frame.  Target in $25, result from $2. */
5447#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
 5448 arg7,arg8) \
 5449 do { \
 5450 volatile OrigFn _orig = (orig); \
 5451 volatile unsigned long _argvec[9]; \
 5452 volatile unsigned long _res; \
 5453 _argvec[0] = (unsigned long)_orig.nraddr; \
 5454 _argvec[1] = (unsigned long)(arg1); \
 5455 _argvec[2] = (unsigned long)(arg2); \
 5456 _argvec[3] = (unsigned long)(arg3); \
 5457 _argvec[4] = (unsigned long)(arg4); \
 5458 _argvec[5] = (unsigned long)(arg5); \
 5459 _argvec[6] = (unsigned long)(arg6); \
 5460 _argvec[7] = (unsigned long)(arg7); \
 5461 _argvec[8] = (unsigned long)(arg8); \
 5462 __asm__ volatile( \
 5463 "subu $29, $29, 8 \n\t" \
 5464 "sw $28, 0($29) \n\t" \
 5465 "sw $31, 4($29) \n\t" \
 5466 "lw $4, 20(%1) \n\t" \
 5467 "subu $29, $29, 40\n\t" \
 5468 "sw $4, 16($29) \n\t" \
 5469 "lw $4, 24(%1) \n\t" \
 5470 "sw $4, 20($29) \n\t" \
 5471 "lw $4, 28(%1) \n\t" \
 5472 "sw $4, 24($29) \n\t" \
 5473 "lw $4, 32(%1) \n\t" \
 5474 "sw $4, 28($29) \n\t" \
 5475 "lw $4, 4(%1) \n\t" \
 5476 "lw $5, 8(%1) \n\t" \
 5477 "lw $6, 12(%1) \n\t" \
 5478 "lw $7, 16(%1) \n\t" \
 5479 "lw $25, 0(%1) \n\t" /* target->t9 */ \
 5480 VALGRIND_CALL_NOREDIR_T9 \
 5481 "addu $29, $29, 40 \n\t" \
 5482 "lw $28, 0($29) \n\t" \
 5483 "lw $31, 4($29) \n\t" \
 5484 "addu $29, $29, 8 \n\t" \
 5485 "move %0, $2\n" \
 5486 : /*out*/ "=r" (_res) \
 5487 : /*in*/ "0" (&_argvec[0]) \
 5488 : /*trash*/ "memory", __CALLER_SAVED_REGS \
 5489 ); \
 5490 lval = (__typeof__(lval)) _res; \
 5491 } while (0)
5492
/* CALL_FN_W_9W (mips32): nine-argument hidden call; args 1-4 in $4-$7,
   args 5-9 staged through $4 onto the stack at 16..32($29) with a
   40-byte frame.  Target in $25, result from $2. */
5493#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
 5494 arg7,arg8,arg9) \
 5495 do { \
 5496 volatile OrigFn _orig = (orig); \
 5497 volatile unsigned long _argvec[10]; \
 5498 volatile unsigned long _res; \
 5499 _argvec[0] = (unsigned long)_orig.nraddr; \
 5500 _argvec[1] = (unsigned long)(arg1); \
 5501 _argvec[2] = (unsigned long)(arg2); \
 5502 _argvec[3] = (unsigned long)(arg3); \
 5503 _argvec[4] = (unsigned long)(arg4); \
 5504 _argvec[5] = (unsigned long)(arg5); \
 5505 _argvec[6] = (unsigned long)(arg6); \
 5506 _argvec[7] = (unsigned long)(arg7); \
 5507 _argvec[8] = (unsigned long)(arg8); \
 5508 _argvec[9] = (unsigned long)(arg9); \
 5509 __asm__ volatile( \
 5510 "subu $29, $29, 8 \n\t" \
 5511 "sw $28, 0($29) \n\t" \
 5512 "sw $31, 4($29) \n\t" \
 5513 "lw $4, 20(%1) \n\t" \
 5514 "subu $29, $29, 40\n\t" \
 5515 "sw $4, 16($29) \n\t" \
 5516 "lw $4, 24(%1) \n\t" \
 5517 "sw $4, 20($29) \n\t" \
 5518 "lw $4, 28(%1) \n\t" \
 5519 "sw $4, 24($29) \n\t" \
 5520 "lw $4, 32(%1) \n\t" \
 5521 "sw $4, 28($29) \n\t" \
 5522 "lw $4, 36(%1) \n\t" \
 5523 "sw $4, 32($29) \n\t" \
 5524 "lw $4, 4(%1) \n\t" \
 5525 "lw $5, 8(%1) \n\t" \
 5526 "lw $6, 12(%1) \n\t" \
 5527 "lw $7, 16(%1) \n\t" \
 5528 "lw $25, 0(%1) \n\t" /* target->t9 */ \
 5529 VALGRIND_CALL_NOREDIR_T9 \
 5530 "addu $29, $29, 40 \n\t" \
 5531 "lw $28, 0($29) \n\t" \
 5532 "lw $31, 4($29) \n\t" \
 5533 "addu $29, $29, 8 \n\t" \
 5534 "move %0, $2\n" \
 5535 : /*out*/ "=r" (_res) \
 5536 : /*in*/ "0" (&_argvec[0]) \
 5537 : /*trash*/ "memory", __CALLER_SAVED_REGS \
 5538 ); \
 5539 lval = (__typeof__(lval)) _res; \
 5540 } while (0)
5541
/* CALL_FN_W_10W (mips32): ten-argument hidden call; args 1-4 in $4-$7,
   args 5-10 staged through $4 onto the stack at 16..36($29) with a
   48-byte frame.  Target in $25, result from $2. */
5542#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
 5543 arg7,arg8,arg9,arg10) \
 5544 do { \
 5545 volatile OrigFn _orig = (orig); \
 5546 volatile unsigned long _argvec[11]; \
 5547 volatile unsigned long _res; \
 5548 _argvec[0] = (unsigned long)_orig.nraddr; \
 5549 _argvec[1] = (unsigned long)(arg1); \
 5550 _argvec[2] = (unsigned long)(arg2); \
 5551 _argvec[3] = (unsigned long)(arg3); \
 5552 _argvec[4] = (unsigned long)(arg4); \
 5553 _argvec[5] = (unsigned long)(arg5); \
 5554 _argvec[6] = (unsigned long)(arg6); \
 5555 _argvec[7] = (unsigned long)(arg7); \
 5556 _argvec[8] = (unsigned long)(arg8); \
 5557 _argvec[9] = (unsigned long)(arg9); \
 5558 _argvec[10] = (unsigned long)(arg10); \
 5559 __asm__ volatile( \
 5560 "subu $29, $29, 8 \n\t" \
 5561 "sw $28, 0($29) \n\t" \
 5562 "sw $31, 4($29) \n\t" \
 5563 "lw $4, 20(%1) \n\t" \
 5564 "subu $29, $29, 48\n\t" \
 5565 "sw $4, 16($29) \n\t" \
 5566 "lw $4, 24(%1) \n\t" \
 5567 "sw $4, 20($29) \n\t" \
 5568 "lw $4, 28(%1) \n\t" \
 5569 "sw $4, 24($29) \n\t" \
 5570 "lw $4, 32(%1) \n\t" \
 5571 "sw $4, 28($29) \n\t" \
 5572 "lw $4, 36(%1) \n\t" \
 5573 "sw $4, 32($29) \n\t" \
 5574 "lw $4, 40(%1) \n\t" \
 5575 "sw $4, 36($29) \n\t" \
 5576 "lw $4, 4(%1) \n\t" \
 5577 "lw $5, 8(%1) \n\t" \
 5578 "lw $6, 12(%1) \n\t" \
 5579 "lw $7, 16(%1) \n\t" \
 5580 "lw $25, 0(%1) \n\t" /* target->t9 */ \
 5581 VALGRIND_CALL_NOREDIR_T9 \
 5582 "addu $29, $29, 48 \n\t" \
 5583 "lw $28, 0($29) \n\t" \
 5584 "lw $31, 4($29) \n\t" \
 5585 "addu $29, $29, 8 \n\t" \
 5586 "move %0, $2\n" \
 5587 : /*out*/ "=r" (_res) \
 5588 : /*in*/ "0" (&_argvec[0]) \
 5589 : /*trash*/ "memory", __CALLER_SAVED_REGS \
 5590 ); \
 5591 lval = (__typeof__(lval)) _res; \
 5592 } while (0)
5593
/* Call an 11-argument function on mips32: args 1..4 in $4..$7, args
   5..11 spilled to a 48-byte stack frame at offsets 16..40; target
   address in $25 (t9), result from $2 (v0), with $28 (gp) and $31
   (ra) preserved across the call. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,      \
                                  arg6,arg7,arg8,arg9,arg10,     \
                                  arg11)                         \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[12];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      _argvec[11] = (unsigned long)(arg11);                      \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "lw $4, 20(%1) \n\t"                                    \
         "subu $29, $29, 48\n\t"                                 \
         "sw $4, 16($29) \n\t"                                   \
         "lw $4, 24(%1) \n\t"                                    \
         "sw $4, 20($29) \n\t"                                   \
         "lw $4, 28(%1) \n\t"                                    \
         "sw $4, 24($29) \n\t"                                   \
         "lw $4, 32(%1) \n\t"                                    \
         "sw $4, 28($29) \n\t"                                   \
         "lw $4, 36(%1) \n\t"                                    \
         "sw $4, 32($29) \n\t"                                   \
         "lw $4, 40(%1) \n\t"                                    \
         "sw $4, 36($29) \n\t"                                   \
         "lw $4, 44(%1) \n\t"                                    \
         "sw $4, 40($29) \n\t"                                   \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $6, 12(%1) \n\t"                                    \
         "lw $7, 16(%1) \n\t"                                    \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 48 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5649
/* Call a 12-argument function on mips32: args 1..4 in $4..$7, args
   5..12 spilled to a 56-byte stack frame at offsets 16..44; target
   address in $25 (t9), result from $2 (v0), with $28 (gp) and $31
   (ra) preserved across the call.
   NOTE(review): this macro uses the plain "r" input constraint where
   the other mips32 CALL_FN_ macros use "0" (tying %1 to the output
   register).  Both are functionally acceptable here since %0 is only
   written after the last read of %1. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,      \
                                  arg6,arg7,arg8,arg9,arg10,     \
                                  arg11,arg12)                   \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[13];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      _argvec[11] = (unsigned long)(arg11);                      \
      _argvec[12] = (unsigned long)(arg12);                      \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "lw $4, 20(%1) \n\t"                                    \
         "subu $29, $29, 56\n\t"                                 \
         "sw $4, 16($29) \n\t"                                   \
         "lw $4, 24(%1) \n\t"                                    \
         "sw $4, 20($29) \n\t"                                   \
         "lw $4, 28(%1) \n\t"                                    \
         "sw $4, 24($29) \n\t"                                   \
         "lw $4, 32(%1) \n\t"                                    \
         "sw $4, 28($29) \n\t"                                   \
         "lw $4, 36(%1) \n\t"                                    \
         "sw $4, 32($29) \n\t"                                   \
         "lw $4, 40(%1) \n\t"                                    \
         "sw $4, 36($29) \n\t"                                   \
         "lw $4, 44(%1) \n\t"                                    \
         "sw $4, 40($29) \n\t"                                   \
         "lw $4, 48(%1) \n\t"                                    \
         "sw $4, 44($29) \n\t"                                   \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $6, 12(%1) \n\t"                                    \
         "lw $7, 16(%1) \n\t"                                    \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 56 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5708
5709#endif /* PLAT_mips32_linux */
5710
5711/* ------------------------- nanomips-linux -------------------- */
5712
5713#if defined(PLAT_nanomips_linux)
5714
/* These regs are trashed by the hidden call: the argument registers
   $a0..$a7, the temporaries $t0..$t5/$t8/$t9, and $at -- i.e. the
   registers the hidden call is free to clobber without saving. */
#define __CALLER_SAVED_REGS "$t4", "$t5", "$a0", "$a1", "$a2", \
"$a3", "$a4", "$a5", "$a6", "$a7", "$t0", "$t1", "$t2", "$t3", \
"$t8","$t9", "$at"
5719
/* These CALL_FN_ macros assume that on nanomips-linux,
   sizeof(unsigned long) == 4. */
5722
/* Call a 0-argument function on nanomips: the target address is
   loaded into $t9 for the no-redirect call and the result is read
   back from $a0. */
#define CALL_FN_W_v(lval, orig)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[1];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      __asm__ volatile(                                          \
         "lw $t9, 0(%1)\n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0\n"                                        \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5739
/* Call a 1-argument function on nanomips: arg1 in $a0, target in
   $t9, result read back from $a0. */
#define CALL_FN_W_W(lval, orig, arg1)                            \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[2];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      __asm__ volatile(                                          \
         "lw $t9, 0(%1)\n\t"                                     \
         "lw $a0, 4(%1)\n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0\n"                                        \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5758
/* Call a 2-argument function on nanomips: args in $a0/$a1, target in
   $t9, result read back from $a0. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[3];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      __asm__ volatile(                                          \
         "lw $t9, 0(%1)\n\t"                                     \
         "lw $a0, 4(%1)\n\t"                                     \
         "lw $a1, 8(%1)\n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0\n"                                        \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5779
/* Call a 3-argument function on nanomips: args in $a0..$a2, target
   in $t9, result read back from $a0. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[4];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      __asm__ volatile(                                          \
         "lw $t9, 0(%1)\n\t"                                     \
         "lw $a0, 4(%1)\n\t"                                     \
         "lw $a1, 8(%1)\n\t"                                     \
         "lw $a2,12(%1)\n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0\n"                                        \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5802
/* Call a 4-argument function on nanomips: args in $a0..$a3, target
   in $t9, result read back from $a0. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)          \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[5];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      __asm__ volatile(                                          \
         "lw $t9, 0(%1)\n\t"                                     \
         "lw $a0, 4(%1)\n\t"                                     \
         "lw $a1, 8(%1)\n\t"                                     \
         "lw $a2,12(%1)\n\t"                                     \
         "lw $a3,16(%1)\n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0\n"                                        \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5827
/* Call a 5-argument function on nanomips: args in $a0..$a4, target
   in $t9, result read back from $a0. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[6];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      __asm__ volatile(                                          \
         "lw $t9, 0(%1)\n\t"                                     \
         "lw $a0, 4(%1)\n\t"                                     \
         "lw $a1, 8(%1)\n\t"                                     \
         "lw $a2,12(%1)\n\t"                                     \
         "lw $a3,16(%1)\n\t"                                     \
         "lw $a4,20(%1)\n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0\n"                                        \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 6-argument function on nanomips: args in $a0..$a5, target
   in $t9, result read back from $a0. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[7];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      __asm__ volatile(                                          \
         "lw $t9, 0(%1)\n\t"                                     \
         "lw $a0, 4(%1)\n\t"                                     \
         "lw $a1, 8(%1)\n\t"                                     \
         "lw $a2,12(%1)\n\t"                                     \
         "lw $a3,16(%1)\n\t"                                     \
         "lw $a4,20(%1)\n\t"                                     \
         "lw $a5,24(%1)\n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0\n"                                        \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5882
/* Call a 7-argument function on nanomips: args in $a0..$a6, target
   in $t9, result read back from $a0. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7)                                       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[8];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      __asm__ volatile(                                          \
         "lw $t9, 0(%1)\n\t"                                     \
         "lw $a0, 4(%1)\n\t"                                     \
         "lw $a1, 8(%1)\n\t"                                     \
         "lw $a2,12(%1)\n\t"                                     \
         "lw $a3,16(%1)\n\t"                                     \
         "lw $a4,20(%1)\n\t"                                     \
         "lw $a5,24(%1)\n\t"                                     \
         "lw $a6,28(%1)\n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0\n"                                        \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5914
/* Call an 8-argument function on nanomips: all args fit in registers
   $a0..$a7; target in $t9, result read back from $a0. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[9];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      __asm__ volatile(                                          \
         "lw $t9, 0(%1)\n\t"                                     \
         "lw $a0, 4(%1)\n\t"                                     \
         "lw $a1, 8(%1)\n\t"                                     \
         "lw $a2,12(%1)\n\t"                                     \
         "lw $a3,16(%1)\n\t"                                     \
         "lw $a4,20(%1)\n\t"                                     \
         "lw $a5,24(%1)\n\t"                                     \
         "lw $a6,28(%1)\n\t"                                     \
         "lw $a7,32(%1)\n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0\n"                                        \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5948
/* Call a 9-argument function on nanomips: args 1..8 in $a0..$a7;
   arg9 is spilled to a freshly allocated 16-byte stack area (16
   rather than 4 bytes, presumably to keep $sp 16-byte aligned --
   TODO confirm against the nanomips ABI).  Target in $t9, result
   read back from $a0. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8,arg9)                             \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[10];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      __asm__ volatile(                                          \
         "addiu $sp, $sp, -16 \n\t"                              \
         "lw $t9,36(%1) \n\t"                                    \
         "sw $t9, 0($sp) \n\t"                                   \
         "lw $t9, 0(%1) \n\t"                                    \
         "lw $a0, 4(%1) \n\t"                                    \
         "lw $a1, 8(%1) \n\t"                                    \
         "lw $a2,12(%1) \n\t"                                    \
         "lw $a3,16(%1) \n\t"                                    \
         "lw $a4,20(%1) \n\t"                                    \
         "lw $a5,24(%1) \n\t"                                    \
         "lw $a6,28(%1) \n\t"                                    \
         "lw $a7,32(%1) \n\t"                                    \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0 \n\t"                                     \
         "addiu $sp, $sp, 16 \n\t"                               \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5987
/* Call a 10-argument function on nanomips: args 1..8 in $a0..$a7;
   args 9..10 are spilled to a 16-byte stack area at offsets 0 and 4.
   Target in $t9, result read back from $a0. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10)                      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[11];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      __asm__ volatile(                                          \
         "addiu $sp, $sp, -16 \n\t"                              \
         "lw $t9,36(%1) \n\t"                                    \
         "sw $t9, 0($sp) \n\t"                                   \
         "lw $t9,40(%1) \n\t"                                    \
         "sw $t9, 4($sp) \n\t"                                   \
         "lw $t9, 0(%1) \n\t"                                    \
         "lw $a0, 4(%1) \n\t"                                    \
         "lw $a1, 8(%1) \n\t"                                    \
         "lw $a2,12(%1) \n\t"                                    \
         "lw $a3,16(%1) \n\t"                                    \
         "lw $a4,20(%1) \n\t"                                    \
         "lw $a5,24(%1) \n\t"                                    \
         "lw $a6,28(%1) \n\t"                                    \
         "lw $a7,32(%1) \n\t"                                    \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0 \n\t"                                     \
         "addiu $sp, $sp, 16 \n\t"                               \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
6029
/* Call an 11-argument function on nanomips: args 1..8 in $a0..$a7;
   args 9..11 are spilled to a 16-byte stack area at offsets 0, 4 and
   8.  Target in $t9, result read back from $a0. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,      \
                                  arg6,arg7,arg8,arg9,arg10,     \
                                  arg11)                         \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[12];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      _argvec[11] = (unsigned long)(arg11);                      \
      __asm__ volatile(                                          \
         "addiu $sp, $sp, -16 \n\t"                              \
         "lw $t9,36(%1) \n\t"                                    \
         "sw $t9, 0($sp) \n\t"                                   \
         "lw $t9,40(%1) \n\t"                                    \
         "sw $t9, 4($sp) \n\t"                                   \
         "lw $t9,44(%1) \n\t"                                    \
         "sw $t9, 8($sp) \n\t"                                   \
         "lw $t9, 0(%1) \n\t"                                    \
         "lw $a0, 4(%1) \n\t"                                    \
         "lw $a1, 8(%1) \n\t"                                    \
         "lw $a2,12(%1) \n\t"                                    \
         "lw $a3,16(%1) \n\t"                                    \
         "lw $a4,20(%1) \n\t"                                    \
         "lw $a5,24(%1) \n\t"                                    \
         "lw $a6,28(%1) \n\t"                                    \
         "lw $a7,32(%1) \n\t"                                    \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0 \n\t"                                     \
         "addiu $sp, $sp, 16 \n\t"                               \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
6075
/* Call a 12-argument function on nanomips: args 1..8 in $a0..$a7;
   args 9..12 fill the whole 16-byte stack area at offsets 0..12.
   Target in $t9, result read back from $a0. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,      \
                                  arg6,arg7,arg8,arg9,arg10,     \
                                  arg11,arg12)                   \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[13];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      _argvec[11] = (unsigned long)(arg11);                      \
      _argvec[12] = (unsigned long)(arg12);                      \
      __asm__ volatile(                                          \
         "addiu $sp, $sp, -16 \n\t"                              \
         "lw $t9,36(%1) \n\t"                                    \
         "sw $t9, 0($sp) \n\t"                                   \
         "lw $t9,40(%1) \n\t"                                    \
         "sw $t9, 4($sp) \n\t"                                   \
         "lw $t9,44(%1) \n\t"                                    \
         "sw $t9, 8($sp) \n\t"                                   \
         "lw $t9,48(%1) \n\t"                                    \
         "sw $t9,12($sp) \n\t"                                   \
         "lw $t9, 0(%1) \n\t"                                    \
         "lw $a0, 4(%1) \n\t"                                    \
         "lw $a1, 8(%1) \n\t"                                    \
         "lw $a2,12(%1) \n\t"                                    \
         "lw $a3,16(%1) \n\t"                                    \
         "lw $a4,20(%1) \n\t"                                    \
         "lw $a5,24(%1) \n\t"                                    \
         "lw $a6,28(%1) \n\t"                                    \
         "lw $a7,32(%1) \n\t"                                    \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0 \n\t"                                     \
         "addiu $sp, $sp, 16 \n\t"                               \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
6124
6125#endif /* PLAT_nanomips_linux */
6126
6127/* ------------------------- mips64-linux ------------------------- */
6128
6129#if defined(PLAT_mips64_linux)
6130
/* These regs are trashed by the hidden call: $2/$3 (v0/v1), $4..$11
   (the argument registers), $12..$15 and $24 (temporaries), and $25
   (t9) / $31 (ra) -- i.e. everything the hidden call may clobber
   without saving. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
6135
6136/* These CALL_FN_ macros assume that on mips64-linux,
6137 sizeof(long long) == 8. */
6138
/* Widen a value to the 64-bit register type: the intermediate (long)
   cast makes narrower values and pointers go through a signed 64-bit
   widening, presumably matching how mips64 holds 32-bit values in
   registers (sign-extended) -- TODO confirm against the n64 ABI. */
#define MIPS64_LONG2REG_CAST(x) ((long long)(long)x)
6140
/* Call a 0-argument function on mips64: target address in $25 (t9)
   for the no-redirect call, result read from $2 (v0). */
#define CALL_FN_W_v(lval, orig)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[1];                    \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      __asm__ volatile(                                          \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)
6157
/* Call a 1-argument function on mips64: arg1 in $4 (a0), target in
   $25 (t9), result from $2 (v0). */
#define CALL_FN_W_W(lval, orig, arg1)                            \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[2];                    \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      __asm__ volatile(                                          \
         "ld $4, 8(%1)\n\t"   /* arg1*/                          \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)
6176
/* Call a 2-argument function on mips64: args in $4/$5, target in $25
   (t9), result from $2 (v0).
   NOTE(review): unlike the other mips64 macros, nraddr is stored
   without MIPS64_LONG2REG_CAST; harmless if nraddr is already
   register-width here -- confirm against OrigFn's declaration. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[3];                    \
      volatile unsigned long long _res;                          \
      _argvec[0] = _orig.nraddr;                                 \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      __asm__ volatile(                                          \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)
6197
6198
/* Call a 3-argument function on mips64: args in $4..$6, target in
   $25 (t9), result from $2 (v0).
   NOTE(review): nraddr is stored without MIPS64_LONG2REG_CAST here,
   as in CALL_FN_W_WW -- confirm this is intentional. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[4];                    \
      volatile unsigned long long _res;                          \
      _argvec[0] = _orig.nraddr;                                 \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      __asm__ volatile(                                          \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)
6221
/* Call a 4-argument function on mips64: args in $4..$7, target in
   $25 (t9), result from $2 (v0). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)          \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[5];                    \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      __asm__ volatile(                                          \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)
6246
/* Call a 5-argument function on mips64: args in $4..$8, target in
   $25 (t9), result from $2 (v0). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[6];                    \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                   \
      __asm__ volatile(                                          \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $8, 40(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)
6273
/* Call a 6-argument function on mips64: args in $4..$9, target in
   $25 (t9), result from $2 (v0). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[7];                    \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                   \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                   \
      __asm__ volatile(                                          \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $8, 40(%1)\n\t"                                     \
         "ld $9, 48(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)
6302
/* Call a 7-argument function on mips64: args in $4..$10, target in
   $25 (t9), result from $2 (v0). */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7)                                       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[8];                    \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                   \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                   \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                   \
      __asm__ volatile(                                          \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $8, 40(%1)\n\t"                                     \
         "ld $9, 48(%1)\n\t"                                     \
         "ld $10, 56(%1)\n\t"                                    \
         "ld $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)
6334
/* Call an 8-argument function on mips64: all args fit in the
   register-argument set $4..$11; target in $25 (t9), result from $2
   (v0). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[9];                    \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                   \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                   \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                   \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                   \
      __asm__ volatile(                                          \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $8, 40(%1)\n\t"                                     \
         "ld $9, 48(%1)\n\t"                                     \
         "ld $10, 56(%1)\n\t"                                    \
         "ld $11, 64(%1)\n\t"                                    \
         "ld $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)
6368
/* Call a 9-argument function on mips64: args 1..8 in $4..$11; arg9
   is spilled to an 8-byte stack slot at 0($29).  Target in $25 (t9),
   result from $2 (v0).
   NOTE(review): only 8 bytes are allocated, which changes $sp's
   16-byte alignment across the call; this matches upstream
   valgrind.h, but confirm the n64 ABI tolerates it here. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8,arg9)                             \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[10];                   \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                   \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                   \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                   \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                   \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                   \
      __asm__ volatile(                                          \
         "dsubu $29, $29, 8\n\t"                                 \
         "ld $4, 72(%1)\n\t"                                     \
         "sd $4, 0($29)\n\t"                                     \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $8, 40(%1)\n\t"                                     \
         "ld $9, 48(%1)\n\t"                                     \
         "ld $10, 56(%1)\n\t"                                    \
         "ld $11, 64(%1)\n\t"                                    \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "daddu $29, $29, 8\n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)
6407
/* Call a 10-argument function on mips64: args 1..8 in $4..$11; args
   9..10 are spilled to two 8-byte stack slots.  Target in $25 (t9),
   result from $2 (v0). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10)                      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[11];                   \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                   \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                   \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                   \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                   \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                   \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                 \
      __asm__ volatile(                                          \
         "dsubu $29, $29, 16\n\t"                                \
         "ld $4, 72(%1)\n\t"                                     \
         "sd $4, 0($29)\n\t"                                     \
         "ld $4, 80(%1)\n\t"                                     \
         "sd $4, 8($29)\n\t"                                     \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $8, 40(%1)\n\t"                                     \
         "ld $9, 48(%1)\n\t"                                     \
         "ld $10, 56(%1)\n\t"                                    \
         "ld $11, 64(%1)\n\t"                                    \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "daddu $29, $29, 16\n\t"                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)
6449
/* Call an 11-argument function on mips64: args 1..8 in $4..$11; args
   9..11 are spilled to three 8-byte stack slots.  Target in $25
   (t9), result from $2 (v0). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,      \
                                  arg6,arg7,arg8,arg9,arg10,     \
                                  arg11)                         \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[12];                   \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                   \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                   \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                   \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                   \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                   \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                 \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11);                 \
      __asm__ volatile(                                          \
         "dsubu $29, $29, 24\n\t"                                \
         "ld $4, 72(%1)\n\t"                                     \
         "sd $4, 0($29)\n\t"                                     \
         "ld $4, 80(%1)\n\t"                                     \
         "sd $4, 8($29)\n\t"                                     \
         "ld $4, 88(%1)\n\t"                                     \
         "sd $4, 16($29)\n\t"                                    \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $8, 40(%1)\n\t"                                     \
         "ld $9, 48(%1)\n\t"                                     \
         "ld $10, 56(%1)\n\t"                                    \
         "ld $11, 64(%1)\n\t"                                    \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "daddu $29, $29, 24\n\t"                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)
6495
/* Call a 12-argument function on mips64: args 1..8 in $4..$11; args
   9..12 are spilled to four 8-byte stack slots.  Target in $25 (t9),
   result from $2 (v0). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,      \
                                  arg6,arg7,arg8,arg9,arg10,     \
                                  arg11,arg12)                   \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[13];                   \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                   \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                   \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                   \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                   \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                   \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                 \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11);                 \
      _argvec[12] = MIPS64_LONG2REG_CAST(arg12);                 \
      __asm__ volatile(                                          \
         "dsubu $29, $29, 32\n\t"                                \
         "ld $4, 72(%1)\n\t"                                     \
         "sd $4, 0($29)\n\t"                                     \
         "ld $4, 80(%1)\n\t"                                     \
         "sd $4, 8($29)\n\t"                                     \
         "ld $4, 88(%1)\n\t"                                     \
         "sd $4, 16($29)\n\t"                                    \
         "ld $4, 96(%1)\n\t"                                     \
         "sd $4, 24($29)\n\t"                                    \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $8, 40(%1)\n\t"                                     \
         "ld $9, 48(%1)\n\t"                                     \
         "ld $10, 56(%1)\n\t"                                    \
         "ld $11, 64(%1)\n\t"                                    \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "daddu $29, $29, 32\n\t"                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)
6544
6545#endif /* PLAT_mips64_linux */
6546
6547/* ------------------------------------------------------------------ */
6548/* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
6549/* */
6550/* ------------------------------------------------------------------ */
6551
6552/* Some request codes. There are many more of these, but most are not
6553 exposed to end-user view. These are the public ones, all of the
6554 form 0x1000 + small_number.
6555
6556 Core ones are in the range 0x00000000--0x0000ffff. The non-public
6557 ones start at 0x2000.
6558*/
6559
6560/* These macros are used by tools -- they must be public, but don't
6561 embed them into other programs. */
/* Build the base client-request code for the tool identified by the
   two-character tag (a, b): the low byte of 'a' becomes bits 31..24 of
   the code and the low byte of 'b' becomes bits 23..16. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)((((a) & 0xff) << 24) | (((b) & 0xff) << 16)))
/* Nonzero iff request code 'v' belongs to the tool tagged (a, b),
   i.e. the top 16 bits of 'v' equal that tool's base code. */
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
6566
6567/* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
6568 This enum comprises an ABI exported by Valgrind to programs
6569 which use client requests. DO NOT CHANGE THE NUMERIC VALUES OF THESE
6570 ENTRIES, NOR DELETE ANY -- add new ones at the end of the most
6571 relevant group. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a monitor
             command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* Allows the client program to change a dynamic command line
             option.  */
          VG_USERREQ__CLO_CHANGE = 0x1203,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
          /* Note: 0x130b is out of numeric sequence at this position;
             the value is ABI (see the warning above) and must not be
             changed. */
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC = 0x1305,
          VG_USERREQ__MEMPOOL_FREE = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Some requests used for Valgrind internal, such as
             self-test or self-hosting. */
          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901,
          /* Used by Inner Valgrind to inform Outer Valgrind where to
             find the list of inner guest threads */
          VG_USERREQ__INNER_THREADS    = 0x1902
   } Vg_ClientRequest;
6653
/* Non-GCC compilers do not know GCC's __extension__ keyword; make it
   expand to nothing so the code below still parses there. */
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif
6657
6658
6659/* Returns the number of Valgrinds this code is running under. That
6660 is, 0 if running natively, 1 if running under Valgrind, 2 if
6661 running under Valgrind which is running under another Valgrind,
6662 etc. */
/* Nb: the final line deliberately has no trailing '\'.  The original
   definition ended with a stray line-continuation, which spliced the
   following source line into the macro -- harmless only while that
   line stays blank. */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)
6667
6668
6669/* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
6670 _qzz_len - 1]. Useful if you are debugging a JITter or some such,
6671 since it provides a way to make sure valgrind will retranslate the
6672 invalidated area. Returns no value. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)              \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS,   \
                                   _qzz_addr, _qzz_len, 0, 0, 0)

/* Used by an inner Valgrind to tell an outer Valgrind where to find
   the list of the inner's guest threads (see VG_USERREQ__INNER_THREADS
   in the request enum above).  Returns no value. */
#define VALGRIND_INNER_THREADS(_qzz_addr)                              \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__INNER_THREADS,          \
                                   _qzz_addr, 0, 0, 0, 0)
6680
6681
6682/* These requests are for getting Valgrind itself to print something.
6683 Possibly with a backtrace. This is a really ugly hack. The return value
6684 is the number of characters printed, excluding the "**<pid>** " part at the
6685 start and the backtrace (if present). */
6686
6687#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6688/* Modern GCC will optimize the static routine out if unused,
6689 and unused attribute will shut down warnings about it. */
6690static int VALGRIND_PRINTF(const char *format, ...)
6691 __attribute__((format(__printf__, 1, 2), __unused__));
6692#endif
6693static int
6694#if defined(_MSC_VER)
6695__inline
6696#endif
6697VALGRIND_PRINTF(const char *format, ...)
6698{
6699#if !IS_ENABLED(CONFIG_VALGRIND)
6700 (void)format;
6701 return 0;
6702#else /* CONFIG_VALGRIND */
6703#if defined(_MSC_VER) || defined(__MINGW64__)
6704 uintptr_t _qzz_res;
6705#else
6706 unsigned long _qzz_res;
6707#endif
6708 va_list vargs;
6709 va_start(vargs, format);
6710#if defined(_MSC_VER) || defined(__MINGW64__)
6711 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6712 VG_USERREQ__PRINTF_VALIST_BY_REF,
6713 (uintptr_t)format,
6714 (uintptr_t)&vargs,
6715 0, 0, 0);
6716#else
6717 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6718 VG_USERREQ__PRINTF_VALIST_BY_REF,
6719 (unsigned long)format,
6720 (unsigned long)&vargs,
6721 0, 0, 0);
6722#endif
6723 va_end(vargs);
6724 return (int)_qzz_res;
6725#endif /* CONFIG_VALGRIND */
6726}
6727
6728#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6729static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6730 __attribute__((format(__printf__, 1, 2), __unused__));
6731#endif
6732static int
6733#if defined(_MSC_VER)
6734__inline
6735#endif
6736VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6737{
6738#if !IS_ENABLED(CONFIG_VALGRIND)
6739 (void)format;
6740 return 0;
6741#else /* CONFIG_VALGRIND */
6742#if defined(_MSC_VER) || defined(__MINGW64__)
6743 uintptr_t _qzz_res;
6744#else
6745 unsigned long _qzz_res;
6746#endif
6747 va_list vargs;
6748 va_start(vargs, format);
6749#if defined(_MSC_VER) || defined(__MINGW64__)
6750 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6751 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6752 (uintptr_t)format,
6753 (uintptr_t)&vargs,
6754 0, 0, 0);
6755#else
6756 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6757 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6758 (unsigned long)format,
6759 (unsigned long)&vargs,
6760 0, 0, 0);
6761#endif
6762 va_end(vargs);
6763 return (int)_qzz_res;
6764#endif /* CONFIG_VALGRIND */
6765}
6766
6767
6768/* These requests allow control to move from the simulated CPU to the
6769 real CPU, calling an arbitrary function.
6770
6771 Note that the current ThreadId is inserted as the first argument.
6772 So this call:
6773
6774 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
6775
6776 requires f to have this signature:
6777
6778 Word f(Word tid, Word arg1, Word arg2)
6779
6780 where "Word" is a word-sized type.
6781
6782 Note that these client requests are not entirely reliable. For example,
6783 if you call a function with them that subsequently calls printf(),
6784 there's a high chance Valgrind will crash. Generally, your prospects of
6785 these working are made higher if the called function does not refer to
6786 any global variables, and does not refer to any libc or other functions
6787 (printf et al). Any kind of entanglement with libc or dynamic linking is
6788 likely to have a bad outcome, for tricky reasons which we've grappled
6789 with a lot in the past.
6790*/
/* Run _qyy_fn(tid) on the real CPU (see the caveats in the comment
   block above); returns its word-sized result, or the default 0 when
   not running under Valgrind. */
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)

/* As above, for _qyy_fn(tid, _qyy_arg1). */
#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL1,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, 0, 0, 0)

/* As above, for _qyy_fn(tid, _qyy_arg1, _qyy_arg2). */
#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL2,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2, 0, 0)

/* As above, for _qyy_fn(tid, _qyy_arg1, _qyy_arg2, _qyy_arg3). */
#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL3,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2,         \
                                    _qyy_arg3, 0)


/* Counts the number of errors that have been recorded by a tool.  Nb:
   the tool must record the errors with VG_(maybe_record_error)() or
   VG_(unique_error)() for them to be counted.  Evaluates to an
   unsigned int; 0 when not running under Valgrind. */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)
6825
6826/* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
6827 when heap blocks are allocated in order to give accurate results. This
6828 happens automatically for the standard allocator functions such as
6829 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
6830 delete[], etc.
6831
6832 But if your program uses a custom allocator, this doesn't automatically
6833 happen, and Valgrind will not do as well. For example, if you allocate
6834 superblocks with mmap() and then allocates chunks of the superblocks, all
6835 Valgrind's observations will be at the mmap() level and it won't know that
6836 the chunks should be considered separate entities. In Memcheck's case,
6837 that means you probably won't get heap block overrun detection (because
6838 there won't be redzones marked as unaddressable) and you definitely won't
6839 get any leak detection.
6840
6841 The following client requests allow a custom allocator to be annotated so
6842 that it can be handled accurately by Valgrind.
6843
6844 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
6845 by a malloc()-like function. For Memcheck (an illustrative case), this
6846 does two things:
6847
6848 - It records that the block has been allocated. This means any addresses
6849 within the block mentioned in error messages will be
6850 identified as belonging to the block. It also means that if the block
6851 isn't freed it will be detected by the leak checker.
6852
6853 - It marks the block as being addressable and undefined (if 'is_zeroed' is
6854 not set), or addressable and defined (if 'is_zeroed' is set). This
6855 controls how accesses to the block by the program are handled.
6856
6857 'addr' is the start of the usable block (ie. after any
6858 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
6859 can apply redzones -- these are blocks of padding at the start and end of
6860 each block. Adding redzones is recommended as it makes it much more likely
6861 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
6862 zeroed (or filled with another predictable value), as is the case for
6863 calloc().
6864
6865 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
6866 heap block -- that will be used by the client program -- is allocated.
6867 It's best to put it at the outermost level of the allocator if possible;
6868 for example, if you have a function my_alloc() which calls
6869 internal_alloc(), and the client request is put inside internal_alloc(),
6870 stack traces relating to the heap block will contain entries for both
6871 my_alloc() and internal_alloc(), which is probably not what you want.
6872
6873 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
6874 custom blocks from within a heap block, B, that has been allocated with
6875 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
6876 -- the custom blocks will take precedence.
6877
6878 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
6879 Memcheck, it does two things:
6880
6881 - It records that the block has been deallocated. This assumes that the
6882 block was annotated as having been allocated via
6883 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6884
6885 - It marks the block as being unaddressable.
6886
6887 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
6888 heap block is deallocated.
6889
6890 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
6891 Memcheck, it does four things:
6892
6893 - It records that the size of a block has been changed. This assumes that
6894 the block was annotated as having been allocated via
6895 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6896
6897 - If the block shrunk, it marks the freed memory as being unaddressable.
6898
6899 - If the block grew, it marks the new area as undefined and defines a red
6900 zone past the end of the new block.
6901
6902 - The V-bits of the overlap between the old and the new block are preserved.
6903
6904 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
6905 and before deallocation of the old block.
6906
6907 In many cases, these three client requests will not be enough to get your
6908 allocator working well with Memcheck. More specifically, if your allocator
6909 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
6910 will be necessary to mark the memory as addressable just before the zeroing
6911 occurs, otherwise you'll get a lot of invalid write errors. For example,
6912 you'll need to do this if your allocator recycles freed blocks, but it
6913 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
6914 Alternatively, if your allocator reuses freed blocks for allocator-internal
6915 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
6916
6917 Really, what's happening is a blurring of the lines between the client
6918 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
6919 memory should be considered unaddressable to the client program, but the
6920 allocator knows more than the rest of the client program and so may be able
6921 to safely access it. Extra client requests are necessary for Valgrind to
6922 understand the distinction between the allocator and the rest of the
6923 program.
6924
6925 Ignored if addr == 0.
6926*/
/* See the long explanatory comment above for the full contract.
   Ignored if addr == 0. */
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)    \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
                                    addr, sizeB, rzB, is_zeroed, 0)

/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK,    \
                                    addr, oldSizeB, newSizeB, rzB, 0)

/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                        \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,   \
                                    addr, rzB, 0, 0, 0)

/* Create a memory pool.  rzB is the redzone size applied to chunks;
   is_zeroed indicates chunks are handed out zero-filled. */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,   \
                                    pool, rzB, is_zeroed, 0, 0)
6949
6950/* Create a memory pool with some flags specifying extended behaviour.
6951 When flags is zero, the behaviour is identical to VALGRIND_CREATE_MEMPOOL.
6952
6953 The flag VALGRIND_MEMPOOL_METAPOOL specifies that the pieces of memory
6954 associated with the pool using VALGRIND_MEMPOOL_ALLOC will be used
6955 by the application as superblocks to dole out MALLOC_LIKE blocks using
6956 VALGRIND_MALLOCLIKE_BLOCK. In other words, a meta pool is a "2 levels"
6957 pool : first level is the blocks described by VALGRIND_MEMPOOL_ALLOC.
6958 The second level blocks are described using VALGRIND_MALLOCLIKE_BLOCK.
6959 Note that the association between the pool and the second level blocks
6960 is implicit : second level blocks will be located inside first level
6961 blocks. It is necessary to use the VALGRIND_MEMPOOL_METAPOOL flag
6962 for such 2 levels pools, as otherwise valgrind will detect overlapping
6963 memory blocks, and will abort execution (e.g. during leak search).
6964
6965 Such a meta pool can also be marked as an 'auto free' pool using the flag
6966 VALGRIND_MEMPOOL_AUTO_FREE, which must be OR-ed together with the
6967 VALGRIND_MEMPOOL_METAPOOL. For an 'auto free' pool, VALGRIND_MEMPOOL_FREE
6968 will automatically free the second level blocks that are contained
6969 inside the first level block freed with VALGRIND_MEMPOOL_FREE.
6970 In other words, calling VALGRIND_MEMPOOL_FREE will cause implicit calls
6971 to VALGRIND_FREELIKE_BLOCK for all the second level blocks included
6972 in the first level block.
6973 Note: it is an error to use the VALGRIND_MEMPOOL_AUTO_FREE flag
6974 without the VALGRIND_MEMPOOL_METAPOOL flag.
6975*/
/* Flags for VALGRIND_CREATE_MEMPOOL_EXT -- see the comment above.
   AUTO_FREE is only valid together with METAPOOL. */
#define VALGRIND_MEMPOOL_AUTO_FREE  1
#define VALGRIND_MEMPOOL_METAPOOL   2
#define VALGRIND_CREATE_MEMPOOL_EXT(pool, rzB, is_zeroed, flags)  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,   \
                                    pool, rzB, is_zeroed, flags, 0)

/* Destroy a memory pool. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                            \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,  \
                                    pool, 0, 0, 0, 0)

/* Associate a piece of memory with a memory pool. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,    \
                                    pool, addr, size, 0, 0)

/* Disassociate a piece of memory from a memory pool. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,     \
                                    pool, addr, 0, 0, 0)

/* Disassociate any pieces outside a particular range. */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,     \
                                    pool, addr, size, 0, 0)

/* Tell the tool that the pool previously anchored at address poolA
   has moved to address poolB.  NOTE(review): the original comment here
   duplicated VALGRIND_MEMPOOL_CHANGE's "resize and/or move a piece"
   text; reworded to match the VG_USERREQ__MOVE_MEMPOOL request -- see
   the Valgrind manual's mempool description. */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                       \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,     \
                                    poolA, poolB, 0, 0, 0)

/* Resize and/or move a piece associated with a memory pool. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,   \
                                    pool, addrA, addrB, size, 0)

/* Return 1 if a mempool exists, else 0. */
#define VALGRIND_MEMPOOL_EXISTS(pool)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MEMPOOL_EXISTS,        \
                               pool, 0, 0, 0, 0)
7017
/* Mark a piece of memory as being a stack.  Returns a stack id
   (an unsigned int; pass it to the DEREGISTER/CHANGE macros below).
   start is the lowest addressable stack byte, end is the highest
   addressable stack byte. */
#define VALGRIND_STACK_REGISTER(start, end)                       \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__STACK_REGISTER,        \
                               start, end, 0, 0, 0)

/* Unmark the piece of memory associated with a stack id (as returned
   by VALGRIND_STACK_REGISTER) as being a stack. */
#define VALGRIND_STACK_DEREGISTER(id)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
                                    id, 0, 0, 0, 0)

/* Change the start and end address of the stack id.
   start is the new lowest addressable stack byte, end is the new highest
   addressable stack byte. */
#define VALGRIND_STACK_CHANGE(id, start, end)                     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,     \
                                    id, start, end, 0, 0)
7038
/* Load PDB debug info for Wine PE image_map. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
                                    fd, ptr, total_size, delta, 0)

/* Map a code address to a source file name and line number.  buf64
   must point to a 64-byte buffer in the caller's address space.  The
   result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                    \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,      \
                               addr, buf64, 0, 0, 0)

/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled.
   (Implemented as a +1 delta to the per-thread error-disablement
   counter; see VG_USERREQ__CHANGE_ERR_DISABLEMENT above.) */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    1, 0, 0, 0, 0)

/* Re-enable error reporting (a -1 delta), as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING. */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    -1, 0, 0, 0, 0)

/* Execute a monitor command from the client program.
   If a connection is opened with GDB, the output will be sent
   according to the output mode set for vgdb.
   If no connection is opened, output will go to the log output.
   Returns 1 if command not recognised, 0 otherwise. */
#define VALGRIND_MONITOR_COMMAND(command)                               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
                                    command, 0, 0, 0, 0)


/* Change the value of a dynamic command line option.
   Note that unknown or not dynamically changeable options
   will cause a warning message to be output. */
#define VALGRIND_CLO_CHANGE(option)                           \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CLO_CHANGE,   \
                                    option, 0, 0, 0, 0)
7087
7088
7089#undef PLAT_x86_darwin
7090#undef PLAT_amd64_darwin
7091#undef PLAT_x86_win32
7092#undef PLAT_amd64_win64
7093#undef PLAT_x86_linux
7094#undef PLAT_amd64_linux
7095#undef PLAT_ppc32_linux
7096#undef PLAT_ppc64be_linux
7097#undef PLAT_ppc64le_linux
7098#undef PLAT_arm_linux
7099#undef PLAT_s390x_linux
7100#undef PLAT_mips32_linux
7101#undef PLAT_mips64_linux
7102#undef PLAT_nanomips_linux
7103#undef PLAT_x86_solaris
7104#undef PLAT_amd64_solaris
7105
7106#endif /* __VALGRIND_H */