blob: 6114c4a0e104f00e0f5d32d70a016941d91da701 [file] [log] [blame]
Sean Anderson3b004842022-03-23 14:04:48 -04001/* SPDX-License-Identifier: GPL-2.0+ AND bzip2-1.0.6 */
2/*
3 This file is part of Valgrind, a dynamic binary instrumentation
4 framework.
5
6 Copyright (C) 2000-2017 Julian Seward. All rights reserved.
7 Copyright (C) 2021 Sean Anderson <seanga2@gmail.com>
8*/
9
10/* This file is for inclusion into client (your!) code.
11
12 You can use these macros to manipulate and query Valgrind's
13 execution inside your own programs.
14
15 The resulting executables will still run without Valgrind, just a
16 little bit more slowly than they otherwise would, but otherwise
17 unchanged. When not running on valgrind, each client request
18 consumes very few (eg. 7) instructions, so the resulting performance
19 loss is negligible unless you plan to execute client requests
20 millions of times per second. Nevertheless, if that is still a
21 problem, you can compile with the NVALGRIND symbol defined (gcc
22 -DNVALGRIND) so that client requests are not even compiled in. */
23
24#ifndef __VALGRIND_H
25#define __VALGRIND_H
26
/* ------------------------------------------------------------------ */
/* VERSION NUMBER OF VALGRIND                                         */
/* ------------------------------------------------------------------ */

/* Specify Valgrind's version number, so that user code can
   conditionally compile based on our version number.  Note that these
   were introduced at version 3.6 and so do not exist in version 3.5
   or earlier.  The recommended way to use them to check for "version
   X.Y or later" is (eg)

#if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
    && (__VALGRIND_MAJOR__ > 3 \
        || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
*/
#define __VALGRIND_MAJOR__    3
#define __VALGRIND_MINOR__    16

#include <stdarg.h>
45
/* Nb: this file might be included in a file compiled with -ansi.  So
   we can't use C++ style "//" comments nor the "asm" keyword (instead
   use "__asm__"). */

/* Derive some tags indicating what the target platform is.  Note
   that in this file we're using the compiler's CPP symbols for
   identifying architectures, which are different to the ones we use
   within the rest of Valgrind.  Note, __powerpc__ is active for both
   32 and 64-bit PPC, whereas __powerpc64__ is only active for the
   latter (on Linux, that is).

   Misc note: how to find out what's predefined in gcc by default:
   gcc -Wp,-dM somefile.c
*/
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_nanomips_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris

#if defined(__APPLE__) && defined(__i386__)
#  define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#  define PLAT_amd64_darwin 1
#elif (defined(__MINGW32__) && defined(__i386__)) \
      || defined(__CYGWIN32__) \
      || (defined(_WIN32) && defined(_M_IX86))
#  define PLAT_x86_win32 1
#elif (defined(__MINGW32__) && defined(__x86_64__)) \
      || (defined(_WIN32) && defined(_M_X64))
/* __MINGW32__ and _WIN32 are defined in 64 bit mode as well. */
#  define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
#  define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
#  define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#  define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
/* Big Endian uses ELF version 1 */
#  define PLAT_ppc64be_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
/* Little Endian uses ELF version 2 */
#  define PLAT_ppc64le_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
#  define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#  define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
#  define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==32)
#  define PLAT_mips32_linux 1
#elif defined(__linux__) && defined(__nanomips__)
#  define PLAT_nanomips_linux 1
#elif defined(__sun) && defined(__i386__)
#  define PLAT_x86_solaris 1
#elif defined(__sun) && defined(__x86_64__)
#  define PLAT_amd64_solaris 1
#else
/* If we're not compiling for our target platform, don't generate
   any inline asms.  */
# if IS_ENABLED(CONFIG_VALGRIND)
#  error "Unsupported platform for valgrind"
# endif
#endif
125
/* ------------------------------------------------------------------ */
/* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS.  There is nothing */
/* in here of use to end-users -- skip to the next section.           */
/* ------------------------------------------------------------------ */

/*
 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
 * request. Accepts both pointers and integers as arguments.
 *
 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
 * client request that does not return a value.
 *
 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
 * client request and whose value equals the client request result. Accepts
 * both pointers and integers as arguments. Note that such calls are not
 * necessarily pure functions -- they may have side effects.
 */

/* Assigns the request result to _zzq_rlval; delegates to the EXPR form. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

/* Discards the request result; delegates to the EXPR form. */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                           _zzq_arg2,  _zzq_arg3, _zzq_arg4, _zzq_arg5) \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
156
157#if !IS_ENABLED(CONFIG_VALGRIND)
158
159/* Define NVALGRIND to completely remove the Valgrind magic sequence
160 from the compiled code (analogous to NDEBUG's effects on
161 assert()) */
162#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
163 _zzq_default, _zzq_request, \
164 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
165 (_zzq_default)
166
167#else /* ! CONFIG_VALGRIND */
168
169/* The following defines the magic code sequences which the JITter
170 spots and handles magically. Don't look too closely at them as
171 they will rot your brain.
172
173 The assembly code sequences for all architectures is in this one
174 file. This is because this file must be stand-alone, and we don't
175 want to have multiple files.
176
177 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
178 value gets put in the return slot, so that everything works when
179 this is executed not under Valgrind. Args are passed in a memory
180 block, and so there's no intrinsic limit to the number that could
181 be passed, but it's currently five.
182
183 The macro args are:
184 _zzq_rlval result lvalue
185 _zzq_default default value (result returned when running on real CPU)
186 _zzq_request request code
187 _zzq_arg1..5 request params
188
189 The other two macros are used to support function wrapping, and are
190 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
191 guest's NRADDR pseudo-register and whatever other information is
192 needed to safely run the call original from the wrapper: on
193 ppc64-linux, the R2 value at the divert point is also needed. This
194 information is abstracted into a user-visible type, OrigFn.
195
196 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
197 guest, but guarantees that the branch instruction will not be
198 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
199 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
200 complete inline asm, since it needs to be combined with more magic
201 inline asm stuff to be useful.
202*/
203
/* ----------------- x86-{linux,darwin,solaris} ---------------- */

#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin)  \
    ||  (defined(PLAT_x86_win32) && defined(__GNUC__)) \
    ||  defined(PLAT_x86_solaris)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3,  %%edi ; roll $13, %%edi\n\t"      \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgl %%edi,%%edi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__)
          || PLAT_x86_solaris */
270
/* ------------------------- x86-Win32 ------------------------- */

#if defined(PLAT_x86_win32) && !defined(__GNUC__)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#if defined(_MSC_VER)

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     __asm rol edi, 3  __asm rol edi, 13          \
                     __asm rol edi, 29 __asm rol edi, 19

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    valgrind_do_client_request_expr((uintptr_t)(_zzq_default),    \
        (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1),        \
        (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3),           \
        (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
                                uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                uintptr_t _zzq_arg5)
{
    volatile uintptr_t _zzq_args[6];
    volatile unsigned int _zzq_result;
    _zzq_args[0] = (uintptr_t)(_zzq_request);
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
            __SPECIAL_INSTRUCTION_PREAMBLE
            /* %EDX = client_request ( %EAX ) */
            __asm xchg ebx,ebx
            __asm mov _zzq_result, edx
    }
    return _zzq_result;
}

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm { __SPECIAL_INSTRUCTION_PREAMBLE                        \
            /* %EAX = guest_NRADDR */                             \
            __asm xchg ecx,ecx                                    \
            __asm mov __addr, eax                                 \
    }                                                             \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm { __SPECIAL_INSTRUCTION_PREAMBLE                        \
            __asm xchg edi,edi                                    \
    }                                                             \
 } while (0)

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 */
343
/* ----------------- amd64-{linux,darwin,solaris} --------------- */

#if defined(PLAT_amd64_linux)  ||  defined(PLAT_amd64_darwin) \
    ||  defined(PLAT_amd64_solaris) \
    ||  (defined(PLAT_amd64_win64) && defined(__GNUC__))

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3,  %%rdi ; rolq $13, %%rdi\n\t"      \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    __extension__                                                 \
    ({ volatile unsigned long int _zzq_args[6];                   \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RDX = client_request ( %RAX ) */         \
                     "xchgq %%rbx,%%rbx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long int __addr;                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RAX = guest_NRADDR */                    \
                     "xchgq %%rcx,%%rcx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgq %%rdi,%%rdi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */

/* ------------------------- amd64-Win64 ------------------------- */

#if defined(PLAT_amd64_win64) && !defined(__GNUC__)

#error Unsupported compiler.

#endif /* PLAT_amd64_win64 */
417
/* ------------------------ ppc32-linux ------------------------ */

#if defined(PLAT_ppc32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                    "rlwinm 0,0,3,0,31  ; rlwinm 0,0,13,0,31\n\t" \
                    "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({         unsigned int  _zzq_args[6];                          \
             unsigned int  _zzq_result;                           \
             unsigned int* _zzq_ptr;                              \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc32_linux */
486
/* ------------------------ ppc64-linux ------------------------ */

#if defined(PLAT_ppc64be_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;  /* what tocptr do we need? */
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3  ; rotldi 0,0,13\n\t"          \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({         unsigned long int  _zzq_args[6];                     \
             unsigned long int  _zzq_result;                      \
             unsigned long int* _zzq_ptr;                         \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64be_linux */
565
#if defined(PLAT_ppc64le_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;     /* what tocptr do we need? */
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3  ; rotldi 0,0,13\n\t"          \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({         unsigned long int  _zzq_args[6];                     \
             unsigned long int  _zzq_result;                      \
             unsigned long int* _zzq_ptr;                         \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R12 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64le_linux */
642
/* ------------------------- arm-linux ------------------------- */

#if defined(PLAT_arm_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "mov r12, r12, ror #3  ; mov r12, r12, ror #13 \n\t"  \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned int  _zzq_args[6];                          \
    volatile unsigned int  _zzq_result;                           \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("mov r3, %1\n\t" /*default*/                 \
                     "mov r4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = client_request ( R4 ) */             \
                     "orr r10, r10, r10\n\t"                      \
                     "mov %0, r3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "r3", "r4");                \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = guest_NRADDR */                      \
                     "orr r11, r11, r11\n\t"                      \
                     "mov %0, r3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R4 */        \
                     "orr r12, r12, r12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr r9, r9, r9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm_linux */
710
/* ------------------------ arm64-linux ------------------------- */

#if defined(PLAT_arm64_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "ror x12, x12, #3  ;  ror x12, x12, #13 \n\t"         \
            "ror x12, x12, #51 ;  ror x12, x12, #61 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned long int  _zzq_args[6];                     \
    volatile unsigned long int  _zzq_result;                      \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile("mov x3, %1\n\t" /*default*/                 \
                     "mov x4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = client_request ( X4 ) */             \
                     "orr x10, x10, x10\n\t"                      \
                     "mov %0, x3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" ((unsigned long int)(_zzq_default)),   \
                       "r" (&_zzq_args[0])                        \
                     : "cc","memory", "x3", "x4");                \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = guest_NRADDR */                      \
                     "orr x11, x11, x11\n\t"                      \
                     "mov %0, x3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "x3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir X8 */          \
                     "orr x12, x12, x12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr x9, x9, x9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm64_linux */
779
/* ------------------------ s390x-linux ------------------------ */

#if defined(PLAT_s390x_linux)

typedef
  struct {
     unsigned long int nraddr; /* where's the code? */
  }
  OrigFn;

/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
 * code. This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_decoder.c).
 */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "lr 15,15\n\t"                               \
                     "lr 1,1\n\t"                                 \
                     "lr 2,2\n\t"                                 \
                     "lr 3,3\n\t"

#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE  "lr 5,5\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
 ({volatile unsigned long int _zzq_args[6];                       \
   volatile unsigned long int _zzq_result;                        \
   _zzq_args[0] = (unsigned long int)(_zzq_request);              \
   _zzq_args[1] = (unsigned long int)(_zzq_arg1);                 \
   _zzq_args[2] = (unsigned long int)(_zzq_arg2);                 \
   _zzq_args[3] = (unsigned long int)(_zzq_arg3);                 \
   _zzq_args[4] = (unsigned long int)(_zzq_arg4);                 \
   _zzq_args[5] = (unsigned long int)(_zzq_arg5);                 \
   __asm__ volatile(/* r2 = args */                               \
                    "lgr 2,%1\n\t"                                \
                    /* r3 = default */                            \
                    "lgr 3,%2\n\t"                                \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    __CLIENT_REQUEST_CODE                         \
                    /* results = r3 */                            \
                    "lgr %0, 3\n\t"                               \
                    : "=d" (_zzq_result)                          \
                    : "a" (&_zzq_args[0]), "0" (_zzq_default)     \
                    : "cc", "2", "3", "memory"                    \
                   );                                             \
   _zzq_result;                                                   \
 })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                    \
   volatile unsigned long int __addr;                             \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                    __GET_NR_CONTEXT_CODE                         \
                    "lgr %0, 3\n\t"                               \
                    : "=a" (__addr)                               \
                    :                                             \
                    : "cc", "3", "memory"                         \
                   );                                             \
   _zzq_orig->nraddr = __addr;                                    \
 }

#define VALGRIND_CALL_NOREDIR_R1                                  \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     __VEX_INJECT_IR_CODE);                       \
 } while (0)

#endif /* PLAT_s390x_linux */
856
/* ------------------------- mips32-linux ---------------- */

#if defined(PLAT_mips32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* .word  0x342
 * .word  0x742
 * .word  0xC2
 * .word  0x4C2*/
#define __SPECIAL_INSTRUCTION_PREAMBLE          \
                     "srl $0, $0, 13\n\t"       \
                     "srl $0, $0, 29\n\t"       \
                     "srl $0, $0, 3\n\t"        \
                     "srl $0, $0, 19\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
        __asm__ volatile("move $11, %1\n\t" /*default*/           \
                     "move $12, %2\n\t" /*ptr*/                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* T3 = client_request ( T4 ) */             \
                     "or $13, $13, $13\n\t"                       \
                     "move %0, $11\n\t"     /*result*/            \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "$11", "$12", "memory");                   \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %t9 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11"                                      \
                     );                                           \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%t9 */                      \
                     "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or $11, $11, $11\n\t"                       \
                    );                                            \
 } while (0)

#endif /* PLAT_mips32_linux */
928
929/* ------------------------- mips64-linux ---------------- */
930
931#if defined(PLAT_mips64_linux)
932
933typedef
934 struct {
935 unsigned long nraddr; /* where's the code? */
936 }
937 OrigFn;
938
939/* dsll $0,$0, 3
940 * dsll $0,$0, 13
941 * dsll $0,$0, 29
942 * dsll $0,$0, 19*/
943#define __SPECIAL_INSTRUCTION_PREAMBLE \
944 "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \
945 "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
946
947#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
948 _zzq_default, _zzq_request, \
949 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
950 __extension__ \
951 ({ volatile unsigned long int _zzq_args[6]; \
952 volatile unsigned long int _zzq_result; \
953 _zzq_args[0] = (unsigned long int)(_zzq_request); \
954 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
955 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
956 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
957 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
958 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
959 __asm__ volatile("move $11, %1\n\t" /*default*/ \
960 "move $12, %2\n\t" /*ptr*/ \
961 __SPECIAL_INSTRUCTION_PREAMBLE \
962 /* $11 = client_request ( $12 ) */ \
963 "or $13, $13, $13\n\t" \
964 "move %0, $11\n\t" /*result*/ \
965 : "=r" (_zzq_result) \
966 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
967 : "$11", "$12", "memory"); \
968 _zzq_result; \
969 })
970
/* Fetch the non-redirected address of the function being wrapped into
   _zzq_rlval.nraddr.  Under Valgrind the "or $14,$14,$14" marker makes
   the JIT place guest_NRADDR in $11; natively $11 is simply whatever it
   was (the macro is only meaningful inside wrappers run under Valgrind). */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                      \
   { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
     volatile unsigned long int __addr;                          \
     __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE             \
                      /* $11 = guest_NRADDR */                   \
                      "or $14, $14, $14\n\t"                     \
                      "move %0, $11"     /*result*/              \
                      : "=r" (__addr)                            \
                      :                                          \
                      : "$11");                                  \
     _zzq_orig->nraddr = __addr;                                 \
   }
983
/* Asm fragment: marks the upcoming call through $25 ($t9) as exempt from
   Valgrind's function redirection; a no-op when run natively. */
#define VALGRIND_CALL_NOREDIR_T9                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE              \
                     /* call-noredir $25 */                      \
                     "or $15, $15, $15\n\t"
988
/* Ask the JIT to inject IR here; natively "or $11,$11,$11" does nothing. */
#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     "or $11, $11, $11\n\t"                      \
                    );                                           \
 } while (0)
995
996#endif /* PLAT_mips64_linux */
997
998#if defined(PLAT_nanomips_linux)
999
/* Holds the non-redirected ("original") entry address of a wrapped
   function, as obtained by VALGRIND_GET_NR_CONTEXT. */
typedef struct {
   unsigned int nraddr; /* where's the code? */
} OrigFn;
1005/*
1006 8000 c04d srl zero, zero, 13
1007 8000 c05d srl zero, zero, 29
1008 8000 c043 srl zero, zero, 3
1009 8000 c053 srl zero, zero, 19
1010*/
1011
/* Magic preamble: four srl-of-$zero instructions (no-ops natively) that
   the Valgrind JIT recognises; encodings are listed in the comment above. */
#define __SPECIAL_INSTRUCTION_PREAMBLE "srl[32] $zero, $zero, 13 \n\t" \
                                       "srl[32] $zero, $zero, 29 \n\t" \
                                       "srl[32] $zero, $zero, 3 \n\t"  \
                                       "srl[32] $zero, $zero, 19 \n\t"
1016
/* Issue a client request: request code and five args are packed into
   _zzq_args[]; $t0 points at the block, $a7 carries the default result.
   Under Valgrind the "or[32] $t0,$t0,$t0" marker makes the JIT replace
   $a7 with the request's result; natively the default comes back. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
     volatile unsigned int _zzq_result;                           \
     _zzq_args[0] = (unsigned int)(_zzq_request);                 \
     _zzq_args[1] = (unsigned int)(_zzq_arg1);                    \
     _zzq_args[2] = (unsigned int)(_zzq_arg2);                    \
     _zzq_args[3] = (unsigned int)(_zzq_arg3);                    \
     _zzq_args[4] = (unsigned int)(_zzq_arg4);                    \
     _zzq_args[5] = (unsigned int)(_zzq_arg5);                    \
     __asm__ volatile("move $a7, %1\n\t" /* default */            \
                      "move $t0, %2\n\t" /* ptr */                \
                      __SPECIAL_INSTRUCTION_PREAMBLE              \
                      /* $a7 = client_request( $t0 ) */           \
                      "or[32] $t0, $t0, $t0\n\t"                  \
                      "move %0, $a7\n\t" /* result */             \
                      : "=r" (_zzq_result)                        \
                      : "r" (_zzq_default), "r" (&_zzq_args[0])   \
                      : "$a7", "$t0", "memory");                  \
     _zzq_result;                                                 \
  })
1040
/* Fetch the non-redirected address of the wrapped function into
   _zzq_rlval.nraddr; "or[32] $t1,$t1,$t1" is the JIT marker.
   NOTE(review): __addr is unsigned long while nraddr is unsigned int;
   both are 32 bits on nanomips so no truncation occurs — presumably
   intentional, confirm against upstream valgrind.h. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long int __addr;                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $a7 = guest_NRADDR */                     \
                     "or[32] $t1, $t1, $t1\n\t"                   \
                     "move %0, $a7" /*result*/                    \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$a7");                                    \
    _zzq_orig->nraddr = __addr;                                   \
  }
1053
/* Asm fragment: marks the upcoming call through $25 ($t9) as exempt from
   Valgrind's function redirection; a no-op when run natively. */
#define VALGRIND_CALL_NOREDIR_T9                                  \
  __SPECIAL_INSTRUCTION_PREAMBLE                                  \
  /* call-noredir $25 */                                          \
  "or[32] $t2, $t2, $t2\n\t"
1058
/* Ask the JIT to inject IR here; natively "or[32] $t3,$t3,$t3" does nothing. */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or[32] $t3, $t3, $t3\n\t"                   \
                    );                                            \
 } while (0)
1065
1066#endif
1067/* Insert assembly code for other platforms here... */
1068
1069#endif /* CONFIG_VALGRIND */
1070
/* ------------------------------------------------------------------ */
1072/* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
1073/* ugly. It's the least-worst tradeoff I can think of. */
1074/* ------------------------------------------------------------------ */
1075
1076/* This section defines magic (a.k.a appalling-hack) macros for doing
1077 guaranteed-no-redirection macros, so as to get from function
1078 wrappers to the functions they are wrapping. The whole point is to
1079 construct standard call sequences, but to do the call itself with a
1080 special no-redirect call pseudo-instruction that the JIT
1081 understands and handles specially. This section is long and
1082 repetitious, and I can't see a way to make it shorter.
1083
1084 The naming scheme is as follows:
1085
1086 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
1087
1088 'W' stands for "word" and 'v' for "void". Hence there are
1089 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
1090 and for each, the possibility of returning a word-typed result, or
1091 no result.
1092*/
1093
1094/* Use these to write the name of your wrapper. NOTE: duplicates
1095 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
   the default behaviour equivalence class tag "0000" into the name.
1097 See pub_tool_redir.h for details -- normally you don't need to
1098 think about this, though. */
1099
1100/* Use an extra level of macroisation so as to ensure the soname/fnname
1101 args are fully macro-expanded before pasting them together. */
1102#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
1103
/* Build the mangled symbol name of a wrapper for fnname in library
   soname.  NOTE(review): the ZU/ZZ suffixes presumably select how
   soname/fnname are Z-encoded — verify against pub_tool_redir.h,
   which these macros deliberately duplicate (see comment above). */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
1109
1110/* Use this macro from within a wrapper function to collect the
1111 context (address and possibly other info) of the original function.
1112 Once you have that you can then use it in one of the CALL_FN_
1113 macros. The type of the argument _lval is OrigFn. */
1114#define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
1115
1116/* Also provide end-user facilities for function replacement, rather
1117 than wrapping. A replacement function differs from a wrapper in
1118 that it has no way to get hold of the original function being
1119 called, and hence no way to call onwards to it. In a replacement
1120 function, VALGRIND_GET_ORIG_FN always returns zero. */
1121
/* Build the mangled symbol name of a *replacement* (prefix _vgr, versus
   _vgw for wrappers) for fnname in library soname; see the prose above
   for the wrapper/replacement distinction. */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1127
1128/* Derivatives of the main macros below, for calling functions
1129 returning void. */
1130
/* void-returning convenience wrappers: invoke the word-returning
   CALL_FN_W_* macro of the same arity and discard the result. */

#define CALL_FN_v_v(fnptr)                                        \
   do {                                                           \
      volatile unsigned long _discard;                            \
      CALL_FN_W_v(_discard,fnptr);                                \
   } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do {                                                           \
      volatile unsigned long _discard;                            \
      CALL_FN_W_W(_discard,fnptr,arg1);                           \
   } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do {                                                           \
      volatile unsigned long _discard;                            \
      CALL_FN_W_WW(_discard,fnptr,arg1,arg2);                     \
   } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do {                                                           \
      volatile unsigned long _discard;                            \
      CALL_FN_W_WWW(_discard,fnptr,arg1,arg2,arg3);               \
   } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do {                                                           \
      volatile unsigned long _discard;                            \
      CALL_FN_W_WWWW(_discard,fnptr,arg1,arg2,arg3,arg4);         \
   } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do {                                                           \
      volatile unsigned long _discard;                            \
      CALL_FN_W_5W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5);      \
   } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do {                                                           \
      volatile unsigned long _discard;                            \
      CALL_FN_W_6W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); \
   } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do {                                                           \
      volatile unsigned long _discard;                            \
      CALL_FN_W_7W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,  \
                   arg7);                                         \
   } while (0)
1162
1163/* ----------------- x86-{linux,darwin,solaris} ---------------- */
1164
1165#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
1166 || defined(PLAT_x86_solaris)
1167
/* These regs are trashed by the hidden call. No need to mention eax
   as gcc can already see that, plus causes gcc to bomb.
   (%eax is the asm's explicit in/out operand ("a"/"=a") in the
   CALL_FN_ macros below, so it could not go in the clobber list.) */
#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
1171
1172/* Macros to save and align the stack before making a function
1173 call and restore it afterwards as gcc may not keep the stack
1174 pointer aligned if it doesn't realise calls are being made
1175 to other functions. */
1176
/* Save %esp in %edi (hence "edi" in every clobber list below), then
   round %esp down to a 16-byte boundary. */
#define VALGRIND_ALIGN_STACK               \
      "movl %%esp,%%edi\n\t"               \
      "andl $0xfffffff0,%%esp\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the saved %esp from %edi. */
#define VALGRIND_RESTORE_STACK             \
      "movl %%edi,%%esp\n\t"
1182
1183/* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
1184 long) == 4. */
1185
/* All CALL_FN_W_nW macros below follow one pattern: _argvec[0] holds the
   target address and _argvec[1..n] the word-sized arguments.  The asm
   aligns %esp to 16 bytes (saving the old value in %edi), pads with
   "subl $k,%%esp" (k cycles 12/8/4/0) so %esp is still 16-aligned after
   the n 4-byte pushes, pushes the arguments right-to-left (cdecl), loads
   the target into %eax, performs the no-redirect call, restores %esp,
   and takes the result from %eax ("=a").  Only the asm differs between
   arities: one extra _argvec slot and one extra pushl per argument. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11,arg12)                    \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 48(%%eax)\n\t"                                    \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1592
1593#endif /* PLAT_x86_linux || PLAT_x86_darwin || PLAT_x86_solaris */
1594
1595/* ---------------- amd64-{linux,darwin,solaris} --------------- */
1596
1597#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
1598 || defined(PLAT_amd64_solaris)
1599
1600/* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1601
/* These regs are trashed by the hidden call.  %rax is deliberately
   omitted: it is the asm's explicit in/out operand ("a"/"=a") in the
   CALL_FN_ macros below, so it may not appear in the clobber list. */
#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi",       \
                            "rdi", "r8", "r9", "r10", "r11"
1605
1606/* This is all pretty complex. It's so as to make stack unwinding
1607 work reliably. See bug 243270. The basic problem is the sub and
1608 add of 128 of %rsp in all of the following macros. If gcc believes
1609 the CFA is in %rsp, then unwinding may fail, because what's at the
1610 CFA is not what gcc "expected" when it constructs the CFIs for the
1611 places where the macros are instantiated.
1612
1613 But we can't just add a CFI annotation to increase the CFA offset
1614 by 128, to match the sub of 128 from %rsp, because we don't know
1615 whether gcc has chosen %rsp as the CFA at that point, or whether it
1616 has chosen some other register (eg, %rbp). In the latter case,
1617 adding a CFI annotation to change the CFA offset is simply wrong.
1618
1619 So the solution is to get hold of the CFA using
1620 __builtin_dwarf_cfa(), put it in a known register, and add a
1621 CFI annotation to say what the register is. We choose %rbp for
1622 this (perhaps perversely), because:
1623
1624 (1) %rbp is already subject to unwinding. If a new register was
1625 chosen then the unwinder would have to unwind it in all stack
1626 traces, which is expensive, and
1627
1628 (2) %rbp is already subject to precise exception updates in the
1629 JIT. If a new register was chosen, we'd have to have precise
1630 exceptions for it too, which reduces performance of the
1631 generated code.
1632
1633 However .. one extra complication. We can't just whack the result
1634 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1635 list of trashed registers at the end of the inline assembly
1636 fragments; gcc won't allow %rbp to appear in that list. Hence
1637 instead we need to stash %rbp in %r15 for the duration of the asm,
1638 and say that %r15 is trashed instead. gcc seems happy to go with
1639 that.
1640
1641 Oh .. and this all needs to be conditionalised so that it is
1642 unchanged from before this commit, when compiled with older gccs
1643 that don't support __builtin_dwarf_cfa. Furthermore, since
1644 this header file is freestanding, it has to be independent of
1645 config.h, and so the following conditionalisation cannot depend on
1646 configure time checks.
1647
1648 Although it's not clear from
1649 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1650 this expression excludes Darwin.
1651 .cfi directives in Darwin assembly appear to be completely
1652 different and I haven't investigated how they work.
1653
1654 For even more entertainment value, note we have to use the
1655 completely undocumented __builtin_dwarf_cfa(), which appears to
1656 really compute the CFA, whereas __builtin_frame_address(0) claims
1657 to but actually doesn't. See
1658 https://bugs.kde.org/show_bug.cgi?id=243270#c47
1659*/
/* See the long explanation above: the CFA is fetched with
   __builtin_dwarf_cfa(), parked in %rbp (the original %rbp being saved
   in %r15 for the duration of the asm) and advertised via
   .cfi_def_cfa so stack unwinding keeps working; all three macros
   collapse to nothing when .cfi directives are unavailable. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif
1676
1677/* Macros to save and align the stack before making a function
1678 call and restore it afterwards as gcc may not keep the stack
1679 pointer aligned if it doesn't realise calls are being made
1680 to other functions. */
1681
/* Save %rsp in %r14 (hence "r14" in every clobber list below), then
   round %rsp down to a 16-byte boundary. */
#define VALGRIND_ALIGN_STACK               \
      "movq %%rsp,%%r14\n\t"               \
      "andq $0xfffffffffffffff0,%%rsp\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the saved %rsp from %r14. */
#define VALGRIND_RESTORE_STACK             \
      "movq %%r14,%%rsp\n\t"
1687
1688/* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1689 long) == 8. */
1690
1691/* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1692 macros. In order not to trash the stack redzone, we need to drop
1693 %rsp by 128 before the hidden call, and restore afterwards. The
1694 nastyness is that it is only by luck that the stack still appears
1695 to be unwindable during the hidden call - since then the behaviour
1696 of any routine using this macro does not match what the CFI data
1697 says. Sigh.
1698
1699 Why is this important? Imagine that a wrapper has a stack
1700 allocated local, and passes to the hidden call, a pointer to it.
1701 Because gcc does not know about the hidden call, it may allocate
1702 that local in the redzone. Unfortunately the hidden call may then
1703 trash it before it comes to use it. So we must step clear of the
1704 redzone, for the duration of the hidden call, to make it safe.
1705
1706 Probably the same problem afflicts the other redzone-style ABIs too
1707 (ppc64-linux); but for those, the stack is
1708 self describing (none of this CFI nonsense) so at least messing
1709 with the stack pointer doesn't give a danger of non-unwindable
1710 stack. */
1711
1712#define CALL_FN_W_v(lval, orig) \
1713 do { \
1714 volatile OrigFn _orig = (orig); \
1715 volatile unsigned long _argvec[1]; \
1716 volatile unsigned long _res; \
1717 _argvec[0] = (unsigned long)_orig.nraddr; \
1718 __asm__ volatile( \
1719 VALGRIND_CFI_PROLOGUE \
1720 VALGRIND_ALIGN_STACK \
1721 "subq $128,%%rsp\n\t" \
1722 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1723 VALGRIND_CALL_NOREDIR_RAX \
1724 VALGRIND_RESTORE_STACK \
1725 VALGRIND_CFI_EPILOGUE \
1726 : /*out*/ "=a" (_res) \
1727 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1728 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1729 ); \
1730 lval = (__typeof__(lval)) _res; \
1731 } while (0)
1732
1733#define CALL_FN_W_W(lval, orig, arg1) \
1734 do { \
1735 volatile OrigFn _orig = (orig); \
1736 volatile unsigned long _argvec[2]; \
1737 volatile unsigned long _res; \
1738 _argvec[0] = (unsigned long)_orig.nraddr; \
1739 _argvec[1] = (unsigned long)(arg1); \
1740 __asm__ volatile( \
1741 VALGRIND_CFI_PROLOGUE \
1742 VALGRIND_ALIGN_STACK \
1743 "subq $128,%%rsp\n\t" \
1744 "movq 8(%%rax), %%rdi\n\t" \
1745 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1746 VALGRIND_CALL_NOREDIR_RAX \
1747 VALGRIND_RESTORE_STACK \
1748 VALGRIND_CFI_EPILOGUE \
1749 : /*out*/ "=a" (_res) \
1750 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1751 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1752 ); \
1753 lval = (__typeof__(lval)) _res; \
1754 } while (0)
1755
1756#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1757 do { \
1758 volatile OrigFn _orig = (orig); \
1759 volatile unsigned long _argvec[3]; \
1760 volatile unsigned long _res; \
1761 _argvec[0] = (unsigned long)_orig.nraddr; \
1762 _argvec[1] = (unsigned long)(arg1); \
1763 _argvec[2] = (unsigned long)(arg2); \
1764 __asm__ volatile( \
1765 VALGRIND_CFI_PROLOGUE \
1766 VALGRIND_ALIGN_STACK \
1767 "subq $128,%%rsp\n\t" \
1768 "movq 16(%%rax), %%rsi\n\t" \
1769 "movq 8(%%rax), %%rdi\n\t" \
1770 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1771 VALGRIND_CALL_NOREDIR_RAX \
1772 VALGRIND_RESTORE_STACK \
1773 VALGRIND_CFI_EPILOGUE \
1774 : /*out*/ "=a" (_res) \
1775 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1776 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1777 ); \
1778 lval = (__typeof__(lval)) _res; \
1779 } while (0)
1780
1781#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1782 do { \
1783 volatile OrigFn _orig = (orig); \
1784 volatile unsigned long _argvec[4]; \
1785 volatile unsigned long _res; \
1786 _argvec[0] = (unsigned long)_orig.nraddr; \
1787 _argvec[1] = (unsigned long)(arg1); \
1788 _argvec[2] = (unsigned long)(arg2); \
1789 _argvec[3] = (unsigned long)(arg3); \
1790 __asm__ volatile( \
1791 VALGRIND_CFI_PROLOGUE \
1792 VALGRIND_ALIGN_STACK \
1793 "subq $128,%%rsp\n\t" \
1794 "movq 24(%%rax), %%rdx\n\t" \
1795 "movq 16(%%rax), %%rsi\n\t" \
1796 "movq 8(%%rax), %%rdi\n\t" \
1797 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1798 VALGRIND_CALL_NOREDIR_RAX \
1799 VALGRIND_RESTORE_STACK \
1800 VALGRIND_CFI_EPILOGUE \
1801 : /*out*/ "=a" (_res) \
1802 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1803 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1804 ); \
1805 lval = (__typeof__(lval)) _res; \
1806 } while (0)
1807
1808#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1809 do { \
1810 volatile OrigFn _orig = (orig); \
1811 volatile unsigned long _argvec[5]; \
1812 volatile unsigned long _res; \
1813 _argvec[0] = (unsigned long)_orig.nraddr; \
1814 _argvec[1] = (unsigned long)(arg1); \
1815 _argvec[2] = (unsigned long)(arg2); \
1816 _argvec[3] = (unsigned long)(arg3); \
1817 _argvec[4] = (unsigned long)(arg4); \
1818 __asm__ volatile( \
1819 VALGRIND_CFI_PROLOGUE \
1820 VALGRIND_ALIGN_STACK \
1821 "subq $128,%%rsp\n\t" \
1822 "movq 32(%%rax), %%rcx\n\t" \
1823 "movq 24(%%rax), %%rdx\n\t" \
1824 "movq 16(%%rax), %%rsi\n\t" \
1825 "movq 8(%%rax), %%rdi\n\t" \
1826 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1827 VALGRIND_CALL_NOREDIR_RAX \
1828 VALGRIND_RESTORE_STACK \
1829 VALGRIND_CFI_EPILOGUE \
1830 : /*out*/ "=a" (_res) \
1831 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1832 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1833 ); \
1834 lval = (__typeof__(lval)) _res; \
1835 } while (0)
1836
/* As CALL_FN_W_WWWW, but with 5 register args: arg5 goes in r8. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1867
/* 6 register args: arg6 goes in r9 — the last amd64 integer arg reg. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1900
/* 7 args: arg7 is passed on the stack.  Note the subtraction is $136,
   not $128: 136 + one 8-byte push = 144, a multiple of 16, so rsp is
   still 16-byte aligned at the call. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1936
/* 8 args: arg7 and arg8 go on the stack, pushed last-first so that
   arg7 ends up at the lower address.  128 + 2*8 = 144 keeps rsp
   16-byte aligned. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1974
/* 9 args: three stack args (arg7..arg9).  $136 + 3*8 = 160, keeping
   the 16-byte stack alignment established by VALGRIND_ALIGN_STACK. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2014
/* 10 args: four stack args (arg7..arg10); 128 + 4*8 = 160 keeps the
   16-byte alignment. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2056
/* 11 args: five stack args (arg7..arg11); odd count, so $136 again
   (136 + 5*8 = 176, 16-byte aligned). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 88(%%rax)\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2100
/* 12 args: six stack args (arg7..arg12); 128 + 6*8 = 176 keeps the
   16-byte alignment. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,arg12)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 96(%%rax)\n\t"                                    \
         "pushq 88(%%rax)\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2146
2147#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
2148
2149/* ------------------------ ppc32-linux ------------------------ */
2150
2151#if defined(PLAT_ppc32_linux)
2152
2153/* This is useful for finding out about the on-stack stuff:
2154
2155 extern int f9 ( int,int,int,int,int,int,int,int,int );
2156 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2157 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2158 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2159
2160 int g9 ( void ) {
2161 return f9(11,22,33,44,55,66,77,88,99);
2162 }
2163 int g10 ( void ) {
2164 return f10(11,22,33,44,55,66,77,88,99,110);
2165 }
2166 int g11 ( void ) {
2167 return f11(11,22,33,44,55,66,77,88,99,110,121);
2168 }
2169 int g12 ( void ) {
2170 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2171 }
2172*/
2173
2174/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2175
2176/* These regs are trashed by the hidden call. */
/* Registers clobbered by the hidden (redirected) call on ppc32-linux.
   Listing them in the asm clobber list forces gcc to save/restore
   anything live in them across the CALL_FN_* macros. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",   \
   "r11", "r12", "r13"
2182
2183/* Macros to save and align the stack before making a function
2184 call and restore it afterwards as gcc may not keep the stack
2185 pointer aligned if it doesn't realise calls are being made
2186 to other functions. */
2187
/* Save the stack pointer (r1) in r28, then round r1 down to a 16-byte
   boundary: rlwinm 1,1,0,0,27 keeps bits 0..27 (IBM numbering), i.e.
   clears the low 4 bits. */
#define VALGRIND_ALIGN_STACK               \
   "mr 28,1\n\t"                           \
   "rlwinm 1,1,0,0,27\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore r1 from the copy kept in r28. */
#define VALGRIND_RESTORE_STACK             \
   "mr 1,28\n\t"
2193
2194/* These CALL_FN_ macros assume that on ppc32-linux,
2195 sizeof(unsigned long) == 4. */
2196
/* ppc32: call the original function with no arguments; the word result
   (r3) is assigned to lval.  The target address sits in _argvec[0] and
   is loaded into r11 for the no-redirect branch-and-link. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2216
/* ppc32: one argument: arg1 -> r3; result r3 -> lval. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2238
/* ppc32: two arguments: arg1 -> r3, arg2 -> r4; result r3 -> lval. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2262
/* ppc32: three arguments in r3..r5; result r3 -> lval. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2288
/* ppc32: four arguments in r3..r6; result r3 -> lval. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2316
/* ppc32: five arguments in r3..r7; result r3 -> lval. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2346
/* ppc32: six arguments in r3..r8; result r3 -> lval. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2378
/* ppc32: seven arguments in r3..r9; result r3 -> lval. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2413
/* ppc32: eight arguments fill r3..r10 — the full register set; the
   9W..12W variants below spill the remainder to the stack. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2450
/* ppc32: nine arguments — r3..r10 plus arg9 in the stack parameter
   area.  16 bytes are reserved (keeps 16-byte alignment) and arg9 is
   stored at 8(r1), staged through r3 before r3 gets arg1. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2493
/* ppc32: ten arguments — r3..r10 plus arg9/arg10 in the stack
   parameter area at 8(r1) and 12(r1). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2540
/* ppc32: eleven arguments — three stack args now, so 32 bytes are
   reserved; arg9..arg11 land at 8(r1), 12(r1), 16(r1). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2591
/* ppc32: twelve arguments — four stack args; arg9..arg12 land at
   8(r1) .. 20(r1) inside the 32-byte reservation. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,arg12)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      _argvec[12] = (unsigned long)arg12;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg12 */                                              \
         "lwz 3,48(11)\n\t"                                       \
         "stw 3,20(1)\n\t"                                        \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2646
2647#endif /* PLAT_ppc32_linux */
2648
2649/* ------------------------ ppc64-linux ------------------------ */
2650
2651#if defined(PLAT_ppc64be_linux)
2652
2653/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2654
2655/* These regs are trashed by the hidden call. */
/* Registers clobbered by the hidden (redirected) call on
   ppc64be-linux; declared as asm clobbers so gcc preserves any live
   values across the CALL_FN_* macros.  (Unlike the ppc32 list, r2 is
   not here — it is saved/restored explicitly by each macro.) */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
   "r11", "r12", "r13"
2661
2662/* Macros to save and align the stack before making a function
2663 call and restore it afterwards as gcc may not keep the stack
2664 pointer aligned if it doesn't realise calls are being made
2665 to other functions. */
2666
/* Save the stack pointer (r1) in r28, then round r1 down to a 16-byte
   boundary: rldicr 1,1,0,59 keeps bits 0..59 (IBM numbering), i.e.
   clears the low 4 bits of the 64-bit value. */
#define VALGRIND_ALIGN_STACK               \
   "mr 28,1\n\t"                           \
   "rldicr 1,1,0,59\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore r1 from the copy kept in r28. */
#define VALGRIND_RESTORE_STACK             \
   "mr 1,28\n\t"
2672
2673/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2674 long) == 8. */
2675
/* ppc64be: call the original function with no arguments; word result
   (r3) -> lval.  %1 points at _argvec[2], so -16(11) is _argvec[0]
   (scratch slot for the caller's TOC pointer r2) and -8(11) is
   _argvec[1] (the callee's TOC from _orig.r2); the caller's r2 is
   reinstated after the call. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t"   /* use nraddr's tocptr */            \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2701
/* ppc64be: one argument: arg1 -> r3 (loaded from 8(11), i.e.
   _argvec[3]); TOC handling as in CALL_FN_W_v above. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t"   /* use nraddr's tocptr */            \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2729
/* ppc64be: two arguments: arg1 -> r3, arg2 -> r4. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t"   /* use nraddr's tocptr */            \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2759
/* ppc64be: three arguments in r3..r5. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t"   /* use nraddr's tocptr */            \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */                       \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2791
/*
 * As CALL_FN_W_WWW (ppc64be) but with four word arguments; arg4 is
 * additionally loaded into r6.  See that macro for the TOC-pointer
 * save/restore scheme around the non-redirected call through r11.
 */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2825
/*
 * As CALL_FN_W_WWW (ppc64be) but with five word arguments; args 1-5
 * are loaded into r3-r7 from the _argvec shadow array before the
 * non-redirected call through r11.
 */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+5];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2861
/*
 * As CALL_FN_W_WWW (ppc64be) but with six word arguments; args 1-6
 * are loaded into r3-r8 before the non-redirected call through r11.
 */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+6];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2899
/*
 * As CALL_FN_W_WWW (ppc64be) but with seven word arguments; args 1-7
 * are loaded into r3-r9 before the non-redirected call through r11.
 */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+7];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                        \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2940
/*
 * As CALL_FN_W_WWW (ppc64be) but with eight word arguments, filling
 * every argument register r3-r10 before the non-redirected call
 * through r11.  Beyond eight arguments the stack must be used (see
 * CALL_FN_W_9W and up).
 */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+8];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2983
/*
 * As CALL_FN_W_8W (ppc64be) but with nine arguments: args 1-8 go in
 * r3-r10; the frame is grown by 128 bytes ("addi 1,1,-128") and arg9
 * is copied into the on-stack parameter area at 112(r1) before the
 * call.  VALGRIND_RESTORE_STACK undoes the frame adjustment.
 */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+9];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-128\n\t" /* expand stack frame */             \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3032
/*
 * As CALL_FN_W_9W (ppc64be) but with ten arguments: arg9 and arg10
 * are copied to the stack parameter area at 112(r1) and 120(r1)
 * after growing the frame by 128 bytes; args 1-8 go in r3-r10.
 */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+10];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-128\n\t" /* expand stack frame */             \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3085
/*
 * As CALL_FN_W_9W (ppc64be) but with eleven arguments: the frame is
 * grown by 144 bytes and arg9/arg10/arg11 are copied to 112/120/128
 * (r1); args 1-8 go in r3-r10.
 */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+11];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-144\n\t" /* expand stack frame */             \
         /* arg11 */                                              \
         "ld 3,88(11)\n\t"                                        \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3142
/*
 * As CALL_FN_W_9W (ppc64be) but with twelve arguments: the frame is
 * grown by 144 bytes and arg9..arg12 are copied to 112/120/128/136
 * (r1); args 1-8 go in r3-r10.  Largest arity supported by this
 * family of macros.
 */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+12];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      _argvec[2+12] = (unsigned long)arg12;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-144\n\t" /* expand stack frame */             \
         /* arg12 */                                              \
         "ld 3,96(11)\n\t"                                        \
         "std 3,136(1)\n\t"                                       \
         /* arg11 */                                              \
         "ld 3,88(11)\n\t"                                        \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3203
3204#endif /* PLAT_ppc64be_linux */
3205
3206/* ------------------------- ppc64le-linux ----------------------- */
3207#if defined(PLAT_ppc64le_linux)
3208
3209/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
3210
/* These regs are trashed by the hidden call. */
/* Listed as asm clobbers in every CALL_FN_* macro below so gcc does
   not keep live values in them across the hidden call.  r2 (TOC) is
   not listed because the macros save/restore it explicitly, and r28
   is added separately in each clobber list (it is used by
   VALGRIND_ALIGN_STACK to hold the saved stack pointer). */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
   "r11", "r12", "r13"
3217
3218/* Macros to save and align the stack before making a function
3219 call and restore it afterwards as gcc may not keep the stack
3220 pointer aligned if it doesn't realise calls are being made
3221 to other functions. */
3222
/* Save the current stack pointer (r1) in r28, then round r1 down to a
   16-byte boundary: "rldicr 1,1,0,59" keeps bits 0..59 and clears the
   low four bits.  r28 is therefore listed as a clobber in every
   CALL_FN_* macro that uses these. */
#define VALGRIND_ALIGN_STACK                                      \
      "mr 28,1\n\t"                                               \
      "rldicr 1,1,0,59\n\t"
/* Restore the pre-call stack pointer saved in r28. */
#define VALGRIND_RESTORE_STACK                                    \
      "mr 1,28\n\t"
3228
3229/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
3230 long) == 8. */
3231
/*
 * Call the original (non-redirected) zero-argument function described
 * by 'orig', assigning its return value (from r3) to 'lval' (ppc64le
 * variant).  _argvec[1] holds the caller's TOC pointer (_orig.r2) and
 * _argvec[2] the callee's address; relative to r12 == &_argvec[2],
 * the asm saves the live r2 at -16(12) (== _argvec[0]), installs the
 * callee's TOC from -8(12), and branch-and-links through r12 using
 * VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 (defined elsewhere in this
 * file), then restores r2 and the stack pointer.  r28 is clobbered by
 * VALGRIND_ALIGN_STACK (it holds the saved r1).
 */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3257
/*
 * As CALL_FN_W_v (ppc64le) but with one word argument, loaded into r3
 * from the _argvec shadow array before the non-redirected call
 * through r12.
 */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3285
/*
 * As CALL_FN_W_v (ppc64le) but with two word arguments, loaded into
 * r3 and r4 before the non-redirected call through r12.
 */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3315
/*
 * As CALL_FN_W_v (ppc64le) but with three word arguments, loaded into
 * r3-r5 before the non-redirected call through r12.
 */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3347
/*
 * As CALL_FN_W_v (ppc64le) but with four word arguments, loaded into
 * r3-r6 before the non-redirected call through r12.
 */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3381
/*
 * As CALL_FN_W_v (ppc64le) but with five word arguments, loaded into
 * r3-r7 before the non-redirected call through r12.
 */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+5];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3417
/*
 * As CALL_FN_W_v (ppc64le) but with six word arguments, loaded into
 * r3-r8 before the non-redirected call through r12.
 */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+6];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3455
/*
 * As CALL_FN_W_v (ppc64le) but with seven word arguments, loaded into
 * r3-r9 before the non-redirected call through r12.
 */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+7];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3496
/*
 * As CALL_FN_W_v (ppc64le) but with eight word arguments, filling
 * every argument register r3-r10 before the non-redirected call
 * through r12.  Beyond eight arguments the stack must be used (see
 * CALL_FN_W_9W and up).
 */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+8];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3539
/*
 * As CALL_FN_W_8W (ppc64le) but with nine arguments: args 1-8 go in
 * r3-r10; the frame is grown by 128 bytes ("addi 1,1,-128") and arg9
 * is copied into the on-stack parameter area at 96(r1) before the
 * call (note the offset differs from the ppc64be variant).
 * VALGRIND_RESTORE_STACK undoes the frame adjustment.
 */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+9];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-128\n\t" /* expand stack frame */             \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3588
/*
 * As CALL_FN_W_9W (ppc64le) but with ten arguments: arg9 and arg10
 * are copied to the stack parameter area at 96(r1) and 104(r1) after
 * growing the frame by 128 bytes; args 1-8 go in r3-r10.
 */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+10];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-128\n\t" /* expand stack frame */             \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3641
/*
 * As CALL_FN_W_9W (ppc64le) but with eleven arguments: the frame is
 * grown by 144 bytes and arg9/arg10/arg11 are copied to 96/104/112
 * (r1); args 1-8 go in r3-r10.
 */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+11];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-144\n\t" /* expand stack frame */             \
         /* arg11 */                                              \
         "ld 3,88(12)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3698
/* Call a 12-argument wrapped function on ppc64le-linux.  Same scheme
   as CALL_FN_W_11W: args 1-8 go in r3..r10, args 9-12 are stored at
   96/104/112/120(r1) in the 144-byte expanded frame, and the TOC
   pointer (r2) is swapped for the callee's and restored afterwards. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11,arg12)          \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+12];                      \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      _argvec[2+6] = (unsigned long)arg6;                        \
      _argvec[2+7] = (unsigned long)arg7;                        \
      _argvec[2+8] = (unsigned long)arg8;                        \
      _argvec[2+9] = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                      \
      _argvec[2+11] = (unsigned long)arg11;                      \
      _argvec[2+12] = (unsigned long)arg12;                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 12,%1\n\t"                                          \
         "std 2,-16(12)\n\t" /* save tocptr */                   \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */             \
         "addi 1,1,-144\n\t" /* expand stack frame */            \
         /* arg12 */                                             \
         "ld 3,96(12)\n\t"                                       \
         "std 3,120(1)\n\t"                                      \
         /* arg11 */                                             \
         "ld 3,88(12)\n\t"                                       \
         "std 3,112(1)\n\t"                                      \
         /* arg10 */                                             \
         "ld 3,80(12)\n\t"                                       \
         "std 3,104(1)\n\t"                                      \
         /* arg9 */                                              \
         "ld 3,72(12)\n\t"                                       \
         "std 3,96(1)\n\t"                                       \
         /* args1-8 */                                           \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                        \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                       \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                       \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                       \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                     \
         "ld 12, 0(12)\n\t" /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
         "mr 12,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(12)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3759
3760#endif /* PLAT_ppc64le_linux */
3761
3762/* ------------------------- arm-linux ------------------------- */
3763
3764#if defined(PLAT_arm_linux)
3765
3766/* These regs are trashed by the hidden call. */
/* r0-r3 are the argument/scratch registers, r12 is ip and r14 is lr;
   r4 is included because the CALL_FN_ macros below use it to hold the
   target address (see VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4). */
#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"
3768
3769/* Macros to save and align the stack before making a function
3770 call and restore it afterwards as gcc may not keep the stack
3771 pointer aligned if it doesn't realise calls are being made
3772 to other functions. */
3773
3774/* This is a bit tricky. We store the original stack pointer in r10
3775 as it is callee-saves. gcc doesn't allow the use of r11 for some
3776 reason. Also, we can't directly "bic" the stack pointer in thumb
3777 mode since r13 isn't an allowed register number in that context.
3778 So use r4 as a temporary, since that is about to get trashed
3779 anyway, just after each use of this macro. Side effect is we need
3780 to be very careful about any future changes, since
3781 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
/* Save sp in callee-saved r10, then round sp down to an 8-byte
   boundary via r4 (sp can't be the operand of "bic" in thumb mode;
   see the explanation above). */
#define VALGRIND_ALIGN_STACK \
   "mov r10, sp\n\t" \
   "mov r4, sp\n\t" \
   "bic r4, r4, #7\n\t" \
   "mov sp, r4\n\t"
/* Undo VALGRIND_ALIGN_STACK: recover the original sp from r10. */
#define VALGRIND_RESTORE_STACK \
   "mov sp, r10\n\t"
3789
3790/* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
3791 long) == 4. */
3792
/* Call a zero-argument wrapped function: the target address is
   loaded into r4 and invoked through the no-redirect branch macro;
   the word-sized result is taken from r0. */
#define CALL_FN_W_v(lval, orig)                                  \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[1];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r4, [%1] \n\t" /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0\n"                                          \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3811
/* Call a 1-argument wrapped function: arg1 -> r0, target -> r4,
   result <- r0. */
#define CALL_FN_W_W(lval, orig, arg1)                            \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[2];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r4, [%1] \n\t" /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0\n"                                          \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3832
/* Call a 2-argument wrapped function: args 1-2 -> r0-r1. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                      \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r4, [%1] \n\t" /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0\n"                                          \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3855
/* Call a 3-argument wrapped function: args 1-3 -> r0-r2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[4];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r4, [%1] \n\t" /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0\n"                                          \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3880
/* Call a 4-argument wrapped function: args 1-4 -> r0-r3 (all the
   AAPCS register arguments; no stack spill needed). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)          \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[5];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r3, [%1, #16] \n\t"                                \
         "ldr r4, [%1] \n\t" /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3907
/* Call a 5-argument wrapped function: args 1-4 -> r0-r3, arg5 pushed
   on the stack.  The extra "sub sp, #4" pads the 4-byte push to 8
   bytes so sp stays 8-byte aligned (ALIGN_STACK guarantees bic #7). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)       \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[6];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "sub sp, sp, #4 \n\t"                                   \
         "ldr r0, [%1, #20] \n\t"                                \
         "push {r0} \n\t"                                        \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r3, [%1, #16] \n\t"                                \
         "ldr r4, [%1] \n\t" /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3938
/* Call a 6-argument wrapped function: args 1-4 -> r0-r3, args 5-6
   pushed (8 bytes, already 8-aligned, so no pad is needed). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)  \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[7];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #20] \n\t"                                \
         "ldr r1, [%1, #24] \n\t"                                \
         "push {r0, r1} \n\t"                                    \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r3, [%1, #16] \n\t"                                \
         "ldr r4, [%1] \n\t" /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3970
/* Call a 7-argument wrapped function: args 1-4 -> r0-r3, args 5-7
   pushed; "sub sp, #4" pads the 12-byte push to 16 for alignment. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7)                                       \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[8];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "sub sp, sp, #4 \n\t"                                   \
         "ldr r0, [%1, #20] \n\t"                                \
         "ldr r1, [%1, #24] \n\t"                                \
         "ldr r2, [%1, #28] \n\t"                                \
         "push {r0, r1, r2} \n\t"                                \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r3, [%1, #16] \n\t"                                \
         "ldr r4, [%1] \n\t" /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4006
/* Call an 8-argument wrapped function: args 1-4 -> r0-r3, args 5-8
   pushed (16 bytes, aligned, no pad needed). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8)                                  \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[9];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #20] \n\t"                                \
         "ldr r1, [%1, #24] \n\t"                                \
         "ldr r2, [%1, #28] \n\t"                                \
         "ldr r3, [%1, #32] \n\t"                                \
         "push {r0, r1, r2, r3} \n\t"                            \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r3, [%1, #16] \n\t"                                \
         "ldr r4, [%1] \n\t" /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4043
/* Call a 9-argument wrapped function: args 1-4 -> r0-r3, args 5-9
   pushed; "sub sp, #4" pads the 20-byte push to 24 for alignment. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8,arg9)                             \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[10];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "sub sp, sp, #4 \n\t"                                   \
         "ldr r0, [%1, #20] \n\t"                                \
         "ldr r1, [%1, #24] \n\t"                                \
         "ldr r2, [%1, #28] \n\t"                                \
         "ldr r3, [%1, #32] \n\t"                                \
         "ldr r4, [%1, #36] \n\t"                                \
         "push {r0, r1, r2, r3, r4} \n\t"                        \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r3, [%1, #16] \n\t"                                \
         "ldr r4, [%1] \n\t" /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4083
/* Call a 10-argument wrapped function: args 1-4 -> r0-r3, args 5-10
   pushed in two batches, highest slot first (arg10, then args 5-9),
   24 bytes total so sp stays 8-byte aligned without padding. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10)                      \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[11];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #40] \n\t"                                \
         "push {r0} \n\t"                                        \
         "ldr r0, [%1, #20] \n\t"                                \
         "ldr r1, [%1, #24] \n\t"                                \
         "ldr r2, [%1, #28] \n\t"                                \
         "ldr r3, [%1, #32] \n\t"                                \
         "ldr r4, [%1, #36] \n\t"                                \
         "push {r0, r1, r2, r3, r4} \n\t"                        \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r3, [%1, #16] \n\t"                                \
         "ldr r4, [%1] \n\t" /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4125
/* Call an 11-argument wrapped function: args 1-4 -> r0-r3, args 5-11
   pushed; "sub sp, #4" pads 28 bytes of pushes to 32 for alignment. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,      \
                      arg6,arg7,arg8,arg9,arg10,                 \
                      arg11)                                     \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[12];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      _argvec[11] = (unsigned long)(arg11);                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "sub sp, sp, #4 \n\t"                                   \
         "ldr r0, [%1, #40] \n\t"                                \
         "ldr r1, [%1, #44] \n\t"                                \
         "push {r0, r1} \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                \
         "ldr r1, [%1, #24] \n\t"                                \
         "ldr r2, [%1, #28] \n\t"                                \
         "ldr r3, [%1, #32] \n\t"                                \
         "ldr r4, [%1, #36] \n\t"                                \
         "push {r0, r1, r2, r3, r4} \n\t"                        \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r3, [%1, #16] \n\t"                                \
         "ldr r4, [%1] \n\t" /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4171
/* Call a 12-argument wrapped function: args 1-4 -> r0-r3, args 5-12
   pushed in two batches (32 bytes total, aligned, no pad needed). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,      \
                      arg6,arg7,arg8,arg9,arg10,                 \
                      arg11,arg12)                               \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[13];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      _argvec[11] = (unsigned long)(arg11);                      \
      _argvec[12] = (unsigned long)(arg12);                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #40] \n\t"                                \
         "ldr r1, [%1, #44] \n\t"                                \
         "ldr r2, [%1, #48] \n\t"                                \
         "push {r0, r1, r2} \n\t"                                \
         "ldr r0, [%1, #20] \n\t"                                \
         "ldr r1, [%1, #24] \n\t"                                \
         "ldr r2, [%1, #28] \n\t"                                \
         "ldr r3, [%1, #32] \n\t"                                \
         "ldr r4, [%1, #36] \n\t"                                \
         "push {r0, r1, r2, r3, r4} \n\t"                        \
         "ldr r0, [%1, #4] \n\t"                                 \
         "ldr r1, [%1, #8] \n\t"                                 \
         "ldr r2, [%1, #12] \n\t"                                \
         "ldr r3, [%1, #16] \n\t"                                \
         "ldr r4, [%1] \n\t" /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4218
4219#endif /* PLAT_arm_linux */
4220
4221/* ------------------------ arm64-linux ------------------------ */
4222
4223#if defined(PLAT_arm64_linux)
4224
4225/* These regs are trashed by the hidden call. */
/* NOTE(review): this clobber set also lists x19/x20, which AAPCS64
   classifies as callee-saved — presumably they are trashed by the
   valgrind branch-to-noredir mechanism; confirm against upstream
   valgrind before relying on them surviving a wrapped call. */
#define __CALLER_SAVED_REGS \
     "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9", \
     "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", \
     "x18", "x19", "x20", "x30", \
     "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", \
     "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", \
     "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", \
     "v26", "v27", "v28", "v29", "v30", "v31"
4234
4235/* x21 is callee-saved, so we can use it to save and restore SP around
4236 the hidden call. */
/* Save sp in callee-saved x21, then round sp down to a 16-byte
   boundary (bic #15). */
#define VALGRIND_ALIGN_STACK \
   "mov x21, sp\n\t" \
   "bic sp, x21, #15\n\t"
/* Undo VALGRIND_ALIGN_STACK: recover the original sp from x21. */
#define VALGRIND_RESTORE_STACK \
   "mov sp, x21\n\t"
4242
4243/* These CALL_FN_ macros assume that on arm64-linux,
4244 sizeof(unsigned long) == 8. */
4245
/* Call a zero-argument wrapped function on arm64: target address is
   loaded into x8 and invoked via the no-redirect branch macro; the
   result is taken from x0. */
#define CALL_FN_W_v(lval, orig)                                  \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[1];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x8, [%1] \n\t" /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0\n"                                          \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4264
/* Call a 1-argument wrapped function: arg1 -> x0, target -> x8. */
#define CALL_FN_W_W(lval, orig, arg1)                            \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[2];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x8, [%1] \n\t" /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0\n"                                          \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4285
/* Call a 2-argument wrapped function: args 1-2 -> x0-x1. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                      \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x8, [%1] \n\t" /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0\n"                                          \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4308
/* Call a 3-argument wrapped function: args 1-3 -> x0-x2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[4];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x2, [%1, #24] \n\t"                                \
         "ldr x8, [%1] \n\t" /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0\n"                                          \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4333
/* Call a 4-argument wrapped function: args 1-4 -> x0-x3. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)          \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[5];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x2, [%1, #24] \n\t"                                \
         "ldr x3, [%1, #32] \n\t"                                \
         "ldr x8, [%1] \n\t" /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4360
/* Call a 5-argument wrapped function: args 1-5 -> x0-x4. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)       \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[6];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x2, [%1, #24] \n\t"                                \
         "ldr x3, [%1, #32] \n\t"                                \
         "ldr x4, [%1, #40] \n\t"                                \
         "ldr x8, [%1] \n\t" /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4389
/* Call a 6-argument wrapped function: args 1-6 -> x0-x5. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)  \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[7];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x2, [%1, #24] \n\t"                                \
         "ldr x3, [%1, #32] \n\t"                                \
         "ldr x4, [%1, #40] \n\t"                                \
         "ldr x5, [%1, #48] \n\t"                                \
         "ldr x8, [%1] \n\t" /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4420
/* Call a 7-argument wrapped function: args 1-7 -> x0-x6. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7)                                       \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[8];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x2, [%1, #24] \n\t"                                \
         "ldr x3, [%1, #32] \n\t"                                \
         "ldr x4, [%1, #40] \n\t"                                \
         "ldr x5, [%1, #48] \n\t"                                \
         "ldr x6, [%1, #56] \n\t"                                \
         "ldr x8, [%1] \n\t" /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4454
/* Call an 8-argument wrapped function: args 1-8 -> x0-x7 (all the
   AAPCS64 register arguments; no stack spill needed). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8)                                  \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[9];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x2, [%1, #24] \n\t"                                \
         "ldr x3, [%1, #32] \n\t"                                \
         "ldr x4, [%1, #40] \n\t"                                \
         "ldr x5, [%1, #48] \n\t"                                \
         "ldr x6, [%1, #56] \n\t"                                \
         "ldr x7, [%1, #64] \n\t"                                \
         "ldr x8, [%1] \n\t" /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4490
/* Call a 9-argument wrapped function: args 1-8 -> x0-x7, arg9 stored
   at [sp, #0].  0x20 bytes are reserved (only 8 needed) so sp keeps
   the 16-byte alignment established by VALGRIND_ALIGN_STACK. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8,arg9)                             \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[10];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "sub sp, sp, #0x20 \n\t"                                \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x2, [%1, #24] \n\t"                                \
         "ldr x3, [%1, #32] \n\t"                                \
         "ldr x4, [%1, #40] \n\t"                                \
         "ldr x5, [%1, #48] \n\t"                                \
         "ldr x6, [%1, #56] \n\t"                                \
         "ldr x7, [%1, #64] \n\t"                                \
         "ldr x8, [%1, #72] \n\t"                                \
         "str x8, [sp, #0]  \n\t"                                \
         "ldr x8, [%1] \n\t" /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4530
/* Call a 10-argument wrapped function: args 1-8 -> x0-x7, args 9-10
   stored at [sp, #0] and [sp, #8] in the 0x20-byte reserved area
   (x8 doubles as the scratch register before it finally receives the
   target address). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10)                      \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[11];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "sub sp, sp, #0x20 \n\t"                                \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x2, [%1, #24] \n\t"                                \
         "ldr x3, [%1, #32] \n\t"                                \
         "ldr x4, [%1, #40] \n\t"                                \
         "ldr x5, [%1, #48] \n\t"                                \
         "ldr x6, [%1, #56] \n\t"                                \
         "ldr x7, [%1, #64] \n\t"                                \
         "ldr x8, [%1, #72] \n\t"                                \
         "str x8, [sp, #0]  \n\t"                                \
         "ldr x8, [%1, #80] \n\t"                                \
         "str x8, [sp, #8]  \n\t"                                \
         "ldr x8, [%1] \n\t" /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4573
/* Call an 11-argument wrapped function: args 1-8 -> x0-x7, args 9-11
   stored at [sp, #0/8/16] inside a 0x30-byte reserved area (rounded
   up to keep the 16-byte stack alignment). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11)                \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[12];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      _argvec[11] = (unsigned long)(arg11);                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "sub sp, sp, #0x30 \n\t"                                \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x2, [%1, #24] \n\t"                                \
         "ldr x3, [%1, #32] \n\t"                                \
         "ldr x4, [%1, #40] \n\t"                                \
         "ldr x5, [%1, #48] \n\t"                                \
         "ldr x6, [%1, #56] \n\t"                                \
         "ldr x7, [%1, #64] \n\t"                                \
         "ldr x8, [%1, #72] \n\t"                                \
         "str x8, [sp, #0]  \n\t"                                \
         "ldr x8, [%1, #80] \n\t"                                \
         "str x8, [sp, #8]  \n\t"                                \
         "ldr x8, [%1, #88] \n\t"                                \
         "str x8, [sp, #16] \n\t"                                \
         "ldr x8, [%1] \n\t" /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4619
4620#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4621 arg7,arg8,arg9,arg10,arg11, \
4622 arg12) \
4623 do { \
4624 volatile OrigFn _orig = (orig); \
4625 volatile unsigned long _argvec[13]; \
4626 volatile unsigned long _res; \
4627 _argvec[0] = (unsigned long)_orig.nraddr; \
4628 _argvec[1] = (unsigned long)(arg1); \
4629 _argvec[2] = (unsigned long)(arg2); \
4630 _argvec[3] = (unsigned long)(arg3); \
4631 _argvec[4] = (unsigned long)(arg4); \
4632 _argvec[5] = (unsigned long)(arg5); \
4633 _argvec[6] = (unsigned long)(arg6); \
4634 _argvec[7] = (unsigned long)(arg7); \
4635 _argvec[8] = (unsigned long)(arg8); \
4636 _argvec[9] = (unsigned long)(arg9); \
4637 _argvec[10] = (unsigned long)(arg10); \
4638 _argvec[11] = (unsigned long)(arg11); \
4639 _argvec[12] = (unsigned long)(arg12); \
4640 __asm__ volatile( \
4641 VALGRIND_ALIGN_STACK \
4642 "sub sp, sp, #0x30 \n\t" \
4643 "ldr x0, [%1, #8] \n\t" \
4644 "ldr x1, [%1, #16] \n\t" \
4645 "ldr x2, [%1, #24] \n\t" \
4646 "ldr x3, [%1, #32] \n\t" \
4647 "ldr x4, [%1, #40] \n\t" \
4648 "ldr x5, [%1, #48] \n\t" \
4649 "ldr x6, [%1, #56] \n\t" \
4650 "ldr x7, [%1, #64] \n\t" \
4651 "ldr x8, [%1, #72] \n\t" \
4652 "str x8, [sp, #0] \n\t" \
4653 "ldr x8, [%1, #80] \n\t" \
4654 "str x8, [sp, #8] \n\t" \
4655 "ldr x8, [%1, #88] \n\t" \
4656 "str x8, [sp, #16] \n\t" \
4657 "ldr x8, [%1, #96] \n\t" \
4658 "str x8, [sp, #24] \n\t" \
4659 "ldr x8, [%1] \n\t" /* target->x8 */ \
4660 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4661 VALGRIND_RESTORE_STACK \
4662 "mov %0, x0" \
4663 : /*out*/ "=r" (_res) \
4664 : /*in*/ "0" (&_argvec[0]) \
4665 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4666 ); \
4667 lval = (__typeof__(lval)) _res; \
4668 } while (0)
4669
4670#endif /* PLAT_arm64_linux */
4671
4672/* ------------------------- s390x-linux ------------------------- */
4673
4674#if defined(PLAT_s390x_linux)
4675
4676/* Similar workaround as amd64 (see above), but we use r11 as frame
4677 pointer and save the old r11 in r7. r11 might be used for
4678 argvec, therefore we copy argvec in r1 since r1 is clobbered
4679 after the call anyway. */
/* When DWARF CFI asm is available, emit .cfi directives so unwinding
   through the hidden call works: r1 gets a copy of the argvec pointer,
   the old frame pointer r11 is parked in r7, and r11 is pointed at the
   caller's CFA (passed in as %2 via __FRAME_POINTER).  Without CFI
   support, only the r1 copy is done and the extra input is empty.
   NOTE(review): the decimal prefix on each line is capture residue from
   the blame view, not part of the macros. */
4680#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
4681# define __FRAME_POINTER \
4682 ,"d"(__builtin_dwarf_cfa())
4683# define VALGRIND_CFI_PROLOGUE \
4684 ".cfi_remember_state\n\t" \
4685 "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
4686 "lgr 7,11\n\t" \
4687 "lgr 11,%2\n\t" \
4688 ".cfi_def_cfa r11, 0\n\t"
4689# define VALGRIND_CFI_EPILOGUE \
4690 "lgr 11, 7\n\t" \
4691 ".cfi_restore_state\n\t"
4692#else
4693# define __FRAME_POINTER
4694# define VALGRIND_CFI_PROLOGUE \
4695 "lgr 1,%1\n\t"
4696# define VALGRIND_CFI_EPILOGUE
4697#endif
4698
4699/* Nb: On s390 the stack pointer is properly aligned *at all times*
4700 according to the s390 GCC maintainer. (The ABI specification is not
4701 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4702 VALGRIND_RESTORE_STACK are not defined here. */
4703
4704/* These regs are trashed by the hidden call. Note that we overwrite
4705 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4706 function a proper return address. All others are ABI defined call
4707 clobbers. */
/* s390x call-clobbered register set: GPRs 0-5 plus 14 (overwritten for
   the return address, per the comment above), and either the full vector
   register file (when the vector facility is enabled at compile time)
   or just the call-clobbered FPRs f0-f7.
   NOTE(review): leading decimals are capture residue, not macro text. */
4708#if defined(__VX__) || defined(__S390_VX__)
4709#define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
4710 "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", \
4711 "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15", \
4712 "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", \
4713 "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31"
4714#else
4715#define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
4716 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7"
4717#endif
4718
4719/* Nb: Although r11 is modified in the asm snippets below (inside
4720 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
4721 two reasons:
4722 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
4723 modified
4724 (2) GCC will complain that r11 cannot appear inside a clobber section,
4725 when compiled with -O -fno-omit-frame-pointer
4726 */
4727
/* s390x hidden-call macros, 0..12 word args.  Common shape: r1 holds the
   argvec pointer (set up by VALGRIND_CFI_PROLOGUE); args 1-5 are loaded
   into r2-r6; args 6+ are copied with mvc into the callee's outgoing
   area above the 160-byte register save area; finally the target address
   is loaded from argvec[0] into r1 (clobbering the argvec pointer last)
   and called via VALGRIND_CALL_NOREDIR_R1.  The frame is grown/shrunk
   with aghi on r15 by 160 + 8*(stack args).  The result comes back in
   r2.  r7 is clobbered because the CFI prologue parks r11 there.
   NOTE(review): the decimal prefix on each line is line-number residue
   from the captured blame view, not part of the macros. */
4728#define CALL_FN_W_v(lval, orig) \
4729 do { \
4730 volatile OrigFn _orig = (orig); \
4731 volatile unsigned long _argvec[1]; \
4732 volatile unsigned long _res; \
4733 _argvec[0] = (unsigned long)_orig.nraddr; \
4734 __asm__ volatile( \
4735 VALGRIND_CFI_PROLOGUE \
4736 "aghi 15,-160\n\t" \
4737 "lg 1, 0(1)\n\t" /* target->r1 */ \
4738 VALGRIND_CALL_NOREDIR_R1 \
4739 "aghi 15,160\n\t" \
4740 VALGRIND_CFI_EPILOGUE \
4741 "lgr %0, 2\n\t" \
4742 : /*out*/ "=d" (_res) \
4743 : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
4744 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4745 ); \
4746 lval = (__typeof__(lval)) _res; \
4747 } while (0)
4748
4749/* The call abi has the arguments in r2-r6 and stack */
4750#define CALL_FN_W_W(lval, orig, arg1) \
4751 do { \
4752 volatile OrigFn _orig = (orig); \
4753 volatile unsigned long _argvec[2]; \
4754 volatile unsigned long _res; \
4755 _argvec[0] = (unsigned long)_orig.nraddr; \
4756 _argvec[1] = (unsigned long)arg1; \
4757 __asm__ volatile( \
4758 VALGRIND_CFI_PROLOGUE \
4759 "aghi 15,-160\n\t" \
4760 "lg 2, 8(1)\n\t" \
4761 "lg 1, 0(1)\n\t" \
4762 VALGRIND_CALL_NOREDIR_R1 \
4763 "aghi 15,160\n\t" \
4764 VALGRIND_CFI_EPILOGUE \
4765 "lgr %0, 2\n\t" \
4766 : /*out*/ "=d" (_res) \
4767 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4768 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4769 ); \
4770 lval = (__typeof__(lval)) _res; \
4771 } while (0)
4772
4773#define CALL_FN_W_WW(lval, orig, arg1, arg2) \
4774 do { \
4775 volatile OrigFn _orig = (orig); \
4776 volatile unsigned long _argvec[3]; \
4777 volatile unsigned long _res; \
4778 _argvec[0] = (unsigned long)_orig.nraddr; \
4779 _argvec[1] = (unsigned long)arg1; \
4780 _argvec[2] = (unsigned long)arg2; \
4781 __asm__ volatile( \
4782 VALGRIND_CFI_PROLOGUE \
4783 "aghi 15,-160\n\t" \
4784 "lg 2, 8(1)\n\t" \
4785 "lg 3,16(1)\n\t" \
4786 "lg 1, 0(1)\n\t" \
4787 VALGRIND_CALL_NOREDIR_R1 \
4788 "aghi 15,160\n\t" \
4789 VALGRIND_CFI_EPILOGUE \
4790 "lgr %0, 2\n\t" \
4791 : /*out*/ "=d" (_res) \
4792 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4793 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4794 ); \
4795 lval = (__typeof__(lval)) _res; \
4796 } while (0)
4797
4798#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
4799 do { \
4800 volatile OrigFn _orig = (orig); \
4801 volatile unsigned long _argvec[4]; \
4802 volatile unsigned long _res; \
4803 _argvec[0] = (unsigned long)_orig.nraddr; \
4804 _argvec[1] = (unsigned long)arg1; \
4805 _argvec[2] = (unsigned long)arg2; \
4806 _argvec[3] = (unsigned long)arg3; \
4807 __asm__ volatile( \
4808 VALGRIND_CFI_PROLOGUE \
4809 "aghi 15,-160\n\t" \
4810 "lg 2, 8(1)\n\t" \
4811 "lg 3,16(1)\n\t" \
4812 "lg 4,24(1)\n\t" \
4813 "lg 1, 0(1)\n\t" \
4814 VALGRIND_CALL_NOREDIR_R1 \
4815 "aghi 15,160\n\t" \
4816 VALGRIND_CFI_EPILOGUE \
4817 "lgr %0, 2\n\t" \
4818 : /*out*/ "=d" (_res) \
4819 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4820 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4821 ); \
4822 lval = (__typeof__(lval)) _res; \
4823 } while (0)
4824
4825#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
4826 do { \
4827 volatile OrigFn _orig = (orig); \
4828 volatile unsigned long _argvec[5]; \
4829 volatile unsigned long _res; \
4830 _argvec[0] = (unsigned long)_orig.nraddr; \
4831 _argvec[1] = (unsigned long)arg1; \
4832 _argvec[2] = (unsigned long)arg2; \
4833 _argvec[3] = (unsigned long)arg3; \
4834 _argvec[4] = (unsigned long)arg4; \
4835 __asm__ volatile( \
4836 VALGRIND_CFI_PROLOGUE \
4837 "aghi 15,-160\n\t" \
4838 "lg 2, 8(1)\n\t" \
4839 "lg 3,16(1)\n\t" \
4840 "lg 4,24(1)\n\t" \
4841 "lg 5,32(1)\n\t" \
4842 "lg 1, 0(1)\n\t" \
4843 VALGRIND_CALL_NOREDIR_R1 \
4844 "aghi 15,160\n\t" \
4845 VALGRIND_CFI_EPILOGUE \
4846 "lgr %0, 2\n\t" \
4847 : /*out*/ "=d" (_res) \
4848 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4849 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4850 ); \
4851 lval = (__typeof__(lval)) _res; \
4852 } while (0)
4853
/* From 5 args on, r6 carries an argument and so joins the clobbers. */
4854#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
4855 do { \
4856 volatile OrigFn _orig = (orig); \
4857 volatile unsigned long _argvec[6]; \
4858 volatile unsigned long _res; \
4859 _argvec[0] = (unsigned long)_orig.nraddr; \
4860 _argvec[1] = (unsigned long)arg1; \
4861 _argvec[2] = (unsigned long)arg2; \
4862 _argvec[3] = (unsigned long)arg3; \
4863 _argvec[4] = (unsigned long)arg4; \
4864 _argvec[5] = (unsigned long)arg5; \
4865 __asm__ volatile( \
4866 VALGRIND_CFI_PROLOGUE \
4867 "aghi 15,-160\n\t" \
4868 "lg 2, 8(1)\n\t" \
4869 "lg 3,16(1)\n\t" \
4870 "lg 4,24(1)\n\t" \
4871 "lg 5,32(1)\n\t" \
4872 "lg 6,40(1)\n\t" \
4873 "lg 1, 0(1)\n\t" \
4874 VALGRIND_CALL_NOREDIR_R1 \
4875 "aghi 15,160\n\t" \
4876 VALGRIND_CFI_EPILOGUE \
4877 "lgr %0, 2\n\t" \
4878 : /*out*/ "=d" (_res) \
4879 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4880 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4881 ); \
4882 lval = (__typeof__(lval)) _res; \
4883 } while (0)
4884
/* From 6 args on, the frame grows by 8 per extra arg and mvc copies the
   overflow args to offsets 160, 168, ... above the new stack pointer. */
4885#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4886 arg6) \
4887 do { \
4888 volatile OrigFn _orig = (orig); \
4889 volatile unsigned long _argvec[7]; \
4890 volatile unsigned long _res; \
4891 _argvec[0] = (unsigned long)_orig.nraddr; \
4892 _argvec[1] = (unsigned long)arg1; \
4893 _argvec[2] = (unsigned long)arg2; \
4894 _argvec[3] = (unsigned long)arg3; \
4895 _argvec[4] = (unsigned long)arg4; \
4896 _argvec[5] = (unsigned long)arg5; \
4897 _argvec[6] = (unsigned long)arg6; \
4898 __asm__ volatile( \
4899 VALGRIND_CFI_PROLOGUE \
4900 "aghi 15,-168\n\t" \
4901 "lg 2, 8(1)\n\t" \
4902 "lg 3,16(1)\n\t" \
4903 "lg 4,24(1)\n\t" \
4904 "lg 5,32(1)\n\t" \
4905 "lg 6,40(1)\n\t" \
4906 "mvc 160(8,15), 48(1)\n\t" \
4907 "lg 1, 0(1)\n\t" \
4908 VALGRIND_CALL_NOREDIR_R1 \
4909 "aghi 15,168\n\t" \
4910 VALGRIND_CFI_EPILOGUE \
4911 "lgr %0, 2\n\t" \
4912 : /*out*/ "=d" (_res) \
4913 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4914 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4915 ); \
4916 lval = (__typeof__(lval)) _res; \
4917 } while (0)
4918
4919#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4920 arg6, arg7) \
4921 do { \
4922 volatile OrigFn _orig = (orig); \
4923 volatile unsigned long _argvec[8]; \
4924 volatile unsigned long _res; \
4925 _argvec[0] = (unsigned long)_orig.nraddr; \
4926 _argvec[1] = (unsigned long)arg1; \
4927 _argvec[2] = (unsigned long)arg2; \
4928 _argvec[3] = (unsigned long)arg3; \
4929 _argvec[4] = (unsigned long)arg4; \
4930 _argvec[5] = (unsigned long)arg5; \
4931 _argvec[6] = (unsigned long)arg6; \
4932 _argvec[7] = (unsigned long)arg7; \
4933 __asm__ volatile( \
4934 VALGRIND_CFI_PROLOGUE \
4935 "aghi 15,-176\n\t" \
4936 "lg 2, 8(1)\n\t" \
4937 "lg 3,16(1)\n\t" \
4938 "lg 4,24(1)\n\t" \
4939 "lg 5,32(1)\n\t" \
4940 "lg 6,40(1)\n\t" \
4941 "mvc 160(8,15), 48(1)\n\t" \
4942 "mvc 168(8,15), 56(1)\n\t" \
4943 "lg 1, 0(1)\n\t" \
4944 VALGRIND_CALL_NOREDIR_R1 \
4945 "aghi 15,176\n\t" \
4946 VALGRIND_CFI_EPILOGUE \
4947 "lgr %0, 2\n\t" \
4948 : /*out*/ "=d" (_res) \
4949 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4950 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4951 ); \
4952 lval = (__typeof__(lval)) _res; \
4953 } while (0)
4954
4955#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4956 arg6, arg7 ,arg8) \
4957 do { \
4958 volatile OrigFn _orig = (orig); \
4959 volatile unsigned long _argvec[9]; \
4960 volatile unsigned long _res; \
4961 _argvec[0] = (unsigned long)_orig.nraddr; \
4962 _argvec[1] = (unsigned long)arg1; \
4963 _argvec[2] = (unsigned long)arg2; \
4964 _argvec[3] = (unsigned long)arg3; \
4965 _argvec[4] = (unsigned long)arg4; \
4966 _argvec[5] = (unsigned long)arg5; \
4967 _argvec[6] = (unsigned long)arg6; \
4968 _argvec[7] = (unsigned long)arg7; \
4969 _argvec[8] = (unsigned long)arg8; \
4970 __asm__ volatile( \
4971 VALGRIND_CFI_PROLOGUE \
4972 "aghi 15,-184\n\t" \
4973 "lg 2, 8(1)\n\t" \
4974 "lg 3,16(1)\n\t" \
4975 "lg 4,24(1)\n\t" \
4976 "lg 5,32(1)\n\t" \
4977 "lg 6,40(1)\n\t" \
4978 "mvc 160(8,15), 48(1)\n\t" \
4979 "mvc 168(8,15), 56(1)\n\t" \
4980 "mvc 176(8,15), 64(1)\n\t" \
4981 "lg 1, 0(1)\n\t" \
4982 VALGRIND_CALL_NOREDIR_R1 \
4983 "aghi 15,184\n\t" \
4984 VALGRIND_CFI_EPILOGUE \
4985 "lgr %0, 2\n\t" \
4986 : /*out*/ "=d" (_res) \
4987 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4988 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4989 ); \
4990 lval = (__typeof__(lval)) _res; \
4991 } while (0)
4992
4993#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4994 arg6, arg7 ,arg8, arg9) \
4995 do { \
4996 volatile OrigFn _orig = (orig); \
4997 volatile unsigned long _argvec[10]; \
4998 volatile unsigned long _res; \
4999 _argvec[0] = (unsigned long)_orig.nraddr; \
5000 _argvec[1] = (unsigned long)arg1; \
5001 _argvec[2] = (unsigned long)arg2; \
5002 _argvec[3] = (unsigned long)arg3; \
5003 _argvec[4] = (unsigned long)arg4; \
5004 _argvec[5] = (unsigned long)arg5; \
5005 _argvec[6] = (unsigned long)arg6; \
5006 _argvec[7] = (unsigned long)arg7; \
5007 _argvec[8] = (unsigned long)arg8; \
5008 _argvec[9] = (unsigned long)arg9; \
5009 __asm__ volatile( \
5010 VALGRIND_CFI_PROLOGUE \
5011 "aghi 15,-192\n\t" \
5012 "lg 2, 8(1)\n\t" \
5013 "lg 3,16(1)\n\t" \
5014 "lg 4,24(1)\n\t" \
5015 "lg 5,32(1)\n\t" \
5016 "lg 6,40(1)\n\t" \
5017 "mvc 160(8,15), 48(1)\n\t" \
5018 "mvc 168(8,15), 56(1)\n\t" \
5019 "mvc 176(8,15), 64(1)\n\t" \
5020 "mvc 184(8,15), 72(1)\n\t" \
5021 "lg 1, 0(1)\n\t" \
5022 VALGRIND_CALL_NOREDIR_R1 \
5023 "aghi 15,192\n\t" \
5024 VALGRIND_CFI_EPILOGUE \
5025 "lgr %0, 2\n\t" \
5026 : /*out*/ "=d" (_res) \
5027 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5028 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5029 ); \
5030 lval = (__typeof__(lval)) _res; \
5031 } while (0)
5032
5033#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5034 arg6, arg7 ,arg8, arg9, arg10) \
5035 do { \
5036 volatile OrigFn _orig = (orig); \
5037 volatile unsigned long _argvec[11]; \
5038 volatile unsigned long _res; \
5039 _argvec[0] = (unsigned long)_orig.nraddr; \
5040 _argvec[1] = (unsigned long)arg1; \
5041 _argvec[2] = (unsigned long)arg2; \
5042 _argvec[3] = (unsigned long)arg3; \
5043 _argvec[4] = (unsigned long)arg4; \
5044 _argvec[5] = (unsigned long)arg5; \
5045 _argvec[6] = (unsigned long)arg6; \
5046 _argvec[7] = (unsigned long)arg7; \
5047 _argvec[8] = (unsigned long)arg8; \
5048 _argvec[9] = (unsigned long)arg9; \
5049 _argvec[10] = (unsigned long)arg10; \
5050 __asm__ volatile( \
5051 VALGRIND_CFI_PROLOGUE \
5052 "aghi 15,-200\n\t" \
5053 "lg 2, 8(1)\n\t" \
5054 "lg 3,16(1)\n\t" \
5055 "lg 4,24(1)\n\t" \
5056 "lg 5,32(1)\n\t" \
5057 "lg 6,40(1)\n\t" \
5058 "mvc 160(8,15), 48(1)\n\t" \
5059 "mvc 168(8,15), 56(1)\n\t" \
5060 "mvc 176(8,15), 64(1)\n\t" \
5061 "mvc 184(8,15), 72(1)\n\t" \
5062 "mvc 192(8,15), 80(1)\n\t" \
5063 "lg 1, 0(1)\n\t" \
5064 VALGRIND_CALL_NOREDIR_R1 \
5065 "aghi 15,200\n\t" \
5066 VALGRIND_CFI_EPILOGUE \
5067 "lgr %0, 2\n\t" \
5068 : /*out*/ "=d" (_res) \
5069 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5070 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5071 ); \
5072 lval = (__typeof__(lval)) _res; \
5073 } while (0)
5074
5075#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5076 arg6, arg7 ,arg8, arg9, arg10, arg11) \
5077 do { \
5078 volatile OrigFn _orig = (orig); \
5079 volatile unsigned long _argvec[12]; \
5080 volatile unsigned long _res; \
5081 _argvec[0] = (unsigned long)_orig.nraddr; \
5082 _argvec[1] = (unsigned long)arg1; \
5083 _argvec[2] = (unsigned long)arg2; \
5084 _argvec[3] = (unsigned long)arg3; \
5085 _argvec[4] = (unsigned long)arg4; \
5086 _argvec[5] = (unsigned long)arg5; \
5087 _argvec[6] = (unsigned long)arg6; \
5088 _argvec[7] = (unsigned long)arg7; \
5089 _argvec[8] = (unsigned long)arg8; \
5090 _argvec[9] = (unsigned long)arg9; \
5091 _argvec[10] = (unsigned long)arg10; \
5092 _argvec[11] = (unsigned long)arg11; \
5093 __asm__ volatile( \
5094 VALGRIND_CFI_PROLOGUE \
5095 "aghi 15,-208\n\t" \
5096 "lg 2, 8(1)\n\t" \
5097 "lg 3,16(1)\n\t" \
5098 "lg 4,24(1)\n\t" \
5099 "lg 5,32(1)\n\t" \
5100 "lg 6,40(1)\n\t" \
5101 "mvc 160(8,15), 48(1)\n\t" \
5102 "mvc 168(8,15), 56(1)\n\t" \
5103 "mvc 176(8,15), 64(1)\n\t" \
5104 "mvc 184(8,15), 72(1)\n\t" \
5105 "mvc 192(8,15), 80(1)\n\t" \
5106 "mvc 200(8,15), 88(1)\n\t" \
5107 "lg 1, 0(1)\n\t" \
5108 VALGRIND_CALL_NOREDIR_R1 \
5109 "aghi 15,208\n\t" \
5110 VALGRIND_CFI_EPILOGUE \
5111 "lgr %0, 2\n\t" \
5112 : /*out*/ "=d" (_res) \
5113 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5114 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5115 ); \
5116 lval = (__typeof__(lval)) _res; \
5117 } while (0)
5118
5119#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5120 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
5121 do { \
5122 volatile OrigFn _orig = (orig); \
5123 volatile unsigned long _argvec[13]; \
5124 volatile unsigned long _res; \
5125 _argvec[0] = (unsigned long)_orig.nraddr; \
5126 _argvec[1] = (unsigned long)arg1; \
5127 _argvec[2] = (unsigned long)arg2; \
5128 _argvec[3] = (unsigned long)arg3; \
5129 _argvec[4] = (unsigned long)arg4; \
5130 _argvec[5] = (unsigned long)arg5; \
5131 _argvec[6] = (unsigned long)arg6; \
5132 _argvec[7] = (unsigned long)arg7; \
5133 _argvec[8] = (unsigned long)arg8; \
5134 _argvec[9] = (unsigned long)arg9; \
5135 _argvec[10] = (unsigned long)arg10; \
5136 _argvec[11] = (unsigned long)arg11; \
5137 _argvec[12] = (unsigned long)arg12; \
5138 __asm__ volatile( \
5139 VALGRIND_CFI_PROLOGUE \
5140 "aghi 15,-216\n\t" \
5141 "lg 2, 8(1)\n\t" \
5142 "lg 3,16(1)\n\t" \
5143 "lg 4,24(1)\n\t" \
5144 "lg 5,32(1)\n\t" \
5145 "lg 6,40(1)\n\t" \
5146 "mvc 160(8,15), 48(1)\n\t" \
5147 "mvc 168(8,15), 56(1)\n\t" \
5148 "mvc 176(8,15), 64(1)\n\t" \
5149 "mvc 184(8,15), 72(1)\n\t" \
5150 "mvc 192(8,15), 80(1)\n\t" \
5151 "mvc 200(8,15), 88(1)\n\t" \
5152 "mvc 208(8,15), 96(1)\n\t" \
5153 "lg 1, 0(1)\n\t" \
5154 VALGRIND_CALL_NOREDIR_R1 \
5155 "aghi 15,216\n\t" \
5156 VALGRIND_CFI_EPILOGUE \
5157 "lgr %0, 2\n\t" \
5158 : /*out*/ "=d" (_res) \
5159 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5160 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5161 ); \
5162 lval = (__typeof__(lval)) _res; \
5163 } while (0)
5164
Sean Anderson3b004842022-03-23 14:04:48 -04005165#endif /* PLAT_s390x_linux */
5166
5167/* ------------------------- mips32-linux ----------------------- */
5168
5169#if defined(PLAT_mips32_linux)
5170
5171/* These regs are trashed by the hidden call. */
/* mips32 (o32) call-clobbered set: v0-v1, a0-a3, t0-t9 ($8-$15, $24,
   $25) and ra ($31).
   NOTE(review): leading decimals are capture residue, not macro text. */
5172#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
5173"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
5174"$25", "$31"
5175
5176/* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5177 long) == 4. */
5178
/* mips32 (o32) hidden-call macros, 0..10 word args.  Common shape: first
   save $gp ($28) and $ra ($31) in an 8-byte area pushed on $sp ($29),
   then drop the stack by 16+ bytes (o32 always reserves a 16-byte
   outgoing-argument area); args 1-4 go in $a0-$a3 ($4-$7), args 5+ are
   staged through $4 and stored into the outgoing area at 16($29) up.
   The target address from argvec[0] is loaded into $t9 ($25) — o32
   PIC calling convention — and invoked via VALGRIND_CALL_NOREDIR_T9.
   Afterwards the frame is popped, $gp/$ra restored, and the result
   taken from $v0 ($2).
   NOTE(review): the decimal prefix on each line is line-number residue
   from the captured blame view, not part of the macros. */
5179#define CALL_FN_W_v(lval, orig) \
5180 do { \
5181 volatile OrigFn _orig = (orig); \
5182 volatile unsigned long _argvec[1]; \
5183 volatile unsigned long _res; \
5184 _argvec[0] = (unsigned long)_orig.nraddr; \
5185 __asm__ volatile( \
5186 "subu $29, $29, 8 \n\t" \
5187 "sw $28, 0($29) \n\t" \
5188 "sw $31, 4($29) \n\t" \
5189 "subu $29, $29, 16 \n\t" \
5190 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5191 VALGRIND_CALL_NOREDIR_T9 \
5192 "addu $29, $29, 16\n\t" \
5193 "lw $28, 0($29) \n\t" \
5194 "lw $31, 4($29) \n\t" \
5195 "addu $29, $29, 8 \n\t" \
5196 "move %0, $2\n" \
5197 : /*out*/ "=r" (_res) \
5198 : /*in*/ "0" (&_argvec[0]) \
5199 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5200 ); \
5201 lval = (__typeof__(lval)) _res; \
5202 } while (0)
5203
5204#define CALL_FN_W_W(lval, orig, arg1) \
5205 do { \
5206 volatile OrigFn _orig = (orig); \
5207 volatile unsigned long _argvec[2]; \
5208 volatile unsigned long _res; \
5209 _argvec[0] = (unsigned long)_orig.nraddr; \
5210 _argvec[1] = (unsigned long)(arg1); \
5211 __asm__ volatile( \
5212 "subu $29, $29, 8 \n\t" \
5213 "sw $28, 0($29) \n\t" \
5214 "sw $31, 4($29) \n\t" \
5215 "subu $29, $29, 16 \n\t" \
5216 "lw $4, 4(%1) \n\t" /* arg1*/ \
5217 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5218 VALGRIND_CALL_NOREDIR_T9 \
5219 "addu $29, $29, 16 \n\t" \
5220 "lw $28, 0($29) \n\t" \
5221 "lw $31, 4($29) \n\t" \
5222 "addu $29, $29, 8 \n\t" \
5223 "move %0, $2\n" \
5224 : /*out*/ "=r" (_res) \
5225 : /*in*/ "0" (&_argvec[0]) \
5226 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5227 ); \
5228 lval = (__typeof__(lval)) _res; \
5229 } while (0)
5230
5231#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5232 do { \
5233 volatile OrigFn _orig = (orig); \
5234 volatile unsigned long _argvec[3]; \
5235 volatile unsigned long _res; \
5236 _argvec[0] = (unsigned long)_orig.nraddr; \
5237 _argvec[1] = (unsigned long)(arg1); \
5238 _argvec[2] = (unsigned long)(arg2); \
5239 __asm__ volatile( \
5240 "subu $29, $29, 8 \n\t" \
5241 "sw $28, 0($29) \n\t" \
5242 "sw $31, 4($29) \n\t" \
5243 "subu $29, $29, 16 \n\t" \
5244 "lw $4, 4(%1) \n\t" \
5245 "lw $5, 8(%1) \n\t" \
5246 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5247 VALGRIND_CALL_NOREDIR_T9 \
5248 "addu $29, $29, 16 \n\t" \
5249 "lw $28, 0($29) \n\t" \
5250 "lw $31, 4($29) \n\t" \
5251 "addu $29, $29, 8 \n\t" \
5252 "move %0, $2\n" \
5253 : /*out*/ "=r" (_res) \
5254 : /*in*/ "0" (&_argvec[0]) \
5255 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5256 ); \
5257 lval = (__typeof__(lval)) _res; \
5258 } while (0)
5259
5260#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5261 do { \
5262 volatile OrigFn _orig = (orig); \
5263 volatile unsigned long _argvec[4]; \
5264 volatile unsigned long _res; \
5265 _argvec[0] = (unsigned long)_orig.nraddr; \
5266 _argvec[1] = (unsigned long)(arg1); \
5267 _argvec[2] = (unsigned long)(arg2); \
5268 _argvec[3] = (unsigned long)(arg3); \
5269 __asm__ volatile( \
5270 "subu $29, $29, 8 \n\t" \
5271 "sw $28, 0($29) \n\t" \
5272 "sw $31, 4($29) \n\t" \
5273 "subu $29, $29, 16 \n\t" \
5274 "lw $4, 4(%1) \n\t" \
5275 "lw $5, 8(%1) \n\t" \
5276 "lw $6, 12(%1) \n\t" \
5277 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5278 VALGRIND_CALL_NOREDIR_T9 \
5279 "addu $29, $29, 16 \n\t" \
5280 "lw $28, 0($29) \n\t" \
5281 "lw $31, 4($29) \n\t" \
5282 "addu $29, $29, 8 \n\t" \
5283 "move %0, $2\n" \
5284 : /*out*/ "=r" (_res) \
5285 : /*in*/ "0" (&_argvec[0]) \
5286 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5287 ); \
5288 lval = (__typeof__(lval)) _res; \
5289 } while (0)
5290
5291#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5292 do { \
5293 volatile OrigFn _orig = (orig); \
5294 volatile unsigned long _argvec[5]; \
5295 volatile unsigned long _res; \
5296 _argvec[0] = (unsigned long)_orig.nraddr; \
5297 _argvec[1] = (unsigned long)(arg1); \
5298 _argvec[2] = (unsigned long)(arg2); \
5299 _argvec[3] = (unsigned long)(arg3); \
5300 _argvec[4] = (unsigned long)(arg4); \
5301 __asm__ volatile( \
5302 "subu $29, $29, 8 \n\t" \
5303 "sw $28, 0($29) \n\t" \
5304 "sw $31, 4($29) \n\t" \
5305 "subu $29, $29, 16 \n\t" \
5306 "lw $4, 4(%1) \n\t" \
5307 "lw $5, 8(%1) \n\t" \
5308 "lw $6, 12(%1) \n\t" \
5309 "lw $7, 16(%1) \n\t" \
5310 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5311 VALGRIND_CALL_NOREDIR_T9 \
5312 "addu $29, $29, 16 \n\t" \
5313 "lw $28, 0($29) \n\t" \
5314 "lw $31, 4($29) \n\t" \
5315 "addu $29, $29, 8 \n\t" \
5316 "move %0, $2\n" \
5317 : /*out*/ "=r" (_res) \
5318 : /*in*/ "0" (&_argvec[0]) \
5319 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5320 ); \
5321 lval = (__typeof__(lval)) _res; \
5322 } while (0)
5323
/* From 5 args on, extra args are staged through $4 into the outgoing
   stack area before $a0-$a3 are (re)loaded with args 1-4. */
5324#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5325 do { \
5326 volatile OrigFn _orig = (orig); \
5327 volatile unsigned long _argvec[6]; \
5328 volatile unsigned long _res; \
5329 _argvec[0] = (unsigned long)_orig.nraddr; \
5330 _argvec[1] = (unsigned long)(arg1); \
5331 _argvec[2] = (unsigned long)(arg2); \
5332 _argvec[3] = (unsigned long)(arg3); \
5333 _argvec[4] = (unsigned long)(arg4); \
5334 _argvec[5] = (unsigned long)(arg5); \
5335 __asm__ volatile( \
5336 "subu $29, $29, 8 \n\t" \
5337 "sw $28, 0($29) \n\t" \
5338 "sw $31, 4($29) \n\t" \
5339 "lw $4, 20(%1) \n\t" \
5340 "subu $29, $29, 24\n\t" \
5341 "sw $4, 16($29) \n\t" \
5342 "lw $4, 4(%1) \n\t" \
5343 "lw $5, 8(%1) \n\t" \
5344 "lw $6, 12(%1) \n\t" \
5345 "lw $7, 16(%1) \n\t" \
5346 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5347 VALGRIND_CALL_NOREDIR_T9 \
5348 "addu $29, $29, 24 \n\t" \
5349 "lw $28, 0($29) \n\t" \
5350 "lw $31, 4($29) \n\t" \
5351 "addu $29, $29, 8 \n\t" \
5352 "move %0, $2\n" \
5353 : /*out*/ "=r" (_res) \
5354 : /*in*/ "0" (&_argvec[0]) \
5355 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5356 ); \
5357 lval = (__typeof__(lval)) _res; \
5358 } while (0)
5359#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5360 do { \
5361 volatile OrigFn _orig = (orig); \
5362 volatile unsigned long _argvec[7]; \
5363 volatile unsigned long _res; \
5364 _argvec[0] = (unsigned long)_orig.nraddr; \
5365 _argvec[1] = (unsigned long)(arg1); \
5366 _argvec[2] = (unsigned long)(arg2); \
5367 _argvec[3] = (unsigned long)(arg3); \
5368 _argvec[4] = (unsigned long)(arg4); \
5369 _argvec[5] = (unsigned long)(arg5); \
5370 _argvec[6] = (unsigned long)(arg6); \
5371 __asm__ volatile( \
5372 "subu $29, $29, 8 \n\t" \
5373 "sw $28, 0($29) \n\t" \
5374 "sw $31, 4($29) \n\t" \
5375 "lw $4, 20(%1) \n\t" \
5376 "subu $29, $29, 32\n\t" \
5377 "sw $4, 16($29) \n\t" \
5378 "lw $4, 24(%1) \n\t" \
5379 "nop\n\t" \
5380 "sw $4, 20($29) \n\t" \
5381 "lw $4, 4(%1) \n\t" \
5382 "lw $5, 8(%1) \n\t" \
5383 "lw $6, 12(%1) \n\t" \
5384 "lw $7, 16(%1) \n\t" \
5385 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5386 VALGRIND_CALL_NOREDIR_T9 \
5387 "addu $29, $29, 32 \n\t" \
5388 "lw $28, 0($29) \n\t" \
5389 "lw $31, 4($29) \n\t" \
5390 "addu $29, $29, 8 \n\t" \
5391 "move %0, $2\n" \
5392 : /*out*/ "=r" (_res) \
5393 : /*in*/ "0" (&_argvec[0]) \
5394 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5395 ); \
5396 lval = (__typeof__(lval)) _res; \
5397 } while (0)
5398
5399#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5400 arg7) \
5401 do { \
5402 volatile OrigFn _orig = (orig); \
5403 volatile unsigned long _argvec[8]; \
5404 volatile unsigned long _res; \
5405 _argvec[0] = (unsigned long)_orig.nraddr; \
5406 _argvec[1] = (unsigned long)(arg1); \
5407 _argvec[2] = (unsigned long)(arg2); \
5408 _argvec[3] = (unsigned long)(arg3); \
5409 _argvec[4] = (unsigned long)(arg4); \
5410 _argvec[5] = (unsigned long)(arg5); \
5411 _argvec[6] = (unsigned long)(arg6); \
5412 _argvec[7] = (unsigned long)(arg7); \
5413 __asm__ volatile( \
5414 "subu $29, $29, 8 \n\t" \
5415 "sw $28, 0($29) \n\t" \
5416 "sw $31, 4($29) \n\t" \
5417 "lw $4, 20(%1) \n\t" \
5418 "subu $29, $29, 32\n\t" \
5419 "sw $4, 16($29) \n\t" \
5420 "lw $4, 24(%1) \n\t" \
5421 "sw $4, 20($29) \n\t" \
5422 "lw $4, 28(%1) \n\t" \
5423 "sw $4, 24($29) \n\t" \
5424 "lw $4, 4(%1) \n\t" \
5425 "lw $5, 8(%1) \n\t" \
5426 "lw $6, 12(%1) \n\t" \
5427 "lw $7, 16(%1) \n\t" \
5428 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5429 VALGRIND_CALL_NOREDIR_T9 \
5430 "addu $29, $29, 32 \n\t" \
5431 "lw $28, 0($29) \n\t" \
5432 "lw $31, 4($29) \n\t" \
5433 "addu $29, $29, 8 \n\t" \
5434 "move %0, $2\n" \
5435 : /*out*/ "=r" (_res) \
5436 : /*in*/ "0" (&_argvec[0]) \
5437 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5438 ); \
5439 lval = (__typeof__(lval)) _res; \
5440 } while (0)
5441
5442#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5443 arg7,arg8) \
5444 do { \
5445 volatile OrigFn _orig = (orig); \
5446 volatile unsigned long _argvec[9]; \
5447 volatile unsigned long _res; \
5448 _argvec[0] = (unsigned long)_orig.nraddr; \
5449 _argvec[1] = (unsigned long)(arg1); \
5450 _argvec[2] = (unsigned long)(arg2); \
5451 _argvec[3] = (unsigned long)(arg3); \
5452 _argvec[4] = (unsigned long)(arg4); \
5453 _argvec[5] = (unsigned long)(arg5); \
5454 _argvec[6] = (unsigned long)(arg6); \
5455 _argvec[7] = (unsigned long)(arg7); \
5456 _argvec[8] = (unsigned long)(arg8); \
5457 __asm__ volatile( \
5458 "subu $29, $29, 8 \n\t" \
5459 "sw $28, 0($29) \n\t" \
5460 "sw $31, 4($29) \n\t" \
5461 "lw $4, 20(%1) \n\t" \
5462 "subu $29, $29, 40\n\t" \
5463 "sw $4, 16($29) \n\t" \
5464 "lw $4, 24(%1) \n\t" \
5465 "sw $4, 20($29) \n\t" \
5466 "lw $4, 28(%1) \n\t" \
5467 "sw $4, 24($29) \n\t" \
5468 "lw $4, 32(%1) \n\t" \
5469 "sw $4, 28($29) \n\t" \
5470 "lw $4, 4(%1) \n\t" \
5471 "lw $5, 8(%1) \n\t" \
5472 "lw $6, 12(%1) \n\t" \
5473 "lw $7, 16(%1) \n\t" \
5474 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5475 VALGRIND_CALL_NOREDIR_T9 \
5476 "addu $29, $29, 40 \n\t" \
5477 "lw $28, 0($29) \n\t" \
5478 "lw $31, 4($29) \n\t" \
5479 "addu $29, $29, 8 \n\t" \
5480 "move %0, $2\n" \
5481 : /*out*/ "=r" (_res) \
5482 : /*in*/ "0" (&_argvec[0]) \
5483 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5484 ); \
5485 lval = (__typeof__(lval)) _res; \
5486 } while (0)
5487
5488#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5489 arg7,arg8,arg9) \
5490 do { \
5491 volatile OrigFn _orig = (orig); \
5492 volatile unsigned long _argvec[10]; \
5493 volatile unsigned long _res; \
5494 _argvec[0] = (unsigned long)_orig.nraddr; \
5495 _argvec[1] = (unsigned long)(arg1); \
5496 _argvec[2] = (unsigned long)(arg2); \
5497 _argvec[3] = (unsigned long)(arg3); \
5498 _argvec[4] = (unsigned long)(arg4); \
5499 _argvec[5] = (unsigned long)(arg5); \
5500 _argvec[6] = (unsigned long)(arg6); \
5501 _argvec[7] = (unsigned long)(arg7); \
5502 _argvec[8] = (unsigned long)(arg8); \
5503 _argvec[9] = (unsigned long)(arg9); \
5504 __asm__ volatile( \
5505 "subu $29, $29, 8 \n\t" \
5506 "sw $28, 0($29) \n\t" \
5507 "sw $31, 4($29) \n\t" \
5508 "lw $4, 20(%1) \n\t" \
5509 "subu $29, $29, 40\n\t" \
5510 "sw $4, 16($29) \n\t" \
5511 "lw $4, 24(%1) \n\t" \
5512 "sw $4, 20($29) \n\t" \
5513 "lw $4, 28(%1) \n\t" \
5514 "sw $4, 24($29) \n\t" \
5515 "lw $4, 32(%1) \n\t" \
5516 "sw $4, 28($29) \n\t" \
5517 "lw $4, 36(%1) \n\t" \
5518 "sw $4, 32($29) \n\t" \
5519 "lw $4, 4(%1) \n\t" \
5520 "lw $5, 8(%1) \n\t" \
5521 "lw $6, 12(%1) \n\t" \
5522 "lw $7, 16(%1) \n\t" \
5523 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5524 VALGRIND_CALL_NOREDIR_T9 \
5525 "addu $29, $29, 40 \n\t" \
5526 "lw $28, 0($29) \n\t" \
5527 "lw $31, 4($29) \n\t" \
5528 "addu $29, $29, 8 \n\t" \
5529 "move %0, $2\n" \
5530 : /*out*/ "=r" (_res) \
5531 : /*in*/ "0" (&_argvec[0]) \
5532 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5533 ); \
5534 lval = (__typeof__(lval)) _res; \
5535 } while (0)
5536
5537#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5538 arg7,arg8,arg9,arg10) \
5539 do { \
5540 volatile OrigFn _orig = (orig); \
5541 volatile unsigned long _argvec[11]; \
5542 volatile unsigned long _res; \
5543 _argvec[0] = (unsigned long)_orig.nraddr; \
5544 _argvec[1] = (unsigned long)(arg1); \
5545 _argvec[2] = (unsigned long)(arg2); \
5546 _argvec[3] = (unsigned long)(arg3); \
5547 _argvec[4] = (unsigned long)(arg4); \
5548 _argvec[5] = (unsigned long)(arg5); \
5549 _argvec[6] = (unsigned long)(arg6); \
5550 _argvec[7] = (unsigned long)(arg7); \
5551 _argvec[8] = (unsigned long)(arg8); \
5552 _argvec[9] = (unsigned long)(arg9); \
5553 _argvec[10] = (unsigned long)(arg10); \
5554 __asm__ volatile( \
5555 "subu $29, $29, 8 \n\t" \
5556 "sw $28, 0($29) \n\t" \
5557 "sw $31, 4($29) \n\t" \
5558 "lw $4, 20(%1) \n\t" \
5559 "subu $29, $29, 48\n\t" \
5560 "sw $4, 16($29) \n\t" \
5561 "lw $4, 24(%1) \n\t" \
5562 "sw $4, 20($29) \n\t" \
5563 "lw $4, 28(%1) \n\t" \
5564 "sw $4, 24($29) \n\t" \
5565 "lw $4, 32(%1) \n\t" \
5566 "sw $4, 28($29) \n\t" \
5567 "lw $4, 36(%1) \n\t" \
5568 "sw $4, 32($29) \n\t" \
5569 "lw $4, 40(%1) \n\t" \
5570 "sw $4, 36($29) \n\t" \
5571 "lw $4, 4(%1) \n\t" \
5572 "lw $5, 8(%1) \n\t" \
5573 "lw $6, 12(%1) \n\t" \
5574 "lw $7, 16(%1) \n\t" \
5575 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5576 VALGRIND_CALL_NOREDIR_T9 \
5577 "addu $29, $29, 48 \n\t" \
5578 "lw $28, 0($29) \n\t" \
5579 "lw $31, 4($29) \n\t" \
5580 "addu $29, $29, 8 \n\t" \
5581 "move %0, $2\n" \
5582 : /*out*/ "=r" (_res) \
5583 : /*in*/ "0" (&_argvec[0]) \
5584 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5585 ); \
5586 lval = (__typeof__(lval)) _res; \
5587 } while (0)
5588
5589#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5590 arg6,arg7,arg8,arg9,arg10, \
5591 arg11) \
5592 do { \
5593 volatile OrigFn _orig = (orig); \
5594 volatile unsigned long _argvec[12]; \
5595 volatile unsigned long _res; \
5596 _argvec[0] = (unsigned long)_orig.nraddr; \
5597 _argvec[1] = (unsigned long)(arg1); \
5598 _argvec[2] = (unsigned long)(arg2); \
5599 _argvec[3] = (unsigned long)(arg3); \
5600 _argvec[4] = (unsigned long)(arg4); \
5601 _argvec[5] = (unsigned long)(arg5); \
5602 _argvec[6] = (unsigned long)(arg6); \
5603 _argvec[7] = (unsigned long)(arg7); \
5604 _argvec[8] = (unsigned long)(arg8); \
5605 _argvec[9] = (unsigned long)(arg9); \
5606 _argvec[10] = (unsigned long)(arg10); \
5607 _argvec[11] = (unsigned long)(arg11); \
5608 __asm__ volatile( \
5609 "subu $29, $29, 8 \n\t" \
5610 "sw $28, 0($29) \n\t" \
5611 "sw $31, 4($29) \n\t" \
5612 "lw $4, 20(%1) \n\t" \
5613 "subu $29, $29, 48\n\t" \
5614 "sw $4, 16($29) \n\t" \
5615 "lw $4, 24(%1) \n\t" \
5616 "sw $4, 20($29) \n\t" \
5617 "lw $4, 28(%1) \n\t" \
5618 "sw $4, 24($29) \n\t" \
5619 "lw $4, 32(%1) \n\t" \
5620 "sw $4, 28($29) \n\t" \
5621 "lw $4, 36(%1) \n\t" \
5622 "sw $4, 32($29) \n\t" \
5623 "lw $4, 40(%1) \n\t" \
5624 "sw $4, 36($29) \n\t" \
5625 "lw $4, 44(%1) \n\t" \
5626 "sw $4, 40($29) \n\t" \
5627 "lw $4, 4(%1) \n\t" \
5628 "lw $5, 8(%1) \n\t" \
5629 "lw $6, 12(%1) \n\t" \
5630 "lw $7, 16(%1) \n\t" \
5631 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5632 VALGRIND_CALL_NOREDIR_T9 \
5633 "addu $29, $29, 48 \n\t" \
5634 "lw $28, 0($29) \n\t" \
5635 "lw $31, 4($29) \n\t" \
5636 "addu $29, $29, 8 \n\t" \
5637 "move %0, $2\n" \
5638 : /*out*/ "=r" (_res) \
5639 : /*in*/ "0" (&_argvec[0]) \
5640 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5641 ); \
5642 lval = (__typeof__(lval)) _res; \
5643 } while (0)
5644
5645#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5646 arg6,arg7,arg8,arg9,arg10, \
5647 arg11,arg12) \
5648 do { \
5649 volatile OrigFn _orig = (orig); \
5650 volatile unsigned long _argvec[13]; \
5651 volatile unsigned long _res; \
5652 _argvec[0] = (unsigned long)_orig.nraddr; \
5653 _argvec[1] = (unsigned long)(arg1); \
5654 _argvec[2] = (unsigned long)(arg2); \
5655 _argvec[3] = (unsigned long)(arg3); \
5656 _argvec[4] = (unsigned long)(arg4); \
5657 _argvec[5] = (unsigned long)(arg5); \
5658 _argvec[6] = (unsigned long)(arg6); \
5659 _argvec[7] = (unsigned long)(arg7); \
5660 _argvec[8] = (unsigned long)(arg8); \
5661 _argvec[9] = (unsigned long)(arg9); \
5662 _argvec[10] = (unsigned long)(arg10); \
5663 _argvec[11] = (unsigned long)(arg11); \
5664 _argvec[12] = (unsigned long)(arg12); \
5665 __asm__ volatile( \
5666 "subu $29, $29, 8 \n\t" \
5667 "sw $28, 0($29) \n\t" \
5668 "sw $31, 4($29) \n\t" \
5669 "lw $4, 20(%1) \n\t" \
5670 "subu $29, $29, 56\n\t" \
5671 "sw $4, 16($29) \n\t" \
5672 "lw $4, 24(%1) \n\t" \
5673 "sw $4, 20($29) \n\t" \
5674 "lw $4, 28(%1) \n\t" \
5675 "sw $4, 24($29) \n\t" \
5676 "lw $4, 32(%1) \n\t" \
5677 "sw $4, 28($29) \n\t" \
5678 "lw $4, 36(%1) \n\t" \
5679 "sw $4, 32($29) \n\t" \
5680 "lw $4, 40(%1) \n\t" \
5681 "sw $4, 36($29) \n\t" \
5682 "lw $4, 44(%1) \n\t" \
5683 "sw $4, 40($29) \n\t" \
5684 "lw $4, 48(%1) \n\t" \
5685 "sw $4, 44($29) \n\t" \
5686 "lw $4, 4(%1) \n\t" \
5687 "lw $5, 8(%1) \n\t" \
5688 "lw $6, 12(%1) \n\t" \
5689 "lw $7, 16(%1) \n\t" \
5690 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5691 VALGRIND_CALL_NOREDIR_T9 \
5692 "addu $29, $29, 56 \n\t" \
5693 "lw $28, 0($29) \n\t" \
5694 "lw $31, 4($29) \n\t" \
5695 "addu $29, $29, 8 \n\t" \
5696 "move %0, $2\n" \
5697 : /*out*/ "=r" (_res) \
5698 : /*in*/ "r" (&_argvec[0]) \
5699 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5700 ); \
5701 lval = (__typeof__(lval)) _res; \
5702 } while (0)
5703
5704#endif /* PLAT_mips32_linux */
5705
/* ------------------------- nanomips-linux -------------------- */

#if defined(PLAT_nanomips_linux)

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS "$t4", "$t5", "$a0", "$a1", "$a2", \
"$a3", "$a4", "$a5", "$a6", "$a7", "$t0", "$t1", "$t2", "$t3", \
"$t8","$t9", "$at"

/* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
   long) == 4.

   Common pattern: the target address (_orig.nraddr) and the args are
   marshalled through _argvec[]; args 1..8 are loaded into $a0..$a7,
   the target goes in $t9 for the no-redirect call, and the result is
   read back from $a0.  Variants with more than 8 args spill the rest
   to a 16-byte stack frame. */

/* Call a 0-argument function. */
#define CALL_FN_W_v(lval, orig)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[1];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      __asm__ volatile(                                          \
         "lw $t9, 0(%1)\n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0\n"                                        \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Call a 1-argument function. */
#define CALL_FN_W_W(lval, orig, arg1)                            \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[2];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      __asm__ volatile(                                          \
         "lw $t9, 0(%1)\n\t"                                     \
         "lw $a0, 4(%1)\n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0\n"                                        \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Call a 2-argument function. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[3];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      __asm__ volatile(                                          \
         "lw $t9, 0(%1)\n\t"                                     \
         "lw $a0, 4(%1)\n\t"                                     \
         "lw $a1, 8(%1)\n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0\n"                                        \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Call a 3-argument function. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[4];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      __asm__ volatile(                                          \
         "lw $t9, 0(%1)\n\t"                                     \
         "lw $a0, 4(%1)\n\t"                                     \
         "lw $a1, 8(%1)\n\t"                                     \
         "lw $a2,12(%1)\n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0\n"                                        \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Call a 4-argument function. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)          \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[5];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      __asm__ volatile(                                          \
         "lw $t9, 0(%1)\n\t"                                     \
         "lw $a0, 4(%1)\n\t"                                     \
         "lw $a1, 8(%1)\n\t"                                     \
         "lw $a2,12(%1)\n\t"                                     \
         "lw $a3,16(%1)\n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0\n"                                        \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Call a 5-argument function. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[6];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      __asm__ volatile(                                          \
         "lw $t9, 0(%1)\n\t"                                     \
         "lw $a0, 4(%1)\n\t"                                     \
         "lw $a1, 8(%1)\n\t"                                     \
         "lw $a2,12(%1)\n\t"                                     \
         "lw $a3,16(%1)\n\t"                                     \
         "lw $a4,20(%1)\n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0\n"                                        \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Call a 6-argument function. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[7];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      __asm__ volatile(                                          \
         "lw $t9, 0(%1)\n\t"                                     \
         "lw $a0, 4(%1)\n\t"                                     \
         "lw $a1, 8(%1)\n\t"                                     \
         "lw $a2,12(%1)\n\t"                                     \
         "lw $a3,16(%1)\n\t"                                     \
         "lw $a4,20(%1)\n\t"                                     \
         "lw $a5,24(%1)\n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0\n"                                        \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Call a 7-argument function. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7)                                       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[8];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      __asm__ volatile(                                          \
         "lw $t9, 0(%1)\n\t"                                     \
         "lw $a0, 4(%1)\n\t"                                     \
         "lw $a1, 8(%1)\n\t"                                     \
         "lw $a2,12(%1)\n\t"                                     \
         "lw $a3,16(%1)\n\t"                                     \
         "lw $a4,20(%1)\n\t"                                     \
         "lw $a5,24(%1)\n\t"                                     \
         "lw $a6,28(%1)\n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0\n"                                        \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Call an 8-argument function; all args still fit in $a0..$a7. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[9];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      __asm__ volatile(                                          \
         "lw $t9, 0(%1)\n\t"                                     \
         "lw $a0, 4(%1)\n\t"                                     \
         "lw $a1, 8(%1)\n\t"                                     \
         "lw $a2,12(%1)\n\t"                                     \
         "lw $a3,16(%1)\n\t"                                     \
         "lw $a4,20(%1)\n\t"                                     \
         "lw $a5,24(%1)\n\t"                                     \
         "lw $a6,28(%1)\n\t"                                     \
         "lw $a7,32(%1)\n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0\n"                                        \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* Call a 9-argument function; arg9 is passed on the stack, in a
   16-byte frame created around the call ($t9 doubles as scratch for
   the spill before the target address is loaded into it). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8,arg9)                             \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[10];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      __asm__ volatile(                                          \
         "addiu $sp, $sp, -16 \n\t"                              \
         "lw $t9,36(%1)       \n\t"                              \
         "sw $t9, 0($sp)      \n\t"                              \
         "lw $t9, 0(%1)       \n\t"                              \
         "lw $a0, 4(%1)       \n\t"                              \
         "lw $a1, 8(%1)       \n\t"                              \
         "lw $a2,12(%1)       \n\t"                              \
         "lw $a3,16(%1)       \n\t"                              \
         "lw $a4,20(%1)       \n\t"                              \
         "lw $a5,24(%1)       \n\t"                              \
         "lw $a6,28(%1)       \n\t"                              \
         "lw $a7,32(%1)       \n\t"                              \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0        \n\t"                              \
         "addiu $sp, $sp, 16  \n\t"                              \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* As CALL_FN_W_9W, with arg10 also spilled to the stack frame. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10)                      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[11];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      __asm__ volatile(                                          \
         "addiu $sp, $sp, -16 \n\t"                              \
         "lw $t9,36(%1)       \n\t"                              \
         "sw $t9, 0($sp)      \n\t"                              \
         "lw $t9,40(%1)       \n\t"                              \
         "sw $t9, 4($sp)      \n\t"                              \
         "lw $t9, 0(%1)       \n\t"                              \
         "lw $a0, 4(%1)       \n\t"                              \
         "lw $a1, 8(%1)       \n\t"                              \
         "lw $a2,12(%1)       \n\t"                              \
         "lw $a3,16(%1)       \n\t"                              \
         "lw $a4,20(%1)       \n\t"                              \
         "lw $a5,24(%1)       \n\t"                              \
         "lw $a6,28(%1)       \n\t"                              \
         "lw $a7,32(%1)       \n\t"                              \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0        \n\t"                              \
         "addiu $sp, $sp, 16  \n\t"                              \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* As CALL_FN_W_10W, with arg11 also spilled. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,      \
                      arg6,arg7,arg8,arg9,arg10,                 \
                      arg11)                                     \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[12];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      _argvec[11] = (unsigned long)(arg11);                      \
      __asm__ volatile(                                          \
         "addiu $sp, $sp, -16 \n\t"                              \
         "lw $t9,36(%1)       \n\t"                              \
         "sw $t9, 0($sp)      \n\t"                              \
         "lw $t9,40(%1)       \n\t"                              \
         "sw $t9, 4($sp)      \n\t"                              \
         "lw $t9,44(%1)       \n\t"                              \
         "sw $t9, 8($sp)      \n\t"                              \
         "lw $t9, 0(%1)       \n\t"                              \
         "lw $a0, 4(%1)       \n\t"                              \
         "lw $a1, 8(%1)       \n\t"                              \
         "lw $a2,12(%1)       \n\t"                              \
         "lw $a3,16(%1)       \n\t"                              \
         "lw $a4,20(%1)       \n\t"                              \
         "lw $a5,24(%1)       \n\t"                              \
         "lw $a6,28(%1)       \n\t"                              \
         "lw $a7,32(%1)       \n\t"                              \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0        \n\t"                              \
         "addiu $sp, $sp, 16  \n\t"                              \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* As CALL_FN_W_11W, with arg12 also spilled; the four stack slots
   exactly fill the 16-byte frame. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,      \
                      arg6,arg7,arg8,arg9,arg10,                 \
                      arg11,arg12)                               \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[13];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      _argvec[11] = (unsigned long)(arg11);                      \
      _argvec[12] = (unsigned long)(arg12);                      \
      __asm__ volatile(                                          \
         "addiu $sp, $sp, -16 \n\t"                              \
         "lw $t9,36(%1)       \n\t"                              \
         "sw $t9, 0($sp)      \n\t"                              \
         "lw $t9,40(%1)       \n\t"                              \
         "sw $t9, 4($sp)      \n\t"                              \
         "lw $t9,44(%1)       \n\t"                              \
         "sw $t9, 8($sp)      \n\t"                              \
         "lw $t9,48(%1)       \n\t"                              \
         "sw $t9,12($sp)      \n\t"                              \
         "lw $t9, 0(%1)       \n\t"                              \
         "lw $a0, 4(%1)       \n\t"                              \
         "lw $a1, 8(%1)       \n\t"                              \
         "lw $a2,12(%1)       \n\t"                              \
         "lw $a3,16(%1)       \n\t"                              \
         "lw $a4,20(%1)       \n\t"                              \
         "lw $a5,24(%1)       \n\t"                              \
         "lw $a6,28(%1)       \n\t"                              \
         "lw $a7,32(%1)       \n\t"                              \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $a0        \n\t"                              \
         "addiu $sp, $sp, 16  \n\t"                              \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

#endif /* PLAT_nanomips_linux */
6121
/* ------------------------- mips64-linux ------------------------- */

#if defined(PLAT_mips64_linux)

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6",       \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"

/* These CALL_FN_ macros assume that on mips64-linux,
   sizeof(long long) == 8.

   Common pattern: the target address (_orig.nraddr) and the args are
   marshalled through _argvec[]; args 1..8 are loaded into $4..$11,
   any further args are spilled to a new stack frame, the target goes
   in $25 ($t9), and the result is read back from $2 ($v0). */

/* Widen a value to register (long long) width; the intermediate
   (long) cast sign-extends 32-bit values. */
#define MIPS64_LONG2REG_CAST(x) ((long long)(long)x)

/* Call a 0-argument function. */
#define CALL_FN_W_v(lval, orig)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[1];                    \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      __asm__ volatile(                                          \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)

/* Call a 1-argument function. */
#define CALL_FN_W_W(lval, orig, arg1)                            \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[2];                    \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      __asm__ volatile(                                          \
         "ld $4, 8(%1)\n\t"   /* arg1*/                          \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)

/* Call a 2-argument function. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[3];                    \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      __asm__ volatile(                                          \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)

/* Call a 3-argument function. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[4];                    \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      __asm__ volatile(                                          \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)

/* Call a 4-argument function. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)          \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[5];                    \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      __asm__ volatile(                                          \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)

/* Call a 5-argument function. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[6];                    \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                   \
      __asm__ volatile(                                          \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $8, 40(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)

/* Call a 6-argument function. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[7];                    \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                   \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                   \
      __asm__ volatile(                                          \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $8, 40(%1)\n\t"                                     \
         "ld $9, 48(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)

/* Call a 7-argument function. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7)                                       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[8];                    \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                   \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                   \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                   \
      __asm__ volatile(                                          \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $8, 40(%1)\n\t"                                     \
         "ld $9, 48(%1)\n\t"                                     \
         "ld $10, 56(%1)\n\t"                                    \
         "ld $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)

/* Call an 8-argument function; all args still fit in $4..$11. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[9];                    \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                   \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                   \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                   \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                   \
      __asm__ volatile(                                          \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $8, 40(%1)\n\t"                                     \
         "ld $9, 48(%1)\n\t"                                     \
         "ld $10, 56(%1)\n\t"                                    \
         "ld $11, 64(%1)\n\t"                                    \
         "ld $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)

/* Call a 9-argument function; arg9 goes on the stack, in an 8-byte
   frame created around the call ($4 doubles as scratch for the spill
   before arg1 is loaded into it). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8,arg9)                             \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[10];                   \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                   \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                   \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                   \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                   \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                   \
      __asm__ volatile(                                          \
         "dsubu $29, $29, 8\n\t"                                 \
         "ld $4, 72(%1)\n\t"                                     \
         "sd $4, 0($29)\n\t"                                     \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $8, 40(%1)\n\t"                                     \
         "ld $9, 48(%1)\n\t"                                     \
         "ld $10, 56(%1)\n\t"                                    \
         "ld $11, 64(%1)\n\t"                                    \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "daddu $29, $29, 8\n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)

/* As CALL_FN_W_9W, with arg10 also spilled (16-byte frame). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10)                      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[11];                   \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                   \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                   \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                   \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                   \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                   \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                 \
      __asm__ volatile(                                          \
         "dsubu $29, $29, 16\n\t"                                \
         "ld $4, 72(%1)\n\t"                                     \
         "sd $4, 0($29)\n\t"                                     \
         "ld $4, 80(%1)\n\t"                                     \
         "sd $4, 8($29)\n\t"                                     \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $8, 40(%1)\n\t"                                     \
         "ld $9, 48(%1)\n\t"                                     \
         "ld $10, 56(%1)\n\t"                                    \
         "ld $11, 64(%1)\n\t"                                    \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "daddu $29, $29, 16\n\t"                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)

/* As CALL_FN_W_10W, with arg11 also spilled (24-byte frame). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,      \
                      arg6,arg7,arg8,arg9,arg10,                 \
                      arg11)                                     \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[12];                   \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                   \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                   \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                   \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                   \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                   \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                 \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11);                 \
      __asm__ volatile(                                          \
         "dsubu $29, $29, 24\n\t"                                \
         "ld $4, 72(%1)\n\t"                                     \
         "sd $4, 0($29)\n\t"                                     \
         "ld $4, 80(%1)\n\t"                                     \
         "sd $4, 8($29)\n\t"                                     \
         "ld $4, 88(%1)\n\t"                                     \
         "sd $4, 16($29)\n\t"                                    \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $8, 40(%1)\n\t"                                     \
         "ld $9, 48(%1)\n\t"                                     \
         "ld $10, 56(%1)\n\t"                                    \
         "ld $11, 64(%1)\n\t"                                    \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "daddu $29, $29, 24\n\t"                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)

/* As CALL_FN_W_11W, with arg12 also spilled (32-byte frame). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,      \
                      arg6,arg7,arg8,arg9,arg10,                 \
                      arg11,arg12)                               \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[13];                   \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                   \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                   \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                   \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                   \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                   \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                 \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11);                 \
      _argvec[12] = MIPS64_LONG2REG_CAST(arg12);                 \
      __asm__ volatile(                                          \
         "dsubu $29, $29, 32\n\t"                                \
         "ld $4, 72(%1)\n\t"                                     \
         "sd $4, 0($29)\n\t"                                     \
         "ld $4, 80(%1)\n\t"                                     \
         "sd $4, 8($29)\n\t"                                     \
         "ld $4, 88(%1)\n\t"                                     \
         "sd $4, 16($29)\n\t"                                    \
         "ld $4, 96(%1)\n\t"                                     \
         "sd $4, 24($29)\n\t"                                    \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $8, 40(%1)\n\t"                                     \
         "ld $9, 48(%1)\n\t"                                     \
         "ld $10, 56(%1)\n\t"                                    \
         "ld $11, 64(%1)\n\t"                                    \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "daddu $29, $29, 32\n\t"                                \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)

#endif /* PLAT_mips64_linux */
6540
6541/* ------------------------------------------------------------------ */
6542/* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
6543/* */
6544/* ------------------------------------------------------------------ */
6545
6546/* Some request codes. There are many more of these, but most are not
6547 exposed to end-user view. These are the public ones, all of the
6548 form 0x1000 + small_number.
6549
6550 Core ones are in the range 0x00000000--0x0000ffff. The non-public
6551 ones start at 0x2000.
6552*/
6553
/* These macros are used by tools -- they must be public, but don't
   embed them into other programs.  A tool's request code space is
   formed by packing two bytes ('a' and 'b', masked to 8 bits each)
   into the top 16 bits of the request word; VG_IS_TOOL_USERREQ tests
   whether request value 'v' belongs to the tool identified by (a,b). */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))

/* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
   This enum comprises an ABI exported by Valgrind to programs
   which use client requests.  DO NOT CHANGE THE NUMERIC VALUES OF THESE
   ENTRIES, NOR DELETE ANY -- add new ones at the end of the most
   relevant group. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a monitor
             command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* Allows the client program to change a dynamic command line
             option. */
          VG_USERREQ__CLO_CHANGE = 0x1203,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK   = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL   = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL  = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC    = 0x1305,
          VG_USERREQ__MEMPOOL_FREE     = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM     = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL     = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE   = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS   = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Some requests used for Valgrind internal, such as
             self-test or self-hosting. */
          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901,
          /* Used by Inner Valgrind to inform Outer Valgrind where to
             find the list of inner guest threads */
          VG_USERREQ__INNER_THREADS    = 0x1902
   } Vg_ClientRequest;

/* Non-GCC compilers do not understand GCC's __extension__ keyword, so
   define it away for them. */
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif
6651
/* Returns the number of Valgrinds this code is running under.  That
   is, 0 if running natively, 1 if running under Valgrind, 2 if
   running under Valgrind which is running under another Valgrind,
   etc. */
/* Fix: the final macro line previously ended with a stray line
   continuation '\', which made the definition silently absorb the
   following line.  It only worked because that line happened to be
   blank; the continuation is removed so the macro ends here. */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)
/* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
   _qzz_len - 1].  Useful if you are debugging a JITter or some such,
   since it provides a way to make sure valgrind will retranslate the
   invalidated area.  Returns no value. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS,  \
                                    _qzz_addr, _qzz_len, 0, 0, 0)

/* Used by an Inner Valgrind to tell an Outer Valgrind where to find
   the list of inner guest threads (see VG_USERREQ__INNER_THREADS in
   the enum above).  Only relevant when self-hosting Valgrind. */
#define VALGRIND_INNER_THREADS(_qzz_addr)                               \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__INNER_THREADS,           \
                                   _qzz_addr, 0, 0, 0, 0)
6672
/* These requests are for getting Valgrind itself to print something.
   Possibly with a backtrace.  This is a really ugly hack.  The return value
   is the number of characters printed, excluding the "**<pid>** " part at the
   start and the backtrace (if present). */

/* NOTE(review): '&&' binds tighter than '||', so this condition reads
   as GNUC || (INTEL && !MSC).  Presumably (GNUC || INTEL) && !MSC was
   intended -- matches upstream valgrind.h, so left unchanged; confirm
   before altering. */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
/* Modern GCC will optimize the static routine out if unused,
   and unused attribute will shut down warnings about it. */
static int VALGRIND_PRINTF(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF(const char *format, ...)
{
/* When valgrind support is compiled out (project-provided IS_ENABLED
   config test), this degenerates to a no-op returning 0. */
#if !IS_ENABLED(CONFIG_VALGRIND)
   (void)format;
   return 0;
#else /* CONFIG_VALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
   /* Pass the va_list by reference: the by-value PRINTF request is
      deprecated because a va_list need not fit in a UWord (see the
      comment on VG_USERREQ__PRINTF in the enum above). */
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* CONFIG_VALGRIND */
}
6718
/* As VALGRIND_PRINTF above, but Valgrind also appends a stack
   backtrace to the message.  (Same NOTE(review) about '&&'/'||'
   precedence applies to this condition.) */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
{
/* No-op stub when valgrind support is compiled out. */
#if !IS_ENABLED(CONFIG_VALGRIND)
   (void)format;
   return 0;
#else /* CONFIG_VALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
   /* va_list passed by reference -- see VALGRIND_PRINTF. */
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* CONFIG_VALGRIND */
}
6757
/* These requests allow control to move from the simulated CPU to the
   real CPU, calling an arbitrary function.

   Note that the current ThreadId is inserted as the first argument.
   So this call:

     VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)

   requires f to have this signature:

     Word f(Word tid, Word arg1, Word arg2)

   where "Word" is a word-sized type.

   Note that these client requests are not entirely reliable.  For example,
   if you call a function with them that subsequently calls printf(),
   there's a high chance Valgrind will crash.  Generally, your prospects of
   these working are made higher if the called function does not refer to
   any global variables, and does not refer to any libc or other functions
   (printf et al).  Any kind of entanglement with libc or dynamic linking is
   likely to have a bad outcome, for tricky reasons which we've grappled
   with a lot in the past.
*/
/* Each macro below returns 0 when not running under Valgrind; the tid
   argument is inserted automatically as described above. */
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)

#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL1,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, 0, 0, 0)

#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL2,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2, 0, 0)

#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,             \
                                    VG_USERREQ__CLIENT_CALL3,           \
                                    _qyy_fn,                            \
                                    _qyy_arg1, _qyy_arg2,               \
                                    _qyy_arg3, 0)

/* Counts the number of errors that have been recorded by a tool.  Nb:
   the tool must record the errors with VG_(maybe_record_error)() or
   VG_(unique_error)() for them to be counted. */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)
6814
6815/* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
6816 when heap blocks are allocated in order to give accurate results. This
6817 happens automatically for the standard allocator functions such as
6818 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
6819 delete[], etc.
6820
6821 But if your program uses a custom allocator, this doesn't automatically
6822 happen, and Valgrind will not do as well. For example, if you allocate
6823 superblocks with mmap() and then allocates chunks of the superblocks, all
6824 Valgrind's observations will be at the mmap() level and it won't know that
6825 the chunks should be considered separate entities. In Memcheck's case,
6826 that means you probably won't get heap block overrun detection (because
6827 there won't be redzones marked as unaddressable) and you definitely won't
6828 get any leak detection.
6829
6830 The following client requests allow a custom allocator to be annotated so
6831 that it can be handled accurately by Valgrind.
6832
6833 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
6834 by a malloc()-like function. For Memcheck (an illustrative case), this
6835 does two things:
6836
6837 - It records that the block has been allocated. This means any addresses
6838 within the block mentioned in error messages will be
6839 identified as belonging to the block. It also means that if the block
6840 isn't freed it will be detected by the leak checker.
6841
6842 - It marks the block as being addressable and undefined (if 'is_zeroed' is
6843 not set), or addressable and defined (if 'is_zeroed' is set). This
6844 controls how accesses to the block by the program are handled.
6845
6846 'addr' is the start of the usable block (ie. after any
6847 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
6848 can apply redzones -- these are blocks of padding at the start and end of
6849 each block. Adding redzones is recommended as it makes it much more likely
6850 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
6851 zeroed (or filled with another predictable value), as is the case for
6852 calloc().
6853
6854 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
6855 heap block -- that will be used by the client program -- is allocated.
6856 It's best to put it at the outermost level of the allocator if possible;
6857 for example, if you have a function my_alloc() which calls
6858 internal_alloc(), and the client request is put inside internal_alloc(),
6859 stack traces relating to the heap block will contain entries for both
6860 my_alloc() and internal_alloc(), which is probably not what you want.
6861
6862 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
6863 custom blocks from within a heap block, B, that has been allocated with
6864 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
6865 -- the custom blocks will take precedence.
6866
6867 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
6868 Memcheck, it does two things:
6869
6870 - It records that the block has been deallocated. This assumes that the
6871 block was annotated as having been allocated via
6872 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6873
6874 - It marks the block as being unaddressable.
6875
6876 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
6877 heap block is deallocated.
6878
6879 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
6880 Memcheck, it does four things:
6881
6882 - It records that the size of a block has been changed. This assumes that
6883 the block was annotated as having been allocated via
6884 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6885
6886 - If the block shrunk, it marks the freed memory as being unaddressable.
6887
6888 - If the block grew, it marks the new area as undefined and defines a red
6889 zone past the end of the new block.
6890
6891 - The V-bits of the overlap between the old and the new block are preserved.
6892
6893 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
6894 and before deallocation of the old block.
6895
6896 In many cases, these three client requests will not be enough to get your
6897 allocator working well with Memcheck. More specifically, if your allocator
6898 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
6899 will be necessary to mark the memory as addressable just before the zeroing
6900 occurs, otherwise you'll get a lot of invalid write errors. For example,
6901 you'll need to do this if your allocator recycles freed blocks, but it
6902 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
6903 Alternatively, if your allocator reuses freed blocks for allocator-internal
6904 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
6905
6906 Really, what's happening is a blurring of the lines between the client
6907 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
6908 memory should be considered unaddressable to the client program, but the
6909 allocator knows more than the rest of the client program and so may be able
6910 to safely access it. Extra client requests are necessary for Valgrind to
6911 understand the distinction between the allocator and the rest of the
6912 program.
6913
6914 Ignored if addr == 0.
6915*/
/* Mark [addr, addr+sizeB) as a heap block handed out by a custom
   allocator, with rzB bytes of redzone and 'is_zeroed' telling the tool
   whether the payload is already defined.  Full semantics are in the
   big comment above.  Ignored if addr == 0. */
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)    \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
                                    addr, sizeB, rzB, is_zeroed, 0)

/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK,    \
                                    addr, oldSizeB, newSizeB, rzB, 0)

/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                        \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,   \
                                    addr, rzB, 0, 0, 0)
6933
/* Create a memory pool anchored at 'pool', with rzB bytes of redzone
   around each allocation; 'is_zeroed' says whether allocations start
   out defined. */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,   \
                                    pool, rzB, is_zeroed, 0, 0)

/* Create a memory pool with some flags specifying extended behaviour.
   When flags is zero, the behaviour is identical to VALGRIND_CREATE_MEMPOOL.

   The flag VALGRIND_MEMPOOL_METAPOOL specifies that the pieces of memory
   associated with the pool using VALGRIND_MEMPOOL_ALLOC  will be used
   by the application as superblocks to dole out MALLOC_LIKE blocks using
   VALGRIND_MALLOCLIKE_BLOCK.  In other words, a meta pool is a "2 levels"
   pool : first level is the blocks described by VALGRIND_MEMPOOL_ALLOC.
   The second level blocks are described using VALGRIND_MALLOCLIKE_BLOCK.
   Note that the association between the pool and the second level blocks
   is implicit : second level blocks will be located inside first level
   blocks.  It is necessary to use the VALGRIND_MEMPOOL_METAPOOL flag
   for such 2 levels pools, as otherwise valgrind will detect overlapping
   memory blocks, and will abort execution (e.g. during leak search).

   Such a meta pool can also be marked as an 'auto free' pool using the flag
   VALGRIND_MEMPOOL_AUTO_FREE, which must be OR-ed together with the
   VALGRIND_MEMPOOL_METAPOOL. For an 'auto free' pool, VALGRIND_MEMPOOL_FREE
   will automatically free the second level blocks that are contained
   inside the first level block freed with VALGRIND_MEMPOOL_FREE.
   In other words, calling VALGRIND_MEMPOOL_FREE will cause implicit calls
   to VALGRIND_FREELIKE_BLOCK for all the second level blocks included
   in the first level block.
   Note: it is an error to use the VALGRIND_MEMPOOL_AUTO_FREE flag
   without the VALGRIND_MEMPOOL_METAPOOL flag.
*/
#define VALGRIND_MEMPOOL_AUTO_FREE  1
#define VALGRIND_MEMPOOL_METAPOOL   2
#define VALGRIND_CREATE_MEMPOOL_EXT(pool, rzB, is_zeroed, flags)        \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,          \
                                   pool, rzB, is_zeroed, flags, 0)
6970
/* Destroy a memory pool. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                            \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,  \
                                    pool, 0, 0, 0, 0)

/* Associate a piece of memory with a memory pool. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,    \
                                    pool, addr, size, 0, 0)

/* Disassociate a piece of memory from a memory pool. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,     \
                                    pool, addr, 0, 0, 0)

/* Disassociate any pieces outside a particular range. */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,     \
                                    pool, addr, size, 0, 0)

/* Tell the tool that the pool previously anchored at address poolA
   has moved to anchor address poolB.  (The old comment here --
   "Resize and/or move a piece" -- was copy-pasted from
   VALGRIND_MEMPOOL_CHANGE and did not match this macro's two
   pool-anchor arguments.) */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                       \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,     \
                                    poolA, poolB, 0, 0, 0)

/* Resize and/or move a piece associated with a memory pool. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,   \
                                    pool, addrA, addrB, size, 0)

/* Return 1 if a mempool exists, else 0. */
#define VALGRIND_MEMPOOL_EXISTS(pool)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MEMPOOL_EXISTS,        \
                               pool, 0, 0, 0, 0)
7006
/* Mark a piece of memory as being a stack.  Returns a stack id.
   start is the lowest addressable stack byte, end is the highest
   addressable stack byte.  The returned id is the handle to pass to
   VALGRIND_STACK_DEREGISTER / VALGRIND_STACK_CHANGE below. */
#define VALGRIND_STACK_REGISTER(start, end)                       \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__STACK_REGISTER,        \
                               start, end, 0, 0, 0)

/* Unmark the piece of memory associated with a stack id as being a
   stack. */
#define VALGRIND_STACK_DEREGISTER(id)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
                                    id, 0, 0, 0, 0)

/* Change the start and end address of the stack id.
   start is the new lowest addressable stack byte, end is the new highest
   addressable stack byte. */
#define VALGRIND_STACK_CHANGE(id, start, end)                     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,     \
                                    id, start, end, 0, 0)

/* Load PDB debug info for Wine PE image_map. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
                                    fd, ptr, total_size, delta, 0)

/* Map a code address to a source file name and line number.  buf64
   must point to a 64-byte buffer in the caller's address space.  The
   result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                    \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,      \
                               addr, buf64, 0, 0, 0)
7041
/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled.
   (Implemented as a +1 delta to this thread's error-disablement
   counter; see VG_USERREQ__CHANGE_ERR_DISABLEMENT above.) */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    1, 0, 0, 0, 0)

/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING.  (A -1 delta to the same
   counter.) */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    -1, 0, 0, 0, 0)

/* Execute a monitor command from the client program.
   If a connection is opened with GDB, the output will be sent
   according to the output mode set for vgdb.
   If no connection is opened, output will go to the log output.
   Returns 1 if command not recognised, 0 otherwise. */
#define VALGRIND_MONITOR_COMMAND(command)                               \
   VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND,  \
                                   command, 0, 0, 0, 0)

/* Change the value of a dynamic command line option.
   Note that unknown or not dynamically changeable options
   will cause a warning message to be output. */
#define VALGRIND_CLO_CHANGE(option)                           \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CLO_CHANGE,    \
                                   option, 0, 0, 0, 0)
7075
/* The PLAT_* platform-selection macros are internal to this header;
   undefine them all so they do not leak into code that includes it. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_nanomips_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris

#endif   /* __VALGRIND_H */